Barr, Keiran; Laframboise, Jacob; Ungi, Tamas; Hookey, Lawrence; Fichtinger, Gabor
Automated segmentation of computed tomography colonography images using a 3D U-Net Conference
SPIE Medical Imaging, 2020.
@conference{KBarr2020,
  title     = {Automated segmentation of computed tomography colonography images using a 3D U-Net},
  author    = {Keiran Barr and Jacob Laframboise and Tamas Ungi and Lawrence Hookey and Gabor Fichtinger},
  doi       = {10.1117/12.2549749},
  year      = {2020},
  date      = {2020-03-01},
  urldate   = {2020-03-01},
  booktitle = {SPIE Medical Imaging},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Wu, Victoria; Ungi, Tamas; Sunderland, Kyle R.; Pigeau, Grace; Schonewille, Abigael; Fichtinger, Gabor
Using multiple frame U-net for automated segmentation of spinal ultrasound images Conference
18th Annual Imaging Network Ontario (ImNO) Symposium, 2020.
@conference{Wu2020b,
  author    = {Victoria Wu and Tamas Ungi and Kyle R. Sunderland and Grace Pigeau and Abigael Schonewille and Gabor Fichtinger},
  title     = {Using multiple frame U-net for automated segmentation of spinal ultrasound images},
  booktitle = {18th Annual Imaging Network Ontario (ImNO) Symposium},
  url       = {https://www.imno.ca/sites/default/files/ImNO2020Proceedings.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wu2020b.pdf},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Barr, Colton; Lasso, Andras; Asselin, Mark; Pieper, Steve; Robertson, Faith C.; Gormley, William B.; Fichtinger, Gabor
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE, Houston, Texas, United States, 2020.
@conference{BarrC2020,
  author       = {Colton Barr and Andras Lasso and Mark Asselin and Steve Pieper and Faith C. Robertson and William B. Gormley and Gabor Fichtinger},
  title        = {Towards portable image guidance and automatic patient registration using an RGB-D camera and video projector},
  booktitle    = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
  volume       = {11315},
  publisher    = {SPIE},
  organization = {SPIE},
  address      = {Houston, Texas, United States},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Barr2020.pdf},
  doi          = {10.1117/12.2549723},
  year         = {2020},
  date         = {2020-01-01},
  urldate      = {2020-01-01},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Fedorov, Andriy; Beichel, Reinhard; Kalpathy-Cramer, Jayashree; Clunie, David; Onken, Michael; Riesmeier, Jörg; Herz, Christian; Bauer, Christian; Beers, Andrew; Fillion-Robin, Jean-Christophe; Lasso, Andras; Pinter, Csaba; Pieper, Steve; Nolden, Marco; Maier-Hein, Klaus; Herrmann, Markus D.; Saltz, Joel; Prior, Fred; Fennessy, Fiona M.; Buatti, John; Kikinis, Ron
Quantitative Imaging Informatics for Cancer Research Journal Article
In: JCO Clinical Cancer Informatics, vol. 4, pp. 444-453, 2020.
@article{Fedorov2020,
  title     = {Quantitative Imaging Informatics for Cancer Research},
  author    = {Andriy Fedorov and Reinhard Beichel and Jayashree Kalpathy-Cramer and David Clunie and Michael Onken and Jörg Riesmeier and Christian Herz and Christian Bauer and Andrew Beers and Jean-Christophe Fillion-Robin and Andras Lasso and Csaba Pinter and Steve Pieper and Marco Nolden and Klaus Maier-Hein and Markus D. Herrmann and Joel Saltz and Fred Prior and Fiona M. Fennessy and John Buatti and Ron Kikinis},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Fedorov2020.pdf},
  doi       = {10.1200/CCI.19.00165},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  journal   = {JCO Clinical Cancer Informatics},
  volume    = {4},
  pages     = {444-453},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Laframboise, Jacob; Ungi, Tamas; Sunderland, Kyle R.; Zevin, Boris; Fichtinger, Gabor
Open source platform for automated collection of training data to support video-based feedback in surgical simulators Conference
SPIE Medical Imaging, SPIE, Houston, United States, 2020.
@conference{Laframboise2020a,
  author    = {Jacob Laframboise and Tamas Ungi and Kyle R. Sunderland and Boris Zevin and Gabor Fichtinger},
  title     = {Open source platform for automated collection of training data to support video-based feedback in surgical simulators},
  booktitle = {SPIE Medical Imaging},
  publisher = {SPIE},
  address   = {Houston, United States},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  abstract  = {<p><strong>Purpose:</strong> Surgical training could be improved by automatic detection of workflow steps. A platform to collect and organize tracking and video data would enable rapid development of deep learning solutions for surgical training. The purpose of this research is to demonstrate 3D Slicer / PLUS Toolkit as a platform for video annotation by identifying and annotating tools interacting with tissues in simulated hernia repair. <strong>Methods:</strong> Tracking data from an optical tracker and video data from a camera are collected by PLUS and 3D Slicer. To demonstrate the platform in use, we identify tissues during a surgical procedure using a neural network. The tracking data is used to identify what tool is in use. The solution is deployed with a custom Slicer module. <strong>Results:</strong> This platform allowed the collection and storage of enough tracked video data for training a convolutional neural network (CNN) to detect interactions with tissues and tools. The CNN was trained on this data and applied to new data with a testing accuracy of 98%. The model’s predictions can be weighted over several frames with a custom Slicer module to improve accuracy. <strong>Conclusion:</strong> We found the 3D Slicer and PLUS Toolkit platform to be a viable platform for training and deploying a solution that combines automatic video processing and optical tool tracking. We designed a proof of concept model to identify tissues with a trained CNN in real time along with tracking of surgical tools.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Wiercigroch, Julia; Hashtrudi-Zaad, Keyvan; Ungi, Tamas; Bisleri, Gianluigi; Fichtinger, Gabor
Force and torque feedback in endoscopic vessel harvesting Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE, Houston, Texas, United States, 2020.
@conference{Wiercigroch2020a,
  title        = {Force and torque feedback in endoscopic vessel harvesting},
  author       = {Julia Wiercigroch and Keyvan Hashtrudi-Zaad and Tamas Ungi and Gianluigi Bisleri and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wiercigroch2020a.pdf},
  doi          = {10.1117/12.2550160},
  year         = {2020},
  date         = {2020-01-01},
  urldate      = {2020-01-01},
  booktitle    = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
  volume       = {11315},
  publisher    = {SPIE},
  address      = {Houston, Texas, United States},
  organization = {SPIE},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Connolly, Laura; Jamzad, Amoon; Kaufmann, Martin; Rubino, Rachel; Sedghi, Alireza; Ungi, Tamas; Asselin, Mark; Yam, Scott; Rudan, John; Nicol, Christopher; Fichtinger, Gabor; Mousavi, Parvin
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE, Houston, Texas, United States, 2020.
@conference{Connolly2020a,
  title        = {Classification of tumor signatures from electrosurgical vapors using mass spectrometry and machine learning: a feasibility study},
  author       = {Laura Connolly and Amoon Jamzad and Martin Kaufmann and Rachel Rubino and Alireza Sedghi and Tamas Ungi and Mark Asselin and Scott Yam and John Rudan and Christopher Nicol and Gabor Fichtinger and Parvin Mousavi},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2020a.pdf},
  doi          = {10.1117/12.2549343},
  year         = {2020},
  date         = {2020-01-01},
  urldate      = {2020-01-01},
  booktitle    = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
  volume       = {11315},
  publisher    = {SPIE},
  address      = {Houston, Texas, United States},
  organization = {SPIE},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Wu, Victoria; Ungi, Tamas; Sunderland, Kyle R.; Pigeau, Grace; Schonewille, Abigael; Fichtinger, Gabor
SPIE Medical Imaging, 2020.
@conference{Wu2020a,
  author    = {Victoria Wu and Tamas Ungi and Kyle R. Sunderland and Grace Pigeau and Abigael Schonewille and Gabor Fichtinger},
  title     = {Automatic segmentation of spinal ultrasound landmarks with U-net using multiple consecutive images for input},
  booktitle = {SPIE Medical Imaging},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2020a-manuscript.pdf},
  doi       = {10.1117/12.2549584},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Choi, Perry S.; Nam, Hannah H.; Lasso, Andras; Herz, Christian; Drouin, Simon; Harrild, David M.; Quartermain, Michael; Fichtinger, Gabor; Mascio, Christopher E.; Emani, Sitaram; Jolley, Matthew A.
3D Modeling of Surgically Implanted Stent-Based Valves in the Mitral Position in Children Journal Article
In: The Annals of Thoracic Surgery, 2020.
@article{Choi2020,
  title     = {3D Modeling of Surgically Implanted Stent-Based Valves in the Mitral Position in Children},
  author    = {Perry S. Choi and Hannah H. Nam and Andras Lasso and Christian Herz and Simon Drouin and David M. Harrild and Michael Quartermain and Gabor Fichtinger and Christopher E. Mascio and Sitaram Emani and Matthew A. Jolley},
  url       = {https://doi.org/10.1016/j.athoracsur.2020.02.020},
  doi       = {10.1016/j.athoracsur.2020.02.020},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  journal   = {The Annals of Thoracic Surgery},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Janssen, Natasja; Kaufmann, Martin; Santilli, Alice; Jamzad, Amoon; Vanderbeck, Kaitlin; Ren, Kevin; Ungi, Tamas; Mousavi, Parvin; Rudan, John; McKay, Doug; Wang, Amy; Fichtinger, Gabor
Navigated tissue characterization during skin cancer surgery Journal Article
In: Int J Comput Assist Radiol Surg, 2020.
@article{Janssen2020a,
  title     = {Navigated tissue characterization during skin cancer surgery},
  author    = {Natasja Janssen and Martin Kaufmann and Alice Santilli and Amoon Jamzad and Kaitlin Vanderbeck and Kevin Ren and Tamas Ungi and Parvin Mousavi and John Rudan and Doug McKay and Amy Wang and Gabor Fichtinger},
  url       = {https://doi.org/10.1007/s11548-020-02200-4},
  doi       = {10.1007/s11548-020-02200-4},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  journal   = {Int J Comput Assist Radiol Surg},
  abstract  = {<p><strong>Purpose: </strong>Basal cell carcinoma (BCC) is the most commonly diagnosed skin cancer and is treated by surgical resection. Incomplete tumor removal requires surgical revision, leading to significant healthcare costs and impaired cosmesis. We investigated the clinical feasibility of a surgical navigation system for BCC surgery, based on molecular tissue characterization using rapid evaporative ionization mass spectrometry (REIMS).</p>
<p><strong>Methods: </strong>REIMS enables direct tissue characterization by analysis of cell-specific molecules present within surgical smoke, produced during electrocautery tissue resection. A tissue characterization model was built by acquiring REIMS spectra of BCC, healthy skin and fat from ex vivo skin cancer specimens. This model was used for tissue characterization during navigated skin cancer surgery. Navigation was enabled by optical tracking and real-time visualization of the cautery relative to a contoured resection volume. The surgical smoke was aspirated into a mass spectrometer and directly analyzed with REIMS. Classified BCC was annotated at the real-time position of the cautery. Feasibility of the navigation system, and tissue classification accuracy for ex vivo and intraoperative surgery were evaluated.</p>
<p><strong>Results: </strong>Fifty-four fresh excision specimens were used to build the ex vivo model of BCC, normal skin and fat, with 92% accuracy. While 3 surgeries were successfully navigated without breach of sterility, the intraoperative performance of the ex vivo model was low (< 50%). Hypotheses are: (1) the model was trained on heterogeneous mass spectra that did not originate from a single tissue type, (2) during surgery mixed tissue types were resected and thus presented to the model, and (3) the mass spectra were not validated by pathology.</p>
<p><strong>Conclusion: </strong>REIMS-navigated skin cancer surgery has the potential to detect and localize remaining tumor intraoperatively. Future work will be focused on improving our model by using a precise pencil cautery tip for burning localized tissue types, and having pathology-validated mass spectra.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
<p><strong>Methods: </strong>REIMS enables direct tissue characterization by analysis of cell-specific molecules present within surgical smoke, produced during electrocautery tissue resection. A tissue characterization model was built by acquiring REIMS spectra of BCC, healthy skin and fat from ex vivo skin cancer specimens. This model was used for tissue characterization during navigated skin cancer surgery. Navigation was enabled by optical tracking and real-time visualization of the cautery relative to a contoured resection volume. The surgical smoke was aspirated into a mass spectrometer and directly analyzed with REIMS. Classified BCC was annotated at the real-time position of the cautery. Feasibility of the navigation system, and tissue classification accuracy for ex vivo and intraoperative surgery were evaluated.</p>
<p><strong>Results: </strong>Fifty-four fresh excision specimens were used to build the ex vivo model of BCC, normal skin and fat, with 92% accuracy. While 3 surgeries were successfully navigated without breach of sterility, the intraoperative performance of the ex vivo model was low (< 50%). Hypotheses are: (1) the model was trained on heterogeneous mass spectra that did not originate from a single tissue type, (2) during surgery mixed tissue types were resected and thus presented to the model, and (3) the mass spectra were not validated by pathology.</p>
<p><strong>Conclusion: </strong>REIMS-navigated skin cancer surgery has the potential to detect and localize remaining tumor intraoperatively. Future work will be focused on improving our model by using a precise pencil cautery tip for burning localized tissue types, and having pathology-validated mass spectra.</p>
Lasso, Andras; Pinter, Csaba; Choueib, Saleh; Ungi, Tamas; Fichtinger, Gabor
Enhance medical software applications with immersive virtual reality experience Conference
Techna Symposium, Toronto, ON, Canada, 2019.
@conference{Lasso2019,
  author    = {Andras Lasso and Csaba Pinter and Saleh Choueib and Tamas Ungi and Gabor Fichtinger},
  title     = {Enhance medical software applications with immersive virtual reality experience},
  booktitle = {Techna Symposium},
  address   = {Toronto, ON, Canada},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Lasso2019.pdf},
  year      = {2019},
  date      = {2019-10-01},
  urldate   = {2019-10-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Vaughan, Thomas; Brastianos, H; Ungi, Tamas; Lasso, Andras; Falkson, Conrad; Fichtinger, Gabor
Needle Navigation and Catheter Reconstruction for Breast Brachytherapy Using Open Source Software Journal Article
In: Acta Polytechnica Hungarica, vol. 16, no. 8, pp. 99-118, 2019.
@article{Vaughan2019a,
  title     = {Needle Navigation and Catheter Reconstruction for Breast Brachytherapy Using Open Source Software},
  author    = {Thomas Vaughan and H Brastianos and Tamas Ungi and Andras Lasso and Conrad Falkson and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Vaughan2019a_0.pdf},
  year      = {2019},
  date      = {2019-09-01},
  urldate   = {2019-09-01},
  journal   = {Acta Polytechnica Hungarica},
  volume    = {16},
  number    = {8},
  pages     = {99-118},
  abstract  = {<p>Interstitial breast brachytherapy is a method to deliver radiation therapy directly to the site of cancer. It is a challenging procedure because of issues in localizing the seroma, needles, and catheters within the soft tissue. In this paper we present two open-source technologies based on electromagnetic tracking: a navigation system to help target needles using a tracked needle guide, and software for electromagnetic reconstruction of catheter paths. These technologies were validated in phantom studies. We found that the navigation system helped a radiation oncologist to target needles more accurately than under ultrasound guidance (60 needles under each condition, 3.8 vs 3.3 mm placement error).</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Pinter, Csaba; Lasso, Andras; Asselin, Mark; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Fichtinger, Gabor
SlicerVR for image-guided therapy planning in immersive virtual reality Conference
The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK, London, UK, 2019.
@conference{Pinter2019a,
  title     = {SlicerVR for image-guided therapy planning in immersive virtual reality},
  author    = {Csaba Pinter and Andras Lasso and Mark Asselin and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2019a_0.pdf},
  year      = {2019},
  date      = {2019-06-01},
  urldate   = {2019-06-01},
  booktitle = {The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK},
  pages     = {91-92},
  address   = {London, UK},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Baum, Zachary M C
AUGMENTED REALITY TRAINING PLATFORM FOR PLACEMENT OF NEUROSURGICAL BURR HOLES Masters Thesis
Queen’s University, Kingston, ON, 2019.
@mastersthesis{Baum2019c,
  title     = {Augmented Reality Training Platform for Placement of Neurosurgical Burr Holes},
  author    = {Zachary M C Baum},
  url       = {http://hdl.handle.net/1974/26123
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2019c.pdf},
  year      = {2019},
  date      = {2019-04-01},
  urldate   = {2019-04-01},
  volume    = {Masters},
  address   = {Kingston, ON},
  school    = {Queen’s University},
  abstract  = {<p>Augmented reality has been used in neurosurgery to aid in the visualization of lesions, though it has not been widely adopted for simulation-based neurosurgical training. This work aims to determine if augmented reality can improve identification of drill location and drill angle for neurosurgical procedures and to define objective metrics for assessing trainee performance.<br />
An augmented reality visualization system was developed using the Microsoft HoloLens. Trainee performance metrics were defined and validated intra-operatively in fifteen neurosurgical cases by attending neurosurgeons and trainees. Trainee performance in localization of drill location and angle tasks was assessed in a simulated training with augmented reality visualization and compared with two other visualization methods.<br /> The proposed metrics allowed us to significantly differentiate levels of competence between attending neurosurgeons and trainees in identification of drill location with (p = 0.011) and without (p = 0.001) the HoloLens and drill angle with (p = 0.032) the HoloLens. Augmented reality visualization significantly improved trainee performance in localization of drill location (p < 0.001 and p = 0.008) and angle (p < 0.001 and p < 0.001) in comparison to two other visualization methods. Trainees rated augmented reality visualization equally or more helpful compared to the two other visualization methods.<br />
Trainee performance assessment with augmented reality visualization and the proposed performance metrics stands to add practical value to neurosurgical training curricula. This work represents a necessary step in curriculum development in neurosurgical training for the task of drill location and angle localization in a variety of neurosurgical procedures.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {mastersthesis}
}
An augmented reality visualization system was developed using the Microsoft HoloLens. Trainee performance metrics were defined and validated intra-operatively in fifteen neurosurgical cases by attending neurosurgeons and trainees. Trainee performance in localization of drill location and angle tasks was assessed in a simulated training with augmented reality visualization and compared with two other visualization methods.<br /> The proposed metrics allowed us to significantly differentiate levels of competence between attending neurosurgeons and trainees in identification of drill location with (p = 0.011) and without (p = 0.001) the HoloLens and drill angle with (p = 0.032) the HoloLens. Augmented reality visualization significantly improved trainee performance in localization of drill location (p < 0.001 and p = 0.008) and angle (p < 0.001 and p < 0.001) in comparison to two other visualization methods. Trainees rated augmented reality visualization equally or more helpful compared to the two other visualization methods.<br />
Trainee performance assessment with augmented reality visualization and the proposed performance metrics stands to add practical value to neurosurgical training curricula. This work represents a necessary step in curriculum development in neurosurgical training for the task of drill location and angle localization in a variety of neurosurgical procedures.</p>
Wiercigroch, Julia; Baum, Zachary M C; Ungi, Tamas; Fritz, Jan; Fichtinger, Gabor
Validation of a low-cost adjustable, handheld needle guide for spine interventions Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Wiercigroch2019,
  author    = {Julia Wiercigroch and Zachary M C Baum and Tamas Ungi and Jan Fritz and Gabor Fichtinger},
  title     = {Validation of a low-cost adjustable, handheld needle guide for spine interventions},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wiercigroch2019a.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Lia, H.; Baum, Zachary M C; Vaughan, Thomas; Ungi, Tamas; McGregor, T.; Fichtinger, Gabor
Usability and accuracy of an electromagnetically tracked partial nephrectomy navigation system Conference
17th Annual Imaging Network of Ontario Symposium (ImNO), 2019.
@conference{Lia2019a,
  author    = {H. Lia and Zachary M C Baum and Thomas Vaughan and Tamas Ungi and T. McGregor and Gabor Fichtinger},
  title     = {Usability and accuracy of an electromagnetically tracked partial nephrectomy navigation system},
  booktitle = {17th Annual Imaging Network of Ontario Symposium (ImNO)},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Lia2019a.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Baum, Zachary M C; Church, Ben; Lasso, Andras; Ungi, Tamas; Schlenger, Christopher; Borschneck, Daniel P.; Mousavi, Parvin; Fichtinger, Gabor
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Baum2019a,
  author    = {Zachary M C Baum and Ben Church and Andras Lasso and Tamas Ungi and Christopher Schlenger and Daniel P. Borschneck and Parvin Mousavi and Gabor Fichtinger},
  title     = {Step-wise identification of ultrasound-visible anatomical landmarks for 3D visualization of scoliotic spine},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2019a_0.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Connolly, Laura; Ungi, Tamas; Lasso, Andras; Vaughan, Thomas; Asselin, Mark; Mousavi, Parvin; Yam, Scott; Fichtinger, Gabor
Mechanically-Controlled Spectroscopic Imaging for Tissue Classification Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Connolly2019a,
  title     = {Mechanically-Controlled Spectroscopic Imaging for Tissue Classification},
  author    = {Laura Connolly and Tamas Ungi and Andras Lasso and Thomas Vaughan and Mark Asselin and Parvin Mousavi and Scott Yam and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2019a_3.pdf},
  doi       = {10.1117/12.2512481},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Choueib, Saleh; Pinter, Csaba; Lasso, Andras; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Fichtinger, Gabor
Evaluation of 3D Slicer as a medical virtual reality visualization platform Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, no. 38, SPIE Medical Imaging SPIE Medical Imaging, San Diego, California, 2019.
@conference{Choueib2019a,
  title        = {Evaluation of 3D Slicer as a medical virtual reality visualization platform},
  author       = {Saleh Choueib and Csaba Pinter and Andras Lasso and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Choueib2019a.pdf},
  year         = {2019},
  date         = {2019-03-01},
  urldate      = {2019-03-01},
  booktitle    = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume       = {10951},
  number       = {38},
  publisher    = {SPIE Medical Imaging},
  address      = {San Diego, California},
  organization = {SPIE Medical Imaging},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Hisey, R.; Ungi, Tamas; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor
Comparison of convolutional neural networks for central venous catheterization tool detection Conference
Imaging Network of Ontario Symposium, Toronto, Ontario, 2019.
@conference{Hisey2019,
  author    = {R. Hisey and Tamas Ungi and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger},
  title     = {Comparison of convolutional neural networks for central venous catheterization tool detection},
  booktitle = {Imaging Network of Ontario Symposium},
  address   = {Toronto, Ontario},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/RHisey_ImNO2019_0.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}