Connolly, Laura; Jamzad, Amoon; Nikniazi, Arash; Poushimin, Rana; Lasso, Andras; Sunderland, Kyle R.; Ungi, Tamas; Nunzi, Jean Michel; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
An open-source testbed for developing image-guided robotic tumor-bed inspection Conference
Imaging Network of Ontario (ImNO) Symposium, 2022.
@conference{connolly2022b,
  title     = {An open-source testbed for developing image-guided robotic tumor-bed inspection},
  author    = {Laura Connolly and Amoon Jamzad and Arash Nikniazi and Rana Poushimin and Andras Lasso and Kyle R. Sunderland and Tamas Ungi and Jean Michel Nunzi and John Rudan and Gabor Fichtinger and Parvin Mousavi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/01/Connolly2022b.pdf},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  booktitle = {Imaging Network of Ontario (ImNO) Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Connolly, Laura; Deguet, Anton; Sunderland, Kyle R.; Lasso, Andras; Ungi, Tamas; Rudan, John; Taylor, Russell H.; Mousavi, Parvin; Fichtinger, Gabor
An open-source platform for cooperative semi-autonomous robotic surgery Conference
IEEE International Conference on Autonomous Systems, IEEE IEEE, Montreal, Quebec, 2021.
@conference{Connolly2021,
title = {An open-source platform for cooperative semi-autonomous robotic surgery},
author = {Laura Connolly and Anton Deguet and Kyle R. Sunderland and Andras Lasso and Tamas Ungi and John Rudan and Russell H. Taylor and Parvin Mousavi and Gabor Fichtinger},
doi = {10.1109/ICAS49788.2021.9551149},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
booktitle = {IEEE International Conference on Autonomous Systems},
publisher = {IEEE},
address = {Montreal, Quebec},
organization = {IEEE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Connolly, Laura; Sunderland, Kyle R.; Lasso, Andras; Deguet, Anton; Ungi, Tamas; Rudan, John; Taylor, Russell H.; Mousavi, Parvin; Fichtinger, Gabor
A platform for robot-assisted Intraoperative imaging in breast conserving surgery Conference
Imaging Network of Ontario Symposium, Imaging Network of Ontario Symposium, Online, 2021.
@conference{Connolly2021b,
title = {A platform for robot-assisted Intraoperative imaging in breast conserving surgery},
author = {Laura Connolly and Kyle R. Sunderland and Andras Lasso and Anton Deguet and Tamas Ungi and John Rudan and Russell H. Taylor and Parvin Mousavi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2021a_1.pdf},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
booktitle = {Imaging Network of Ontario Symposium},
publisher = {Imaging Network of Ontario Symposium},
address = {Online},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Gauvin, Gabrielle; Yeo, Caitlin T; Ungi, Tamas; Merchant, Shaila; Lasso, Andras; Jabs, Doris; Vaughan, Thomas; Rudan, John; Walker, Ross; Fichtinger, Gabor; Engel, C. Jay
Real-time electromagnetic navigation for breast-conserving surgery using NaviKnife technology: A matched case-control study Journal Article
In: The Breast Journal, vol. 26, no. 3, pp. 399-405, 2020.
@article{Gauvin2019,
title = {Real-time electromagnetic navigation for breast-conserving surgery using NaviKnife technology: A matched case-control study},
author = {Gabrielle Gauvin and Caitlin T Yeo and Tamas Ungi and Shaila Merchant and Andras Lasso and Doris Jabs and Thomas Vaughan and John Rudan and Ross Walker and Gabor Fichtinger and C. Jay Engel},
doi = {10.1111/tbj.13480},
year = {2020},
date = {2020-09-01},
urldate = {2020-09-01},
journal = {The Breast Journal},
volume = {26},
number = {3},
pages = {399--405},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Pinter, Csaba; Lasso, Andras; Choueib, Saleh; Asselin, Mark; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Jolley, Matthew A.; Fichtinger, Gabor
SlicerVR for Medical Intervention Training and Planning in Immersive Virtual Reality Journal Article
In: IEEE Transactions on Medical Robotics and Bionics, vol. 2, no. 2, pp. 108-117, 2020.
@article{Pinter2020,
title = {SlicerVR for Medical Intervention Training and Planning in Immersive Virtual Reality},
author = {Csaba Pinter and Andras Lasso and Saleh Choueib and Mark Asselin and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Matthew A. Jolley and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Pinter2020a_0.pdf},
doi = {10.1109/TMRB.2020.2983199},
year = {2020},
date = {2020-03-01},
urldate = {2020-03-01},
journal = {IEEE Transactions on Medical Robotics and Bionics},
volume = {2},
number = {2},
pages = {108--117},
abstract = {<p>Virtual reality (VR) provides immersive visualization that has proved to be useful in a variety of medical applications. Currently, however, no free open-source software platform exists that would provide comprehensive support for translational clinical researchers in prototyping experimental VR scenarios in training, planning or guiding medical interventions. By integrating VR functions in 3D Slicer, an established medical image analysis and visualization platform, SlicerVR enables virtual reality experience by a single click. It provides functions to navigate and manipulate the virtual scene, as well as various settings to abate the feeling of motion sickness. SlicerVR allows for shared collaborative VR experience both locally and remotely. We present illustrative scenarios created with SlicerVR in a wide spectrum of applications, including echocardiography, neurosurgery, spine surgery, brachytherapy, intervention training and personalized patient education. SlicerVR is freely available under BSD type license as an extension to 3D Slicer and it has been downloaded over 7,800 times at the time of writing this article.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Choi, Perry S.; Nam, Hannah H.; Lasso, Andras; Herz, Christian; Drouin, Simon; Harrild, David M.; Quartermain, Michael; Fichtinger, Gabor; Mascio, Christopher E.; Emani, Sitaram; Jolley, Matthew A.
3D Modeling of Surgically Implanted Stent-Based Valves in the Mitral Position in Children Journal Article
In: The Annals of Thoracic Surgery, 2020.
@article{Choi2020,
title = {3D Modeling of Surgically Implanted Stent-Based Valves in the Mitral Position in Children},
author = {Perry S. Choi and Hannah H. Nam and Andras Lasso and Christian Herz and Simon Drouin and David M. Harrild and Michael Quartermain and Gabor Fichtinger and Christopher E. Mascio and Sitaram Emani and Matthew A. Jolley},
url = {https://doi.org/10.1016/j.athoracsur.2020.02.020},
doi = {10.1016/j.athoracsur.2020.02.020},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
journal = {The Annals of Thoracic Surgery},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Fedorov, Andriy; Beichel, Reinhard; Kalpathy-Cramer, Jayashree; Clunie, David; Onken, Michael; Riesmeier, Jörg; Herz, Christian; Bauer, Christian; Beers, Andrew; Fillion-Robin, Jean-Christophe; Lasso, Andras; Pinter, Csaba; Pieper, Steve; Nolden, Marco; Maier-Hein, Klaus; Herrmann, Markus D.; Saltz, Joel; Prior, Fred; Fennessy, Fiona M.; Buatti, John; Kikinis, Ron
Quantitative Imaging Informatics for Cancer Research Journal Article
In: JCO Clinical Cancer Informatics, vol. 4, pp. 444-453., 2020.
@article{Fedorov2020,
title = {Quantitative Imaging Informatics for Cancer Research},
author = {Andriy Fedorov and Reinhard Beichel and Jayashree Kalpathy-Cramer and David Clunie and Michael Onken and Jörg Riesmeier and Christian Herz and Christian Bauer and Andrew Beers and Jean-Christophe Fillion-Robin and Andras Lasso and Csaba Pinter and Steve Pieper and Marco Nolden and Klaus Maier-Hein and Markus D. Herrmann and Joel Saltz and Fred Prior and Fiona M. Fennessy and John Buatti and Ron Kikinis},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Fedorov2020.pdf},
doi = {10.1200/CCI.19.00165},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
journal = {JCO Clinical Cancer Informatics},
volume = {4},
pages = {444--453},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Barr, Colton; Lasso, Andras; Asselin, Mark; Pieper, Steve; Robertson, Faith C.; Gormley, William B.; Fichtinger, Gabor
Towards portable image guidance and automatic patient registration using an RGB-D camera and video projector Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE SPIE, Houston, Texas, United States, 2020.
@conference{BarrC2020,
  title        = {Towards portable image guidance and automatic patient registration using an RGB-D camera and video projector},
  author       = {Colton Barr and Andras Lasso and Mark Asselin and Steve Pieper and Faith C. Robertson and William B. Gormley and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Barr2020.pdf},
  doi          = {10.1117/12.2549723},
  year         = {2020},
  date         = {2020-01-01},
  urldate      = {2020-01-01},
  booktitle    = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
  volume       = {11315},
  publisher    = {SPIE},
  address      = {Houston, Texas, United States},
  organization = {SPIE},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Lasso, Andras; Pinter, Csaba; Choueib, Saleh; Ungi, Tamas; Fichtinger, Gabor
Enhance medical software applications with immersive virtual reality experience Conference
Techna Symposium, Toronto, ON, Canada, 2019.
@conference{Lasso2019,
  title     = {Enhance medical software applications with immersive virtual reality experience},
  author    = {Andras Lasso and Csaba Pinter and Saleh Choueib and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Lasso2019.pdf},
  year      = {2019},
  date      = {2019-10-01},
  urldate   = {2019-10-01},
  booktitle = {Techna Symposium},
  address   = {Toronto, ON, Canada},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Vaughan, Thomas; Brastianos, H; Ungi, Tamas; Lasso, Andras; Falkson, Conrad; Fichtinger, Gabor
Needle Navigation and Catheter Reconstruction for Breast Brachytherapy Using Open Source Software Journal Article
In: Acta Polytechnica Hungarica, vol. 16, no. 8, pp. 99-118, 2019.
@article{Vaughan2019a,
title = {Needle Navigation and Catheter Reconstruction for Breast Brachytherapy Using Open Source Software},
author = {Thomas Vaughan and H Brastianos and Tamas Ungi and Andras Lasso and Conrad Falkson and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Vaughan2019a_0.pdf},
year = {2019},
date = {2019-09-01},
urldate = {2019-09-01},
journal = {Acta Polytechnica Hungarica},
volume = {16},
number = {8},
pages = {99--118},
abstract = {<p>Interstitial breast brachytherapy is a method to deliver radiation therapy directly to the site of cancer. It is a challenging procedure because of issues in localizing the seroma, needles, and catheters within the soft tissue. In this paper we present two open-source technologies based on electromagnetic tracking: a navigation system to help target needles using a tracked needle guide, and software for electromagnetic reconstruction of catheter paths. These technologies were validated in phantom studies. We found that the navigation system helped a radiation oncologist to target needles more accurately than under ultrasound guidance (60 needles under each condition, 3.8 vs 3.3 mm placement error).</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Pinter, Csaba; Lasso, Andras; Asselin, Mark; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Fichtinger, Gabor
SlicerVR for image-guided therapy planning in immersive virtual reality Conference
The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK, London, UK, 2019.
@conference{Pinter2019a,
title = {SlicerVR for image-guided therapy planning in immersive virtual reality},
author = {Csaba Pinter and Andras Lasso and Mark Asselin and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2019a_0.pdf},
year = {2019},
date = {2019-06-01},
urldate = {2019-06-01},
booktitle = {The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK},
pages = {91--92},
address = {London, UK},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Holden, M.; Tan, Pearl; Hookey, Lawrence; Fichtinger, Gabor
Analyzing the curvature of the colon in different patient positions Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Laframboise2019a,
  title     = {Analyzing the curvature of the colon in different patient positions},
  author    = {Jacob Laframboise and Tamas Ungi and Andras Lasso and Mark Asselin and M. Holden and Pearl Tan and Lawrence Hookey and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Laframboise2019a.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Choueib, Saleh; Pinter, Csaba; Lasso, Andras; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Fichtinger, Gabor
Assessment of immersive medical virtual reality visualization using 3D Slicer Conference
17th Annual Imaging Network Ontario Symposium (ImNO), London, Ontario, 2019.
@conference{Choueib2019b,
title = {Assessment of immersive medical virtual reality visualization using 3D Slicer},
author = {Saleh Choueib and Csaba Pinter and Andras Lasso and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Choueib2019b.pdf},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {17th Annual Imaging Network Ontario Symposium (ImNO)},
address = {London, Ontario},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Choueib, Saleh; Pinter, Csaba; Lasso, Andras; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Fichtinger, Gabor
Evaluation of 3D Slicer as a medical virtual reality visualization platform Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, no. 38, SPIE Medical Imaging SPIE Medical Imaging, San Diego, California, 2019.
@conference{Choueib2019a,
title = {Evaluation of 3D Slicer as a medical virtual reality visualization platform},
author = {Saleh Choueib and Csaba Pinter and Andras Lasso and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Choueib2019a.pdf},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
number = {38},
publisher = {SPIE Medical Imaging},
address = {San Diego, California},
organization = {SPIE Medical Imaging},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Connolly, Laura; Ungi, Tamas; Lasso, Andras; Vaughan, Thomas; Asselin, Mark; Mousavi, Parvin; Yam, Scott; Fichtinger, Gabor
Mechanically-Controlled Spectroscopic Imaging for Tissue Classification Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Connolly2019a,
title = {Mechanically-Controlled Spectroscopic Imaging for Tissue Classification},
author = {Laura Connolly and Tamas Ungi and Andras Lasso and Thomas Vaughan and Mark Asselin and Parvin Mousavi and Scott Yam and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2019a_3.pdf},
doi = {10.1117/12.2512481},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Church, Ben; Lasso, Andras; Ungi, Tamas; Schlenger, Christopher; Borschneck, Daniel P.; Mousavi, Parvin; Fichtinger, Gabor
Step-wise identification of ultrasound-visible anatomical landmarks for 3D visualization of scoliotic spine Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Baum2019a,
  title     = {Step-wise identification of ultrasound-visible anatomical landmarks for 3D visualization of scoliotic spine},
  author    = {Zachary M C Baum and Ben Church and Andras Lasso and Tamas Ungi and Christopher Schlenger and Daniel P. Borschneck and Parvin Mousavi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2019a_0.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Pinter, Csaba; Lasso, Andras; Fichtinger, Gabor
Polymorph Segmentation Representation for Medical Image Computing Journal Article
In: Computer Methods and Programs in Biomedicine, vol. 171, pp. 19-26, 2019.
@article{Pinter2019,
title = {Polymorph Segmentation Representation for Medical Image Computing},
author = {Csaba Pinter and Andras Lasso and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2019_Manuscript.pdf},
doi = {10.1016/j.cmpb.2019.02.011},
year = {2019},
date = {2019-02-01},
urldate = {2019-02-01},
journal = {Computer Methods and Programs in Biomedicine},
volume = {171},
pages = {19--26},
abstract = {<p><strong>Background and Objective: </strong>Segmentation is a ubiquitous operation in medical image computing. Various data representations can describe segmentation results, such as labelmap volumes or surface models. Conversions between them are often required, which typically include complex data processing steps. We identified four challenges related to managing multiple representations: <a name="OLE_LINK3"></a><a name="OLE_LINK2">conversion </a>method selection, data provenance, data consistency, and coherence of in-memory objects. <strong>Methods:</strong> A complex data container preserves identity and provenance of the contained representations and ensures data coherence. Conversions are executed automatically on-demand. A graph containing the implemented conversion algorithms determines each execution, ensuring consistency between various representations. The design and implementation of a software library are proposed, in order to provide a readily usable software tool to manage segmentation data in multiple data representations. A low-level core library called PolySeg implemented in The Visualization Toolkit (VTK) manages the data objects and conversions. It is used by a high-level application layer, which has been implemented in the medical image visualization and analysis platform 3D Slicer. The application layer provides advanced visualization, transformation, interoperability, and other functions. <strong>Results: </strong>The core conversion algorithms comprising the graph were validated. Several applications were implemented based on the library, demonstrating advantages in terms of usability and ease of software development in each case. The Segment Editor application provides fast, comprehensive, and easy-to-use manual and semi-automatic segmentation workflows. 
Clinical applications for gel dosimetry, external beam planning, and MRI-ultrasound image fusion in brachytherapy were rapidly prototyped resulting robust applications that are already in use in clinical research. The conversion algorithms were found to be accurate and reliable using these applications. <strong>Conclusions:</strong> A generic software library has been designed and developed for automatic management of multiple data formats in segmentation tasks. It enhances both user and developer experience, enabling fast and convenient manual workflows and quicker and more robust software prototyping. The software’s BSD-style open-source license allows complete freedom of use of the library.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Nguyen, Alexander V.; Lasso, Andras; Nam, Hannah H.; Faerber, Jennifer; Aly, Ahmed H.; Pouch, Alison M.; Scanlan, Adam B.; McGowan, Francis X.; Mercer-Rosa, Laura; Cohen, Meryl S.; Simpson, John; Fichtinger, Gabor; Jolley, Matthew A.
Dynamic Three-Dimensional Geometry of the Tricuspid Valve Annulus in Hypoplastic Left Heart Syndrome with a Fontan Circulation Journal Article
In: Journal of the American Society of Echocardiography, 2019, ISSN: 0894-7317.
@article{NGUYEN2019,
title = {Dynamic Three-Dimensional Geometry of the Tricuspid Valve Annulus in Hypoplastic Left Heart Syndrome with a Fontan Circulation},
author = {Alexander V. Nguyen and Andras Lasso and Hannah H. Nam and Jennifer Faerber and Ahmed H. Aly and Alison M. Pouch and Adam B. Scanlan and Francis X. McGowan and Laura Mercer-Rosa and Meryl S. Cohen and John Simpson and Gabor Fichtinger and Matthew A. Jolley},
url = {http://www.sciencedirect.com/science/article/pii/S0894731719300021},
doi = {10.1016/j.echo.2019.01.002},
issn = {0894-7317},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
journal = {Journal of the American Society of Echocardiography},
abstract = {<p>Background Tricuspid regurgitation (TR) is a significant contributor to morbidity and mortality in patients with hypoplastic left heart syndrome. The goal of this study was to characterize the dynamic annular motion of the tricuspid valve in patients with HLHS with a Fontan circulation and assess the relation to tricuspid valve function. Methods Tricuspid annuli of 48 patients with HLHS with a Fontan circulation were modeled at end-diastole, mid-systole, end-systole, and mid-diastole using transthoracic three-dimensional echocardiography and custom code in 3D Slicer. The angle of the anterior papillary muscle (APM) relative to the annular plane in each systolic phase was also measured. Results Imaging was performed 5.0 years (interquartile range, 2–11 years) after Fontan operation. The tricuspid annulus varies in shape significantly throughout the cardiac cycle, changing in sphericity (P < .001) but not in annular height or bending angle. In univariate modeling, patients with significant TR had larger changes in septolateral diameter, lateral quadrant area, and posterior quadrant area (P < .05 for all) as well as lower (more laterally directed) APM angles (P < .001) than patients with mild or less TR. In multivariate modeling, a 1 mm/(body surface area)0.5 increase in the maximum change in septolateral diameter was associated with a 1.7-fold increase in having moderate or greater TR, while a 10° decrease in APM angle at mid-systole was associated with an almost 2.5-fold increase in moderate or greater TR (P <= .01 for all). Conclusions The tricuspid annulus in patients with HLHS with a Fontan circulation changes in shape significantly throughout the cardiac cycle but remains relatively planar. Increased change in septolateral diameter and decreased APM angle are strongly associated with the presence of TR. These findings may inform annuloplasty methods and subvalvular interventions in these complex patients.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Baum, Zachary M C; Lasso, Andras; Ryan, Sarah; Ungi, Tamas; Rae, Emily; Zevin, Boris; Levy, Ron; Fichtinger, Gabor
Augmented reality training platform for neurosurgical burr hole localization Journal Article
In: Journal of Medical Robotics Research, vol. 4, no. 3-4, pp. 1942001-1 - 1942001-13, 2019.
@article{Baum2020,
title = {Augmented reality training platform for neurosurgical burr hole localization},
author = {Zachary M C Baum and Andras Lasso and Sarah Ryan and Tamas Ungi and Emily Rae and Boris Zevin and Ron Levy and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2020a.pdf},
doi = {10.1142/S2424905X19420017},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
journal = {Journal of Medical Robotics Research},
volume = {4},
number = {3-4},
pages = {1942001-1--1942001-13},
abstract = {<p>Augmented reality (AR) is used in neurosurgery to visualize lesions and plan procedures pre-operatively and intra-operatively, though its use has not been widely adopted in simulation-based neurosurgical training for the same tasks. This work defines metrics to determine performance in drill position and angle identification for neurosurgical training. The metrics were validated intra-operatively and in a simulated training environment, demonstrating that trainees identify drill position and angle faster and more accurately with AR compared to standard techniques. Training using AR and the proposed metrics stands to add value to neurosurgical curricula development.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Perrin, Sydney; Baum, Zachary M C; Asselin, Mark; Underwood, Grace; Choueib, Saleh; Lia, H.; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
Reproducibility of freehand calibrations for ultrasound-guided needle navigation Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Perrin2019a,
  title     = {Reproducibility of freehand calibrations for ultrasound-guided needle navigation},
  author    = {Sydney Perrin and Zachary M C Baum and Mark Asselin and Grace Underwood and Saleh Choueib and H. Lia and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Perrin2019a.pdf},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Asselin, Mark; Kaufmann, Martin; Wiercigroch, Julia; Ungi, Tamas; Lasso, Andras; Rudan, John; Fichtinger, Gabor
Navigated real-time molecular analysis in the operating theatre, demonstration of concept Conference
SPIE Medical Imaging 2019, 2019.
@conference{Asselin2019a,
  title     = {Navigated real-time molecular analysis in the operating theatre, demonstration of concept},
  author    = {Mark Asselin and Martin Kaufmann and Julia Wiercigroch and Tamas Ungi and Andras Lasso and John Rudan and Gabor Fichtinger},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  booktitle = {SPIE Medical Imaging 2019},
  abstract  = {<p><strong>PURPOSE</strong>: In the operating theatre surgeons are accustomed to using spatially navigated tools in conjunction with<br />
standard clinical imaging during a procedure. This gives them a good idea where they are in the patients’ anatomy but<br />
doesn’t provide information about the type of tissue they are dissecting. In this paper we demonstrate an integrated<br />
system consisting of a spatially navigated surgical electrocautery combined with real-time molecular analysis of the<br />
dissected tissue using mass spectrometry.<br />
<strong>METHODS</strong>: Using the 3D Slicer software package, we have integrated a commercially available neurosurgical<br />
navigation system with an intra-operative mass spectrometer (colloquially referred to as the intelligent knife, or iKnife)<br />
that analyzes the charged ions in the smoke created during cauterization. We demonstrate this system using a simulated<br />
patient comprised of an MRI scan from a brain cancer patient deformably registered to a plastic skull model. On the<br />
skull model we placed porcine and bovine tissues to simulate cancerous and healthy tissue, respectively. We built a<br />
PCA/LDA model to distinguish between these tissue types. The tissue classifications were displayed in a spatially<br />
localized manner in the pre-operative imaging, in both 2D and 3D views.<br />
<strong>RESULTS</strong>: We have demonstrated the feasibility of performing spatially navigated intra-operative analysis of tissues by<br />
mass spectrometry. We show that machine learning can classify our sample tissues, with an average computed<br />
confidence of 99.37 % for porcine tissue and 99.36% for bovine tissue.<br />
<strong>CONCLUSION</strong>: In this paper we demonstrate a proof of concept system for navigated intra-operative molecular<br />
analysis. This system may enable intra-operative awareness of spatially localized tissue classification during dissection,<br />
information that is especially useful in tumor surgeries where margins may not be visible to the unassisted eye.<br />
<strong>Keywords</strong>: image guided therapy, intra-operative mass spectrometry, iKnife, 3D Slicer, open-source, rapid evaporative<br />
ionization mass spectrometry (REIMS)</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
standard clinical imaging during a procedure. This gives them a good idea where they are in the patients’ anatomy but<br />
doesn’t provide information about the type of tissue they are dissecting. In this paper we demonstrate an integrated<br />
system consisting of a spatially navigated surgical electrocautery combined with real-time molecular analysis of the<br />
dissected tissue using mass spectrometry.<br />
<strong>METHODS</strong>: Using the 3D Slicer software package, we have integrated a commercially available neurosurgical<br />
navigation system with an intra-operative mass spectrometer (colloquially referred to as the intelligent knife, or iKnife)<br />
that analyzes the charged ions in the smoke created during cauterization. We demonstrate this system using a simulated<br />
patient comprised of an MRI scan from a brain cancer patient deformably registered to a plastic skull model. On the<br />
skull model we placed porcine and bovine tissues to simulate cancerous and healthy tissue, respectively. We built a<br />
PCA/LDA model to distinguish between these tissue types. The tissue classifications were displayed in a spatially<br />
localized manner in the pre-operative imaging, in both 2D and 3D views.<br />
<strong>RESULTS</strong>: We have demonstrated the feasibility of performing spatially navigated intra-operative analysis of tissues by<br />
mass spectrometry. We show that machine learning can classify our sample tissues, with an average computed<br />
confidence of 99.37 % for porcine tissue and 99.36% for bovine tissue.<br />
<strong>CONCLUSION</strong>: In this paper we demonstrate a proof of concept system for navigated intra-operative molecular<br />
analysis. This system may enable intra-operative awareness of spatially localized tissue classification during dissection,<br />
information that is especially useful in tumor surgeries where margins may not be visible to the unassisted eye.<br />
<strong>Keywords</strong>: image guided therapy, intra-operative mass spectrometry, iKnife, 3D Slicer, open-source, rapid evaporative<br />
ionization mass spectrometry (REIMS)</p>
Lund, Shaun; Vaughan, Thomas; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Yeo, Caitlin; Engel, C. Jay; Fichtinger, Gabor
Controlling virtual views in navigated breast conserving surgery Conference
SPIE Medical Imaging 2019, 2019.
@conference{Lund2019a,
  title     = {Controlling virtual views in navigated breast conserving surgery},
  author    = {Shaun Lund and Thomas Vaughan and Tamas Ungi and Andras Lasso and Mark Asselin and Caitlin Yeo and C. Jay Engel and Gabor Fichtinger},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  booktitle = {SPIE Medical Imaging 2019},
  abstract  = {<p><strong>PURPOSE</strong>: Lumpectomy is the resection of a tumor in the breast while retaining as much healthy tissue as possible.<br />
Navigated lumpectomy seeks to improve on the traditional technique by employing computer guidance to achieve the<br />
complete excision of the cancer with optimal retention of healthy tissue. Setting up navigation in the OR relies on the<br />
manual interactions of a trained technician to align three-dimensional virtual views to the patient’s physical position<br />
and maintain their alignment throughout surgery. This work develops automatic alignment tools to improve the<br />
operability of navigation software for lumpectomies.<br />
<strong>METHODS</strong>: Preset view buttons were developed to refine view setup to a single interaction. These buttons were<br />
tested by measuring the reduction in setup time and the number of manual interactions avoided through their use. An<br />
auto-center feature was created to ensure that three-dimensional models of anatomy and instruments were in the center<br />
of view throughout surgery. Recorded data from 32 lumpectomy cases were replayed and the number of auto-center<br />
view shifts was counted from the first cautery incision until the completion of the excision of cancerous tissue.<br />
<strong>RESULTS</strong>: View setup can now be performed in a single interaction compared to an average of 13 interactions<br />
(taking 83 seconds) when performed manually. The auto-center feature was activated an average of 33 times in the<br /> cases studied (n=32).<br />
<strong>CONCLUSION</strong>: The auto-center feature enhances the operability of the surgical navigation system, reducing the<br />
number of manual interactions required by a technician during the surgery. This feature along with preset camera view<br />
options are instrumental in the shift towards a completely surgeon-operable navigated lumpectomy system.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Navigated lumpectomy seeks to improve on the traditional technique by employing computer guidance to achieve the<br />
complete excision of the cancer with optimal retention of healthy tissue. Setting up navigation in the OR relies on the<br />
manual interactions of a trained technician to align three-dimensional virtual views to the patient’s physical position<br />
and maintain their alignment throughout surgery. This work develops automatic alignment tools to improve the<br />
operability of navigation software for lumpectomies.<br />
<strong>METHODS</strong>: Preset view buttons were developed to refine view setup to a single interaction. These buttons were<br />
tested by measuring the reduction in setup time and the number of manual interactions avoided through their use. An<br />
auto-center feature was created to ensure that three-dimensional models of anatomy and instruments were in the center<br />
of view throughout surgery. Recorded data from 32 lumpectomy cases were replayed and the number of auto-center<br />
view shifts was counted from the first cautery incision until the completion of the excision of cancerous tissue.<br />
<strong>RESULTS</strong>: View setup can now be performed in a single interaction compared to an average of 13 interactions<br />
(taking 83 seconds) when performed manually. The auto-center feature was activated an average of 33 times in the<br /> cases studied (n=32).<br />
<strong>CONCLUSION</strong>: The auto-center feature enhances the operability of the surgical navigation system, reducing the<br />
number of manual interactions required by a technician during the surgery. This feature along with preset camera view<br />
options are instrumental in the shift towards a completely surgeon-operable navigated lumpectomy system.</p>
Baum, Zachary M C; Ryan, Sarah; Rae, Emily; Lasso, Andras; Ungi, Tamas; Levy, Ron; Fichtinger, Gabor
Assessment of intraoperative neurosurgical planning with the Microsoft HoloLens Conference
17th Annual Imaging Network Ontario Symposium (ImNO), Imaging Network Ontario (ImNO), London, Ontario, 2019.
@conference{Baum2019b,
  title     = {Assessment of intraoperative neurosurgical planning with the {Microsoft HoloLens}},
  author    = {Zachary M C Baum and Sarah Ryan and Emily Rae and Andras Lasso and Tamas Ungi and Ron Levy and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2019b.pdf},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  booktitle = {17th Annual Imaging Network Ontario Symposium (ImNO)},
  publisher = {Imaging Network Ontario (ImNO)},
  address   = {London, Ontario},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
House, Rachael; Kunz, M.; Valiquette, Chantal; Lasso, Andras; Ungi, Tamas; Rudan, John; Martou, Glykeria; Fichtinger, Gabor
Monitoring volume changes for breast reconstruction surgery using three dimensional optical surface scanning Conference
Canadian Society of Plastic Surgeons (CSPS) Annual Meeting, St. Johns, Newfoundland, 2019.
@conference{House2019a,
  title     = {Monitoring volume changes for breast reconstruction surgery using three dimensional optical surface scanning},
  author    = {Rachael House and M. Kunz and Chantal Valiquette and Andras Lasso and Tamas Ungi and John Rudan and Glykeria Martou and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/House2019a-manuscript.pdf},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  booktitle = {Canadian Society of Plastic Surgeons (CSPS) Annual Meeting},
  address   = {St. Johns, Newfoundland},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Holden, M.; Tan, Pearl; Hookey, Lawrence; Fichtinger, Gabor
Quantifying the effect of patient position on the curvature of colons Conference
17th Annual Imaging Network Ontario Symposium (ImNO), Imaging Network Ontario (ImNO), London, Ontario, 2019.
@conference{Laframboise2019b,
  title     = {Quantifying the effect of patient position on the curvature of colons},
  author    = {Jacob Laframboise and Tamas Ungi and Andras Lasso and Mark Asselin and M. Holden and Pearl Tan and Lawrence Hookey and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Laframboise2019b.pdf},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  booktitle = {17th Annual Imaging Network Ontario Symposium (ImNO)},
  publisher = {Imaging Network Ontario (ImNO)},
  address   = {London, Ontario},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Jolley, Matthew A.; Lasso, Andras; Nam, Hannah H.; Dinh, Patrick V.; Scanlan, Adam B.; Nguyen, Alexander V.; Ilina, Anna; Morray, Brian; Glatz, Andrew C.; McGowan, Francis X.; Whitehead, Kevin; Dori, Yoav; Gorman, Robert C.; Gorman, Robert C.; Fichtinger, Gabor; Gillespie, Matthew J.
Toward predictive modeling of catheter-based pulmonary valve replacement into native right ventricular outflow tracts Journal Article
In: Catheterization and Cardiovascular Interventions, 2018.
@article{doi:10.1002/ccd.27962,
  title         = {Toward predictive modeling of catheter-based pulmonary valve replacement into native right ventricular outflow tracts},
  author        = {Matthew A. Jolley and Andras Lasso and Hannah H. Nam and Patrick V. Dinh and Adam B. Scanlan and Alexander V. Nguyen and Anna Ilina and Brian Morray and Andrew C. Glatz and Francis X. McGowan and Kevin Whitehead and Yoav Dori and Robert C. Gorman and Robert C. Gorman and Gabor Fichtinger and Matthew J. Gillespie},
  internal-note = {NOTE(review): "Robert C. Gorman" appears twice in the source record; one occurrence may belong to a different co-author (e.g. Joseph H. Gorman) - verify against the publisher page before removing},
  url           = {https://onlinelibrary.wiley.com/doi/abs/10.1002/ccd.27962},
  doi           = {10.1002/ccd.27962},
  year          = {2018},
  date          = {2018-11-01},
  urldate       = {2018-11-01},
  journal       = {Catheterization and Cardiovascular Interventions},
  abstract      = {<p>Abstract Background Pulmonary insufficiency is a consequence of transannular patch repair in Tetralogy of Fallot (ToF) leading to late morbidity and mortality. Transcatheter native outflow tract pulmonary valve replacement has become a reality. However, predicting a secure, atraumatic implantation of a catheter-based device remains a significant challenge due to the complex and dynamic nature of the right ventricular outflow tract (RVOT). We sought to quantify the differences in compression and volume for actual implants, and those predicted by pre-implant modeling. Methods We used custom software to interactively place virtual transcatheter pulmonary valves (TPVs) into RVOT models created from pre-implant and post Harmony valve implant CT scans of 5 ovine surgical models of TOF to quantify and visualize device volume and compression. Results Virtual device placement visually mimicked actual device placement and allowed for quantification of device volume and radius. On average, simulated proximal and distal device volumes and compression did not vary statistically throughout the cardiac cycle (P = 0.11) but assessment was limited by small sample size. In comparison to actual implants, there was no significant pairwise difference in the proximal third of the device (P > 0.80), but the simulated distal device volume was significantly underestimated relative to actual device implant volume (P = 0.06). Conclusions This study demonstrates that pre-implant modeling which assumes a rigid vessel wall may not accurately predict the degree of distal RVOT expansion following actual device placement. We suggest the potential for virtual modeling of TPVR to be a useful adjunct to procedural planning, but further development is needed.</p>},
  keywords      = {},
  pubstate      = {published},
  tppubtype     = {article}
}
Yeo, Caitlin T; MacDonald, Andrew; Ungi, Tamas; Lasso, Andras; Jalink, Diederick; Zevin, Boris; Fichtinger, Gabor; Nanji, Sulaiman
Utility of 3D Reconstruction of 2D Liver Computed Tomography/Magnetic Resonance Images as a Surgical Planning Tool for Residents in Liver Resection Surgery Journal Article
In: Journal of Surgical Education, vol. 75, no. 3, pp. 792-797, 2018.
@article{Yeo2017b,
  title     = {Utility of {3D} Reconstruction of {2D} Liver Computed Tomography/Magnetic Resonance Images as a Surgical Planning Tool for Residents in Liver Resection Surgery},
  author    = {Caitlin T Yeo and Andrew MacDonald and Tamas Ungi and Andras Lasso and Diederick Jalink and Boris Zevin and Gabor Fichtinger and Sulaiman Nanji},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Yeo2017.pdf},
  doi       = {10.1016/j.jsurg.2017.07.031},
  year      = {2018},
  date      = {2018-08-01},
  urldate   = {2018-08-01},
  journal   = {Journal of Surgical Education},
  volume    = {75},
  number    = {3},
  pages     = {792--797},
  abstract  = {<div class="content" style="line-height: 1.5em; font-family: &quot;Helvetica Neue&quot;, Helvetica, Arial, sans-serif; font-size: 12px;">
<h3>Objective</h3> <div class="content" style="line-height: 1.5em;">
<p>A fundamental aspect of surgical planning in liver resections is the identification of key vessel tributaries to preserve healthy liver tissue while fully resecting the tumor(s). Current surgical planning relies primarily on the surgeon’s ability to mentally reconstruct 2D computed tomography/magnetic resonance (CT/MR) images into 3D and plan resection margins. This creates significant cognitive load, especially for trainees, as it relies on image interpretation, anatomical and surgical knowledge, experience, and spatial sense. The purpose of this study is to determine if 3D reconstruction of preoperative CT/MR images will assist resident-level trainees in making appropriate operative plans for liver resection surgery.</p>
</div>
</div> <div class="content" style="line-height: 1.5em; font-family: &quot;Helvetica Neue&quot;, Helvetica, Arial, sans-serif; font-size: 12px;">
<h3>Design</h3> <div class="content" style="line-height: 1.5em;">
<p>Ten preoperative patient CT/MR images were selected. Images were case-matched, 5 to 2D planning and 5 to 3D planning. Images from the 3D group were segmented to create interactive digital models that the resident can manipulate to view the tumor(s) in relation to landmark hepatic structures. Residents were asked to evaluate the images and devise a surgical resection plan for each image. The resident alternated between 2D and 3D planning, in a randomly generated order. The primary outcome was the accuracy of resident’s plan compared to expert opinion. Time to devise each surgical plan was the secondary outcome. Residents completed a prestudy and poststudy questionnaire regarding their experience with liver surgery and the 3D planning software.</p>
</div>
</div> <div class="content" style="line-height: 1.5em; font-family: &quot;Helvetica Neue&quot;, Helvetica, Arial, sans-serif; font-size: 12px;">
<h3>Setting and Participants</h3> <div class="content" style="line-height: 1.5em;">
<p>Senior level surgical residents from the Queen’s University General Surgery residency program were recruited to participate.</p>
</div>
</div> <div class="content" style="line-height: 1.5em; font-family: &quot;Helvetica Neue&quot;, Helvetica, Arial, sans-serif; font-size: 12px;">
<h3>Results</h3> <div class="content" style="line-height: 1.5em;">
<p>A total of 14 residents participated in the study. The median correct response rate was 2 of 5 (40%; range: 0-4) for the 2D group, and 3 of 5 (60%; range: 1-5) for the 3D group (p < 0.01). The average time to complete each plan was 156 ± 107 seconds for the 2D group, and 84 ± 73 seconds for the 3D group (p < 0.01). A total 13 of 14 residents found the 3D model easier to use than the 2D. Most residents noticed a difference between the 2 modalities and found that the 3D model improved their confidence with the surgical plan proposed.</p>
</div>
</div> <div class="content" style="line-height: 1.5em; font-family: &quot;Helvetica Neue&quot;, Helvetica, Arial, sans-serif; font-size: 12px;">
<h3>Conclusions</h3> <div class="content" style="line-height: 1.5em;">
<p>The results of this study show that 3D reconstruction for liver surgery planning increases accuracy of resident surgical planning and decreases amount of time required. 3D reconstruction would be a useful model for improving trainee understanding of liver anatomy and surgical resection, and would serve as an adjunct to current 2D planning methods. This has the potential to be developed into a module for teaching liver surgery in a competency-based medical curriculum.</p>
</div>
</div>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
<h3>Objective</h3> <div class="content" style="line-height: 1.5em;">
<p>A fundamental aspect of surgical planning in liver resections is the identification of key vessel tributaries to preserve healthy liver tissue while fully resecting the tumor(s). Current surgical planning relies primarily on the surgeon’s ability to mentally reconstruct 2D computed tomography/magnetic resonance (CT/MR) images into 3D and plan resection margins. This creates significant cognitive load, especially for trainees, as it relies on image interpretation, anatomical and surgical knowledge, experience, and spatial sense. The purpose of this study is to determine if 3D reconstruction of preoperative CT/MR images will assist resident-level trainees in making appropriate operative plans for liver resection surgery.</p>
</div>
</div> <div class="content" style="line-height: 1.5em; font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: 12px;">
<h3>Design</h3> <div class="content" style="line-height: 1.5em;">
<p>Ten preoperative patient CT/MR images were selected. Images were case-matched, 5 to 2D planning and 5 to 3D planning. Images from the 3D group were segmented to create interactive digital models that the resident can manipulate to view the tumor(s) in relation to landmark hepatic structures. Residents were asked to evaluate the images and devise a surgical resection plan for each image. The resident alternated between 2D and 3D planning, in a randomly generated order. The primary outcome was the accuracy of resident’s plan compared to expert opinion. Time to devise each surgical plan was the secondary outcome. Residents completed a prestudy and poststudy questionnaire regarding their experience with liver surgery and the 3D planning software.</p>
</div>
</div> <div class="content" style="line-height: 1.5em; font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: 12px;">
<h3>Setting and Participants</h3> <div class="content" style="line-height: 1.5em;">
<p>Senior level surgical residents from the Queen’s University General Surgery residency program were recruited to participate.</p>
</div>
</div> <div class="content" style="line-height: 1.5em; font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: 12px;">
<h3>Results</h3> <div class="content" style="line-height: 1.5em;">
<p>A total of 14 residents participated in the study. The median correct response rate was 2 of 5 (40%; range: 0-4) for the 2D group, and 3 of 5 (60%; range: 1-5) for the 3D group (p < 0.01). The average time to complete each plan was 156 ± 107 seconds for the 2D group, and 84 ± 73 seconds for the 3D group (p < 0.01). A total 13 of 14 residents found the 3D model easier to use than the 2D. Most residents noticed a difference between the 2 modalities and found that the 3D model improved their confidence with the surgical plan proposed.</p>
</div>
</div> <div class="content" style="line-height: 1.5em; font-family: "Helvetica Neue", Helvetica, Arial, sans-serif; font-size: 12px;">
<h3>Conclusions</h3> <div class="content" style="line-height: 1.5em;">
<p>The results of this study show that 3D reconstruction for liver surgery planning increases accuracy of resident surgical planning and decreases amount of time required. 3D reconstruction would be a useful model for improving trainee understanding of liver anatomy and surgical resection, and would serve as an adjunct to current 2D planning methods. This has the potential to be developed into a module for teaching liver surgery in a competency-based medical curriculum.</p>
</div>
</div>
García-Mato, D.; Holden, M.; Lasso, Andras; Szulewski, A.; Pascau, Javier; Fichtinger, Gabor
3D Gaze tracking for skill assessment in ultrasound-guided needle insertions Conference
Computer Assisted Radiology and Surgery, 32nd International Congress and Exhibition, vol. 13, Springer, Berlin, Germany, 2018.
@conference{GarciaMato2018a,
  title     = {{3D} Gaze tracking for skill assessment in ultrasound-guided needle insertions},
  author    = {D. García-Mato and M. Holden and Andras Lasso and A. Szulewski and Javier Pascau and Gabor Fichtinger},
  doi       = {10.1007/s11548-018-1766-y},
  year      = {2018},
  date      = {2018-06-01},
  urldate   = {2018-06-01},
  booktitle = {Computer Assisted Radiology and Surgery, 32nd International Congress and Exhibition},
  volume    = {13},
  pages     = {S52--S53},
  publisher = {Springer},
  address   = {Berlin, Germany},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Lia, H.; Paulin, Gregory; Yeo, Caitlin T; Andrews, Jessica; Yi, Nelson; Haq, Hassan; Emmanuel, Steve; Ludig, Kristian; Keri, Zsuzsanna; Lasso, Andras; Fichtinger, Gabor
HoloLens in Suturing Training Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, Houston, Texas, 2018.
@conference{Lia2018a,
  title     = {{HoloLens} in Suturing Training},
  author    = {H. Lia and Gregory Paulin and Caitlin T Yeo and Jessica Andrews and Nelson Yi and Hassan Haq and Steve Emmanuel and Kristian Ludig and Zsuzsanna Keri and Andras Lasso and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Lia2018a.pdf},
  year      = {2018},
  date      = {2018-03-01},
  urldate   = {2018-03-01},
  booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
  address   = {Houston, Texas},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Rae, Emily; Lasso, Andras; Holden, M.; Morin, Evelyn; Levy, Ron; Fichtinger, Gabor
Neurosurgical burr hole placement using the Microsoft HoloLens Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10576, Houston, Texas, 2018.
@conference{Emily2018a,
  title     = {Neurosurgical burr hole placement using the {Microsoft HoloLens}},
  author    = {Emily Rae and Andras Lasso and M. Holden and Evelyn Morin and Ron Levy and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Rae2018a.pdf},
  doi       = {10.1117/12.2293680},
  year      = {2018},
  date      = {2018-03-01},
  urldate   = {2018-03-01},
  booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10576},
  pages     = {105760T},
  address   = {Houston, Texas},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Pinter, Csaba; Travers, Bryan; Baum, Zachary M C; Kamali, Shahrokh; Ungi, Tamas; Lasso, Andras; Church, Ben; Fichtinger, Gabor
Real-time transverse process detection in ultrasound Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, Houston, Texas, 2018.
@conference{Pinter2018,
  title     = {Real-time transverse process detection in ultrasound},
  author    = {Csaba Pinter and Bryan Travers and Zachary M C Baum and Shahrokh Kamali and Tamas Ungi and Andras Lasso and Ben Church and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2018.pdf},
  year      = {2018},
  date      = {2018-03-01},
  urldate   = {2018-03-01},
  booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
  address   = {Houston, Texas},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Asselin, Mark; Lasso, Andras; Ungi, Tamas; Fichtinger, Gabor
Towards webcam-based tracking for interventional navigation Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, Houston, Texas, 2018.
@conference{Asselin2018a,
  title     = {Towards webcam-based tracking for interventional navigation},
  author    = {Mark Asselin and Andras Lasso and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Asselin2018a.pdf},
  year      = {2018},
  date      = {2018-03-01},
  urldate   = {2018-03-01},
  booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
  address   = {Houston, Texas},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Church, Ben; Schlenger, Christopher; Fichtinger, Gabor
Visual aid for identifying vertebral landmarks in ultrasound Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10576, Houston, Texas, 2018.
@conference{Baum2018a,
  title     = {Visual aid for identifying vertebral landmarks in ultrasound},
  author    = {Zachary M C Baum and Tamas Ungi and Andras Lasso and Ben Church and Christopher Schlenger and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2018a.pdf},
  doi       = {10.1117/12.2292820},
  year      = {2018},
  date      = {2018-03-01},
  urldate   = {2018-03-01},
  booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10576},
  pages     = {105760Z},
  address   = {Houston, Texas},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Pinter, Csaba; Travers, Bryan; Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Church, Ben; Fichtinger, Gabor
Real-time transverse process delineation in tracked ultrasound for scoliosis measurement Conference
Imaging Network Ontario Symposium (ImNO 2018), Toronto, Canada, 2018.
@conference{Pinter2018a,
  title     = {Real-time transverse process delineation in tracked ultrasound for scoliosis measurement},
  author    = {Csaba Pinter and Bryan Travers and Zachary M C Baum and Tamas Ungi and Andras Lasso and Ben Church and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2018a.pdf},
  year      = {2018},
  date      = {2018-03-01},
  urldate   = {2018-03-01},
  booktitle = {Imaging Network Ontario Symposium (ImNO 2018)},
  address   = {Toronto, Canada},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Lia, H.; Paulin, Gregory; Yeo, Caitlin T; Andrews, Jessica; Yi, Nelson; Haq, Hassan; Emmanuel, Steve; Ludig, Kristian; Keri, Zsuzsanna; Lasso, Andras; Fichtinger, Gabor
Basic suturing training using Microsoft HoloLens Conference
ImNO 2018, Toronto, ON, 2018.
@conference{Lia2018b,
  title     = {Basic suturing training using {Microsoft HoloLens}},
  author    = {H. Lia and Gregory Paulin and Caitlin T Yeo and Jessica Andrews and Nelson Yi and Hassan Haq and Steve Emmanuel and Kristian Ludig and Zsuzsanna Keri and Andras Lasso and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Lia2018b.pdf},
  year      = {2018},
  date      = {2018-03-01},
  urldate   = {2018-03-01},
  booktitle = {ImNO 2018},
  address   = {Toronto, ON},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Ilina, Anna; Pinter, Csaba; Lasso, Andras; Lai, Ingrid; Joshi, C. P.; Alexander, Kevin; Schreiner, L. John; Hanna, Timothy; Fichtinger, Gabor
3D Surface Scanning for Tumour Localization in Non-Melanoma Skin Cancer Conference
16th Annual Imaging Network Ontario Symposium (ImNO), Toronto, Canada, 2018.
@conference{Ilina2018a,
  title     = {{3D} Surface Scanning for Tumour Localization in Non-Melanoma Skin Cancer},
  author    = {Anna Ilina and Csaba Pinter and Andras Lasso and Ingrid Lai and C. P. Joshi and Kevin Alexander and L. John Schreiner and Timothy Hanna and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ilina2018a_0.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ilina2018a-poster.pdf},
  year      = {2018},
  date      = {2018-03-01},
  urldate   = {2018-03-01},
  booktitle = {16th Annual Imaging Network Ontario Symposium (ImNO)},
  address   = {Toronto, Canada},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Church, Ben; Schlenger, Christopher; Fichtinger, Gabor
Ultrasound-based vertebral landmark localization using deformable spine models Conference
16th Annual Imaging Network Ontario Symposium, Imaging Network Ontario (ImNO), Toronto, Canada, 2018.
@conference{Baum2018b,
  title     = {Ultrasound-based vertebral landmark localization using deformable spine models},
  author    = {Zachary M C Baum and Tamas Ungi and Andras Lasso and Ben Church and Christopher Schlenger and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2018b.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2018b-Poster.pdf},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2018-01-01},
  booktitle = {16th Annual Imaging Network Ontario Symposium},
  publisher = {Imaging Network Ontario (ImNO)},
  address   = {Toronto, Canada},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Ilina, Anna; Pinter, Csaba; Lasso, Andras; Lai, Ingrid; Joshi, C. P.; Alexander, Kevin; Schreiner, L. John; Hanna, Timothy; Fichtinger, Gabor
Target Definition with 3D Surface Scanning for Orthovoltage Radiation Therapy Planning Conference
CARS, Berlin, Germany, 2018.
@conference{Ilina2018b,
  title     = {Target Definition with {3D} Surface Scanning for Orthovoltage Radiation Therapy Planning},
  author    = {Anna Ilina and Csaba Pinter and Andras Lasso and Ingrid Lai and C. P. Joshi and Kevin Alexander and L. John Schreiner and Timothy Hanna and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ilina2018b.pdf},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2018-01-01},
  booktitle = {CARS},
  address   = {Berlin, Germany},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Asselin, Mark; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
A Training Tool for Ultrasound-Guided Central Line Insertion with Webcam-Based Position Tracking Conference
Simulation, Image Processing, and Ultrasound Systems for Assisted Diagnosis and Navigation, 2018.
@conference{Asselin2018c,
  title     = {A Training Tool for Ultrasound-Guided Central Line Insertion with Webcam-Based Position Tracking},
  author    = {Mark Asselin and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Asselin2018c.pdf},
  doi       = {10.1007/978-3-030-01045-4_2},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2018-01-01},
  booktitle = {Simulation, Image Processing, and Ultrasound Systems for Assisted Diagnosis and Navigation},
  abstract  = {<p>PURPOSE: This paper describes an open-source ultrasound-guided central line insertion training system. Modern clinical guidelines are increasingly recommending ultrasound guidance for this procedure due to the decrease in morbidity it provides. However, there are no adequate low-cost systems for helping new clinicians train their inter-hand coordination for this demanding procedure. METHODS: This paper details a training platform which can be recreated with any standard ultrasound machine using inexpensive components. We describe the hardware, software, and calibration procedures with the intention that a reader can recreate this system themselves. RESULTS: The reproducibility and accuracy of the ultrasound calibration for this system was examined. We found that across the ultrasound image the calibration error was less than 2 mm. In a small feasibility study, two participants performed 5 needle insertions each with an average of slightly above 2 mm error. CONCLUSION: We conclude that the accuracy of the system is sufficient for clinician training.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Lasso, Andras; Nam, Hannah H.; Dinh, Patrick V.; Pinter, Csaba; Fillion-Robin, Jean-Christophe; Pieper, Steve; Jhaveri, Sankhesh; Vimort, Jean-Baptiste; Martin, Ken; Asselin, Mark; McGowan, Francis X.; Kikinis, Ron; Fichtinger, Gabor; Jolley, Matthew A.
Interaction with Volume-Rendered Three-Dimensional Echocardiographic Images in Virtual Reality Journal Article
In: J Am Soc Echocardiogr, vol. 31, no. 10, pp. 1158–1160, 2018.
@article{Lasso2018,
  title     = {Interaction with Volume-Rendered Three-Dimensional Echocardiographic Images in Virtual Reality},
  author    = {Andras Lasso and Hannah H. Nam and Patrick V. Dinh and Csaba Pinter and Jean-Christophe Fillion-Robin and Steve Pieper and Sankhesh Jhaveri and Jean-Baptiste Vimort and Ken Martin and Mark Asselin and Francis X. McGowan and Ron Kikinis and Gabor Fichtinger and Matthew A. Jolley},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2018-01-01},
  journal   = {J Am Soc Echocardiogr},
  volume    = {31},
  number    = {10},
  pages     = {1158--1160},
  abstract  = {<p>Three-dimensional (3D) imaging is increasingly important in echocardiography. However, viewing of 3D images on a flat, two-dimensional screen is a barrier to comprehension of latent information. There have been previous attempts to visualize the full 3D nature of the data, but they have not been widely adopted. For example, 3D printing offers realistic interaction but is time consuming, has limited means for the observer to move into or through the model, and is not yet practical for routine clinical use. Furthermore, the heart beats, and 3D printed models are static. Stereoscopic viewing on 2D screens (as at a movie theater) is possible but is expensive, may not provide an immersive experience, and does not have integrated 3D input devices (controllers).</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
House, Rachael; Lasso, Andras; Kunz, M.; Rudan, John; Martou, Glykeria; Fichtinger, Gabor
Breast volume measurement using three-dimensional surface scan for fat grafting planning and monitoring Conference
CARS, 2018.
@conference{House2018a,
  author    = {Rachael House and Andras Lasso and M. Kunz and John Rudan and Glykeria Martou and Gabor Fichtinger},
  title     = {Breast volume measurement using three-dimensional surface scan for fat grafting planning and monitoring},
  booktitle = {CARS},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2018-01-01},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/House2018a.pdf},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
House, Rachael; Lasso, Andras; Kunz, M.; Rudan, John; Martou, Glykeria; Fichtinger, Gabor
Breast volume computation for planning and monitoring fat grafting Conference
Imaging Network Ontario (IMNO), 2018.
@conference{House2018b,
  author    = {Rachael House and Andras Lasso and M. Kunz and John Rudan and Glykeria Martou and Gabor Fichtinger},
  title     = {Breast volume computation for planning and monitoring fat grafting},
  booktitle = {Imaging Network Ontario (IMNO)},
  year      = {2018},
  date      = {2018-01-01},
  urldate   = {2018-01-01},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/House2018b.pdf},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Rae, Emily; Lasso, Andras; Holden, M.; Morin, Evelyn; Levy, Ron; Fichtinger, Gabor
Accuracy of the Microsoft HoloLens for neurosurgical burr hole placement Conference
16th Annual Imaging Network Ontario Symposium (ImNO), Toronto, Canada, 2018.
@conference{Rae2018b,
title = {Accuracy of the {Microsoft HoloLens} for neurosurgical burr hole placement},
author = {Emily Rae and Andras Lasso and M. Holden and Evelyn Morin and Ron Levy and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Rae2018b.pdf},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {16th Annual Imaging Network Ontario Symposium (ImNO)},
address = {Toronto, Canada},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Herz, Christian; Fillion-Robin, Jean-Christophe; Onken, Michael; Riesmeier, Jörg; Lasso, Andras; Pinter, Csaba; Fichtinger, Gabor; Pieper, Steve; Clunie, David; Kikinis, Ron; Fedorov, Andriy
dcmqi: An Open Source Library for Standardized Communication of Quantitative Image Analysis Results Using DICOM Journal Article
In: Cancer Research, vol. 77, no. 21, pp. e87–e90, 2017, ISSN: 0008-5472.
@article{Herz2017,
title = {dcmqi: An Open Source Library for Standardized Communication of Quantitative Image Analysis Results Using {DICOM}},
author = {Christian Herz and Jean-Christophe Fillion-Robin and Michael Onken and Jörg Riesmeier and Andras Lasso and Csaba Pinter and Gabor Fichtinger and Steve Pieper and David Clunie and Ron Kikinis and Andriy Fedorov},
url = {http://cancerres.aacrjournals.org/content/77/21/e87
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Herz2017_1.pdf},
doi = {10.1158/0008-5472.CAN-17-0336},
issn = {0008-5472},
year = {2017},
date = {2017-11-01},
urldate = {2017-11-01},
journal = {Cancer Research},
volume = {77},
number = {21},
pages = {e87--e90},
abstract = {<p>Quantitative analysis of clinical image data is an active area of research that holds promise for precision medicine, early assessment of treatment response, and objective characterization of the disease. Interoperability, data sharing, and the ability to mine the resulting data are of increasing importance, given the explosive growth in the number of quantitative analysis methods being proposed. The Digital Imaging and Communications in Medicine (DICOM) standard is widely adopted for image and metadata in radiology. dcmqi (DICOM for Quantitative Imaging) is a free, open source library that implements conversion of the data stored in commonly used research formats into the standard DICOM representation. dcmqi source code is distributed under BSD-style license. It is freely available as a precompiled binary package for every major operating system, as a Docker image, and as an extension to 3D Slicer. Installation and usage instructions are provided in the GitHub repository at https://github.com/qiicr/dcmqi. Cancer Res; 77(21); e87–90. ©2017 AACR.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Brastianos, Harry; Vaughan, Thomas; Lasso, Andras; Westerland, Mary; Gooding, James; Fichtinger, Gabor; Falkson, Conrad
Real Time Electromagnetic Reconstruction of Catheter Paths in Multi-Catheter Interstitial Brachytherapy: A Phantom Model Conference
In: Int J Radiat Oncol Biol Phys, vol. 99, pp. E642, 2017.
@conference{Brastianos2017b,
title = {Real Time Electromagnetic Reconstruction of Catheter Paths in Multi-Catheter Interstitial Brachytherapy: A Phantom Model},
author = {Harry Brastianos and Thomas Vaughan and Andras Lasso and Mary Westerland and James Gooding and Gabor Fichtinger and Conrad Falkson},
year = {2017},
date = {2017-10-01},
urldate = {2017-10-01},
journal = {Int J Radiat Oncol Biol Phys},
volume = {99},
pages = {E642},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Moult, Eric; Lasso, Andras; Ungi, Tamas; Pinter, Csaba; Welch, Mattea; Fichtinger, Gabor
Improved temporal calibration of tracked ultrasound: an open-source solution Conference
ImNO2013 - Imaging Network Ontario Symposium, vol. 2, no. 04, 2017.
@conference{Moult2017,
title = {Improved temporal calibration of tracked ultrasound: an open-source solution},
author = {Eric Moult and Andras Lasso and Tamas Ungi and Csaba Pinter and Mattea Welch and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Moult2017.pdf},
year = {2017},
date = {2017-07-01},
urldate = {2017-07-01},
booktitle = {ImNO2013 - Imaging Network Ontario Symposium},
journal = {Journal of Medical Robotics Research},
volume = {2},
number = {04},
pages = {1750008},
abstract = {<p>In tracked ultrasound systems, temporal misalignment between image and tracker data results in incorrect image pose. We present a fully automatic temporal calibration. We image a flat plate in water with a tracked probe undergoing periodic uniaxial freehand translation. Using robust line detection scheme, we compute temporal misalignment as difference between probe and corresponding image position. From 240 sequences, standard deviation was under 5ms for standard imaging parameters. Source code is available in Public Library for Ultrasound Research, PLUS (www.plustoolkit.org).</p>},
internal-note = {NOTE(review): conference fields (ImNO 2013 booktitle) and journal fields (J Med Robot Res 2(4), 2017, p. 1750008) appear to come from two different records merged during import -- verify against the original publications and split into separate entries},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Brastianos, Harry; Vaughan, Thomas; Lasso, Andras; Westerland, Mary; Gooding, James; Ungi, Tamas; Fichtinger, Gabor; Falkson, Conrad
Demonstration of Catheter Insertion Using Electromagnetic Guidance in Breast Brachytherapy Conference
European Society for Radiotherapy & Oncology (ESTRO) Annual Congress, vol. Radiother Oncol 123, 2017.
@conference{Brastianos2016c,
title = {Demonstration of Catheter Insertion Using Electromagnetic Guidance in Breast Brachytherapy},
author = {Harry Brastianos and Thomas Vaughan and Andras Lasso and Mary Westerland and James Gooding and Tamas Ungi and Gabor Fichtinger and Conrad Falkson},
year = {2017},
date = {2017-05-01},
urldate = {2017-05-01},
booktitle = {European Society for Radiotherapy \& Oncology (ESTRO) Annual Congress},
journal = {Radiother Oncol},
volume = {123},
pages = {S91},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Ilina, Anna; Lasso, Andras; Jolley, Matthew A.; Wohler, Brittany; Nguyen, Alexander V.; Scanlan, Adam B.; Baum, Zachary M C; McGowan, Francis X.; Fichtinger, Gabor
Patient-specific pediatric silicone heart valve models based on 3D ultrasound Conference
SPIE Medical Imaging 2017, vol. 10135, SPIE Medical Imaging SPIE Medical Imaging, Orlando, FL, United States, Feb. 16, 2017, 2017.
@conference{Ilina2017a,
title = {Patient-specific pediatric silicone heart valve models based on {3D} ultrasound},
author = {Anna Ilina and Andras Lasso and Matthew A. Jolley and Brittany Wohler and Alexander V. Nguyen and Adam B. Scanlan and Zachary M C Baum and Francis X. McGowan and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ilina2017a.pdf},
year = {2017},
date = {2017-03-01},
urldate = {2017-03-01},
booktitle = {SPIE Medical Imaging 2017},
volume = {10135},
publisher = {SPIE Medical Imaging},
address = {Orlando, FL, United States, Feb. 16, 2017},
organization = {SPIE Medical Imaging},
abstract = {<p>PURPOSE: Patient-specific heart and valve models have shown promise as training and planning tools for heart surgery, but physically realistic valve models remain elusive. Available proprietary, simulation-focused heart valve models are generic adult mitral valves and do not allow for patient-specific modeling as may be needed for rare diseases such as congenitally abnormal valves. We propose creating silicone valve models from a 3D-printed plastic mold as a solution that can be adapted to any individual patient and heart valve at a fraction of the cost of direct 3D-printing using soft materials.<br />
<br />
METHODS: Leaflets of a pediatric mitral valve, a tricuspid valve in a patient with hypoplastic left heart syndrome, and a complete atrioventricular canal valve were segmented from ultrasound images. A custom software was developed to automatically generate molds for each valve based on the segmentation. These molds were 3D-printed and used to make silicone valve models. The models were designed with cylindrical rims of different sizes surrounding the leaflets, to show the outline of the valve and add rigidity. Pediatric cardiac surgeons practiced suturing on the models and evaluated them for use as surgical planning and training tools.<br />
<br />
RESULTS: Five out of six surgeons reported that the valve models would be very useful as training tools for cardiac surgery. In this first iteration of valve models, leaflets were felt to be unrealistically thick or stiff compared to real pediatric leaflets. A thin tube rim was preferred for valve flexibility.<br />
<br />
CONCLUSION: The valve models were well received and considered to be valuable and accessible tools for heart valve surgery training. Further improvements will be made based on surgeons’ feedback.<br />
<br />
Keywords: surgery, training, heart valve models, pediatric, patient-specific, 3D-printing, congenital heart disease, mitral valve, tricuspid valve, complete atrioventricular canal defect, ultrasound</p>},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
Usability of a real-time tracked augmented reality display system in musculoskeletal injections Conference
SPIE Medical Imaging 2017: Image-Guided Procedures, Robotic Interventions, and Modeling, 2017.
@conference{Baum2017a,
  author    = {Zachary M C Baum and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
  title     = {Usability of a real-time tracked augmented reality display system in musculoskeletal injections},
  booktitle = {SPIE Medical Imaging 2017: Image-Guided Procedures, Robotic Interventions, and Modeling},
  year      = {2017},
  date      = {2017-03-01},
  urldate   = {2017-03-01},
  doi       = {10.1117/12.2255897},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2017a_0.pdf},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Kamali, Shahrokh; Ungi, Tamas; Lasso, Andras; Yan, Christina; Lougheed, Matthew; Fichtinger, Gabor
Localization of the transverse processes in ultrasound for spinal curvature measurement Conference
SPIE Medical Imaging, vol. 10135, 2017.
@conference{Kamali2017a,
title = {Localization of the transverse processes in ultrasound for spinal curvature measurement},
author = {Shahrokh Kamali and Tamas Ungi and Andras Lasso and Christina Yan and Matthew Lougheed and Gabor Fichtinger},
url = {http://proceedings.spiedigitallibrary.org/proceeding.aspx?articleid=2615387
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Kamali2017a.pdf},
doi = {10.1117/12.2256007},
year = {2017},
date = {2017-02-01},
urldate = {2017-02-01},
booktitle = {SPIE Medical Imaging},
volume = {10135},
pages = {101350I--101350I-7},
abstract = {<p>PURPOSE: In scoliosis monitoring, tracked ultrasound has been explored as a safer imaging alternative to traditional radiography. The use of ultrasound in spinal curvature measurement requires identification of vertebral landmarks such as transverse processes, but as bones have reduced visibility in ultrasound imaging, skeletal landmarks are typically segmented manually, which is an exceedingly laborious and long process. We propose an automatic algorithm to segment and localize the surface of bony areas in the transverse process for scoliosis in ultrasound.METHODS: The algorithm uses cascade of filters to remove low intensity pixels, smooth the image and detect bony edges. By applying first differentiation, candidate bony areas are classified. The average intensity under each area has a correlation with the possibility of a shadow, and areas with strong shadow are kept for bone segmentation. The segmented images are used to reconstruct a 3-D volume to represent the whole spinal structure around the transverse processes. RESULTS: A comparison between the manual ground truth segmentation and the automatic algorithm in 50 images showed 0.17 mm average difference. The time to process all 1,938 images was about 37 Sec. (0.0191 Sec. / Image), including reading the original sequence file.CONCLUSION: Initial experiments showed the algorithm to be sufficiently accurate and fast for segmentation transverse processes in ultrasound for spinal curvature measurement. An extensive evaluation of the method is currently underway on images from a larger patient cohort and using multiple observers in producing ground truth segmentation.</p>},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}