Dr. Tamas Ungi is a Senior Research Scientist at Queen’s University in Kingston, Ontario, Canada, where he holds adjunct faculty appointments at the School of Computing and the Department of Surgery. He received his MD degree in 2006 and PhD in Radiology in 2011, both from the University of Szeged in Hungary. Dr. Ungi’s research focuses on the translation of new technologies to clinical research, with a particular interest in image-guided medical interventions, skills education, and real-time applications of artificial intelligence in ultrasound-guided procedures.
Kim, Andrew S.; Yeung, Chris; Szabo, Robert; Sunderland, Kyle; Hisey, Rebecca; Morton, David; Kikinis, Ron; Diao, Babacar; Mousavi, Parvin; Ungi, Tamas; Fichtinger, Gabor
SPIE, 2024.
@conference{Kim2024,
  title     = {Percutaneous nephrostomy needle guidance using real-time 3D anatomical visualization with live ultrasound segmentation},
  author    = {Andrew S. Kim and Chris Yeung and Robert Szabo and Kyle Sunderland and Rebecca Hisey and David Morton and Ron Kikinis and Babacar Diao and Parvin Mousavi and Tamas Ungi and Gabor Fichtinger},
  editor    = {Maryam E. Rettmann and Jeffrey H. Siewerdsen},
  doi       = {10.1117/12.3006533},
  year      = {2024},
  date      = {2024-03-29},
  urldate   = {2024-03-29},
  publisher = {SPIE},
  abstract  = {PURPOSE: Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software. METHODS: Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the trials were analyzed for needle tip distance to the center of the target calyx, needle insertion time, and success rate. Participants also completed a survey on their experience. RESULTS: Using the visualization aid showed significantly higher accuracy, while needle insertion time and success rate were not statistically significant at our sample size. Participants mostly responded positively to the visualization aid, and 80% found it easier to use than ultrasound needle guidance. CONCLUSION: We found that our visualization aid produced increased accuracy and an overall positive experience. We demonstrated that our system is functional and stable and believe that the workflow with this system can be applied to other procedures. This visualization aid system is effective on phantoms and is ready for translation with clinical data.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
PURPOSE: Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software. METHODS: Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the trials were analyzed for needle tip distance to the center of the target calyx, needle insertion time, and success rate. Participants also completed a survey on their experience. RESULTS: Using the visualization aid showed significantly higher accuracy, while needle insertion time and success rate were not statistically significant at our sample size. Participants mostly responded positively to the visualization aid, and 80% found it easier to use than ultrasound needle guidance. CONCLUSION: We found that our visualization aid produced increased accuracy and an overall positive experience. We demonstrated that our system is functional and stable and believe that the workflow with this system can be applied to other procedures. This visualization aid system is effective on phantoms and is ready for translation with clinical data.
Radcliffe, Olivia; Connolly, Laura; Ungi, Tamas; Yeo, Caitlin; Rudan, John F.; Fichtinger, Gabor; Mousavi, Parvin
Navigated surgical resection cavity inspection for breast conserving surgery Proceedings
2023.
@conference{nokey,
  title     = {Navigated surgical resection cavity inspection for breast conserving surgery},
  author    = {Olivia Radcliffe and Laura Connolly and Tamas Ungi and Caitlin Yeo and John F. Rudan and Gabor Fichtinger and Parvin Mousavi},
  doi       = {10.1117/12.2654015},
  year      = {2023},
  date      = {2023-04-03},
  abstract  = {Up to 40% of Breast Conserving Surgery (BCS) patients must undergo repeat surgery because cancer is left behind in the resection cavity. The mobility of the breast resection cavity makes it difficult to localize residual cancer and, therefore, cavity shaving is a common technique for cancer removal. Cavity shaving involves removing an additional layer of tissue from the entire resection cavity, often resulting in unnecessary healthy tissue loss. In this study, we demonstrated a navigation system and open-source software module that facilitates visualization of the breast resection cavity for targeted localization of residual cancer.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Cernelev, Pavel-Dumitru; Moga, Kristof; Groves, Leah; Haidegger, Tamás; Fichtinger, Gabor; Ungi, Tamas
Determining boundaries of accurate tracking for electromagnetic sensors Conference
SPIE, 2023.
@conference{Cernelev2023,
  title     = {Determining boundaries of accurate tracking for electromagnetic sensors},
  author    = {Pavel-Dumitru Cernelev and Kristof Moga and Leah Groves and Tamás Haidegger and Gabor Fichtinger and Tamas Ungi},
  editor    = {Cristian A. Linte and Jeffrey H. Siewerdsen},
  doi       = {10.1117/12.2654428},
  year      = {2023},
  date      = {2023-04-03},
  urldate   = {2023-04-03},
  publisher = {SPIE},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor; Korzeniowski, Martin
Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction Conference
Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting, Canadian Association for Radiation Oncologists, 2022.
@conference{Kitner2022ab,
  title     = {Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction},
  author    = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger and Martin Korzeniowski},
  year      = {2022},
  date      = {2022-09-01},
  urldate   = {2022-09-01},
  booktitle = {Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting},
  publisher = {Canadian Association for Radiation Oncologists},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Connolly, Laura; Degeut, Anton; Leonard, Simon; Tokuda, Junichi; Ungi, Tamas; Krieger, Axel; Kazanzides, Peter; Mousavi, Parvin; Fichtinger, Gabor; Taylor, Russell H.
Bridging 3D Slicer and ROS2 for Image-Guided Robotic Interventions Journal Article
In: Sensors, vol. 22, 2022.
@article{Connolly2022c,
  title     = {Bridging 3D Slicer and ROS2 for Image-Guided Robotic Interventions},
  author    = {Laura Connolly and Anton Degeut and Simon Leonard and Junichi Tokuda and Tamas Ungi and Axel Krieger and Peter Kazanzides and Parvin Mousavi and Gabor Fichtinger and Russell H. Taylor},
  doi       = {10.3390/s22145336},
  year      = {2022},
  date      = {2022-07-01},
  journal   = {Sensors},
  volume    = {22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline Conference
Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting, Canadian Organization of Medical Physicists, 2022.
@conference{Kitner2022a,
  title     = {Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline},
  author    = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
  year      = {2022},
  date      = {2022-06-01},
  urldate   = {2022-06-01},
  booktitle = {Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting},
  publisher = {Canadian Organization of Medical Physicists},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Hu, Zoe; Fauerbach, Paola V. Nasute; Yeung, Chris; Ungi, Tamas; Rudan, John; Engel, C. Jay; Mousavi, Parvin; Fichtinger, Gabor; Jabs, Doris
Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 17, no. 9, pp. 1663–1672, 2022.
@article{Hu2022,
  title     = {Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation},
  author    = {Zoe Hu and Paola V. Nasute Fauerbach and Chris Yeung and Tamas Ungi and John Rudan and C. Jay Engel and Parvin Mousavi and Gabor Fichtinger and Doris Jabs},
  doi       = {10.1007/s11548-022-02658-4},
  year      = {2022},
  date      = {2022-05-01},
  urldate   = {2022-05-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  volume    = {17},
  number    = {9},
  pages     = {1663--1672},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Klosa, Elizabeth; Hisey, R.; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Identifying tissues for task recognition in training of open inguinal hernia repairs Conference
Imaging Network of Ontario Symposium, 2022.
@conference{Klosa2022b,
  title     = {Identifying tissues for task recognition in training of open inguinal hernia repairs},
  author    = {Elizabeth Klosa and R. Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Klosa2022b.pdf},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of using object detection for performance assessment in central venous catheterization Conference
Imaging Network of Ontario Symposium, 2022.
@conference{ODriscoll2022b,
  title     = {Feasibility of using object detection for performance assessment in central venous catheterization},
  author    = {Olivia O’Driscoll and R. Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2021b.pdf},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automated Catheter Segmentation in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy Conference
Imaging Network Ontario (IMNO) 2022 Symposium, Imaging Network of Ontario, Online, 2022.
@conference{Kitner2022ac,
  title     = {Automated Catheter Segmentation in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy},
  author    = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  booktitle = {Imaging Network Ontario (IMNO) 2022 Symposium},
  publisher = {Imaging Network of Ontario},
  address   = {Online},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Klosa, Elizabeth; Hisey, R.; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Tissue segmentation for workflow recognition in open inguinal hernia repair training Conference
SPIE Medical Imaging, SPIE Medical Imaging SPIE Medical Imaging, San Diego, 2022.
@conference{Klosa2022a,
  title        = {Tissue segmentation for workflow recognition in open inguinal hernia repair training},
  author       = {Elizabeth Klosa and R. Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Klosa2022a.pdf},
  year         = {2022},
  date         = {2022-02-01},
  urldate      = {2022-02-01},
  booktitle    = {SPIE Medical Imaging},
  publisher    = {SPIE Medical Imaging},
  address      = {San Diego},
  organization = {SPIE Medical Imaging},
  abstract     = {PURPOSE: As medical education adopts a competency-based training method, experts are spending substantial amounts of time instructing and assessing trainees’ competence. In this study, we look to develop a computer-assisted training platform that can provide instruction and assessment of open inguinal hernia repairs without needing an expert observer. We recognize workflow tasks based on the tool-tissue interactions, suggesting that we first need a method to identify tissues. This study aims to train a neural network in identifying tissues in a low-cost phantom as we work towards identifying the tool-tissue interactions needed for task recognition. METHODS: Eight simulated tissues were segmented throughout five videos from experienced surgeons who performed open inguinal hernia repairs on phantoms. A U-Net was trained using leave-one-user-out cross validation. The average F-score, false positive rate and false negative rate were calculated for each tissue to evaluate the U-Net’s performance. RESULTS: Higher F-scores and lower false negative and positive rates were recorded for the skin, hernia sac, spermatic cord, and nerves, while slightly lower metrics were recorded for the subcutaneous tissue, Scarpa’s fascia, external oblique aponeurosis and superficial epigastric vessels. CONCLUSION: The U-Net performed better in recognizing tissues that were relatively larger in size and more prevalent, while struggling to recognize smaller tissues only briefly visible. Since workflow recognition does not require perfect segmentation, we believe our U-Net is sufficient in recognizing the tissues of an inguinal hernia repair phantom. Future studies will explore combining our segmentation U-Net with tool detection as we work towards workflow recognition.},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of object detection for skill assessment in central venous catheterization Conference
SPIE Medical Imaging, SPIE Medical Imaging SPIE Medical Imaging, San Diego, 2022.
@conference{ODriscoll2022a,
  title        = {Feasibility of object detection for skill assessment in central venous catheterization},
  author       = {Olivia O’Driscoll and R. Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2022a.pdf},
  year         = {2022},
  date         = {2022-02-01},
  urldate      = {2022-02-01},
  booktitle    = {SPIE Medical Imaging},
  publisher    = {SPIE Medical Imaging},
  address      = {San Diego},
  organization = {SPIE Medical Imaging},
  abstract     = {<p><strong>Purpose: </strong>Computer-assisted surgical skill assessment methods have traditionally relied on tracking tool motion with physical sensors. These tracking systems can be expensive, bulky, and impede tool function. Recent advances in object detection networks have made it possible to quantify tool motion using only a camera. These advances open the door for a low-cost alternative to current physical tracking systems for surgical skill assessment. This study determines the feasibility of using metrics computed with object detection by comparing them to widely accepted metrics computed using traditional tracking methods in central venous catheterization. <strong>Methods:</strong> Both video and tracking data were recorded from participants performing central venous catheterization on a venous access phantom. A Faster Region-Based Convolutional Neural Network was trained to recognize the ultrasound probe and syringe on the video data. Tracking-based metrics were computed using the Perk Tutor extension of 3D Slicer. The path length and usage time for each tool were then computed using both the video and tracking data. The metrics from object detection and tracking were compared using Spearman rank correlation. <strong>Results: </strong>The path lengths had a rank correlation coefficient of 0.22 for the syringe (p<0.03) and 0.35 (p<0.001) for the ultrasound probe. For the usage times, the correlation coefficient was 0.37 (p<0.001) for the syringe and 0.34 (p<0.001) for the ultrasound probe. <strong>Conclusions</strong>: The video-based metrics correlated significantly with the tracked metrics, suggesting that object detection could be a feasible skill assessment method for central venous catheterization.</p>},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Connolly, Laura; Jamzad, Amoon; Nikniazi, Arash; Poushimin, Rana; Lasso, Andras; Sunderland, Kyle R.; Ungi, Tamas; Nunzi, Jean Michel; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
An open-source testbed for developing image-guided robotic tumor-bed inspection Conference
Imaging Network of Ontario (ImNO) Symposium, 2022.
@conference{connolly2022b,
  title     = {An open-source testbed for developing image-guided robotic tumor-bed inspection},
  author    = {Laura Connolly and Amoon Jamzad and Arash Nikniazi and Rana Poushimin and Andras Lasso and Kyle R. Sunderland and Tamas Ungi and Jean Michel Nunzi and John Rudan and Gabor Fichtinger and Parvin Mousavi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/01/Connolly2022b.pdf},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  booktitle = {Imaging Network of Ontario (ImNO) Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Poole, Meredith; Ungi, Tamas; Fichtinger, Gabor; Zevin, Boris
Training in soft tissue resection using real-time visual computer navigation feedback from the Surgery Tutor: A randomized controlled trial Journal Article
In: Surgery, 2021, ISSN: 0039-6060.
@article{Poole2021,
  title     = {Training in soft tissue resection using real-time visual computer navigation feedback from the Surgery Tutor: A randomized controlled trial},
  author    = {Meredith Poole and Tamas Ungi and Gabor Fichtinger and Boris Zevin},
  url       = {https://www.sciencedirect.com/science/article/pii/S0039606021011855},
  doi       = {10.1016/j.surg.2021.11.037},
  issn      = {0039-6060},
  year      = {2021},
  date      = {2021-12-01},
  urldate   = {2021-12-01},
  journal   = {Surgery},
  abstract  = {<p>Background In competency-based medical education, surgery trainees are often required to learn procedural skills in a simulated setting before proceeding to the clinical environment. The Surgery Tutor computer navigation platform allows for real-time proctor-less assessment of open soft tissue resection skills; however, the use of this platform as an aid in acquisition of procedural skills is yet to be explored. Methods In this prospective randomized controlled trial, 20 final year medical students were randomized to receive either training with real-time computer navigation feedback (Intervention, n = 10) or simulation training without navigation feedback (Control, n = 10) during resection of simulated non-palpable soft tissue tumors. Real-time computer navigation feedback allowed participants to visualize the position of their scalpel relative to the tumor. Computer navigation feedback was removed for postintervention assessment. Primary outcome was positive margin rate. Secondary outcomes were procedure time, mass of tissue excised, number of scalpel motions, and distance traveled by the scalpel. Results Training with real-time computer navigation resulted in a significantly lower positive margin rate as compared to training without navigation feedback (0% vs 40%, P = .025). All other performance metrics were not significantly different between the 2 groups. Participants in the intervention group displayed significant improvement in positive margin rate from baseline to final assessment (80% vs 0%, P < .01), whereas participants in the Control group did not. Conclusion Real-time visual computer navigation feedback from the Surgery Tutor resulted in superior acquisition of procedural skills as compared to training without navigation feedback.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Barr, Colton; Hisey, R.; Ungi, Tamas; Fichtinger, Gabor
Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization Conference
43rd Conference of the IEEE Engineering Medicine and Biology Society, 2021.
@conference{CBarr2021b,
  title     = {Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization},
  author    = {Colton Barr and R. Hisey and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CBarr2021a.pdf},
  year      = {2021},
  date      = {2021-10-01},
  urldate   = {2021-10-01},
  booktitle = {43rd Conference of the IEEE Engineering Medicine and Biology Society},
  abstract  = {<p>Central Line Tutor is a system that facilitates real-time feedback during training for central venous catheterization. One limitation of Central Line Tutor is its reliance on expensive, cumbersome electromagnetic tracking to facilitate various training aids, including ultrasound task identification and segmentation of neck vasculature. The purpose of this study is to validate deep learning methods for vessel segmentation and ultrasound pose classification in order to mitigate the system’s reliance on electromagnetic tracking. A large dataset of segmented and classified ultrasound images was generated from participant data captured using Central Line Tutor. A U-Net architecture was used to perform vessel segmentation, while a shallow Convolutional Neural Network (CNN) architecture was designed to classify the pose of the ultrasound probe. A second classifier architecture was also tested that used the U-Net output as the CNN input. The mean testing set Intersect over Union score for U-Net cross-validation was 0.746 ± 0.052. The mean test set classification accuracy for the CNN was 92.0% ± 3.0, while the U-Net + CNN achieved 92.7% ± 2.1%. This study highlights the potential for deep learning on ultrasound images to replace the current electromagnetic tracking-based methods for vessel segmentation and ultrasound pose classification, and represents an important step towards removing the electromagnetic tracker altogether. Removing the need for an external tracking system would significantly reduce the cost of Central Line Tutor and make it far more accessible to the medical trainees that would benefit from it most.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
System for central venous catheterization training using computer vision-based workflow feedback Journal Article
In: IEEE Transactions on Biomedical Engineering, 2021.
@article{Hisey2021b,
  title     = {System for central venous catheterization training using computer vision-based workflow feedback},
  author    = {R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  year      = {2021},
  date      = {2021-10-01},
  urldate   = {2021-10-01},
  journal   = {IEEE Transactions on Biomedical Engineering},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Connolly, Laura; Degeut, Anton; Sunderland, Kyle R.; Lasso, Andras; Ungi, Tamas; Rudan, John; Taylor, Russell H.; Mousavi, Parvin; Fichtinger, Gabor
An open-source platform for cooperative semi-autonomous robotic surgery Conference
IEEE International Conference on Autonomous Systems, IEEE IEEE, Montreal, Quebec, 2021.
@conference{Connolly2021,
  title        = {An open-source platform for cooperative semi-autonomous robotic surgery},
  author       = {Laura Connolly and Anton Degeut and Kyle R. Sunderland and Andras Lasso and Tamas Ungi and John Rudan and Russell H. Taylor and Parvin Mousavi and Gabor Fichtinger},
  doi          = {10.1109/ICAS49788.2021.9551149},
  year         = {2021},
  date         = {2021-10-01},
  urldate      = {2021-10-01},
  booktitle    = {IEEE International Conference on Autonomous Systems},
  publisher    = {IEEE},
  address      = {Montreal, Quebec},
  organization = {IEEE},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Fichtinger, Gabor; Mousavi, Parvin; Ungi, Tamas; Fenster, Aaron; Abolmaesumi, Purang; Kronreif, Gernot; Ruiz-Alzola, Juan; Ndoye, Alain; Diao, Babacar; Kikinis, Ron
Design of an Ultrasound-Navigated Prostate Cancer Biopsy System for Nationwide Implementation in Senegal Journal Article
In: Journal of Imaging, vol. 7, no. 8, pp. 154, 2021, ISSN: 2313-433X.
@article{Fichtinger2021,
  title     = {Design of an Ultrasound-Navigated Prostate Cancer Biopsy System for Nationwide Implementation in Senegal},
  author    = {Gabor Fichtinger and Parvin Mousavi and Tamas Ungi and Aaron Fenster and Purang Abolmaesumi and Gernot Kronreif and Juan Ruiz-Alzola and Alain Ndoye and Babacar Diao and Ron Kikinis},
  url       = {https://www.mdpi.com/2313-433X/7/8/154},
  doi       = {10.3390/jimaging7080154},
  issn      = {2313-433X},
  year      = {2021},
  date      = {2021-08-01},
  urldate   = {2021-08-01},
  journal   = {Journal of Imaging},
  volume    = {7},
  number    = {8},
  pages     = {154},
  abstract  = {<p>This paper presents the design of NaviPBx, an ultrasound-navigated prostate cancer biopsy system. NaviPBx is designed to support an affordable and sustainable national healthcare program in Senegal. It uses spatiotemporal navigation and multiparametric transrectal ultrasound to guide biopsies. NaviPBx integrates concepts and methods that have been independently validated previously in clinical feasibility studies and deploys them together in a practical prostate cancer biopsy system. NaviPBx is based entirely on free open-source software and will be shared as a free open-source program with no restriction on its use. NaviPBx is set to be deployed and sustained nationwide through the Senegalese Military Health Service. This paper reports on the results of the design process of NaviPBx. Our approach concentrates on “frugal technology”, intended to be affordable for low–middle income (LMIC) countries. Our project promises the wide-scale application of prostate biopsy and will foster time-efficient development and programmatic implementation of ultrasound-guided diagnostic and therapeutic interventions in Senegal and beyond.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Barr, Colton; Hisey, R.; Ungi, Tamas; Fichtinger, Gabor
Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization Conference
Imaging Network of Ontario Symposium, 2021.
@conference{CBarr2021a,
  title     = {Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization},
  author    = {Colton Barr and R. Hisey and Tamas Ungi and Gabor Fichtinger},
  year      = {2021},
  date      = {2021-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
Imaging Network of Ontario Symposium, 2021.
@conference{Hisey2021a,
  title     = {Central Line Tutor: using computer vision workflow recognition in a central venous catheterization training system},
  author    = {R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/RHisey_ImNO2021.pdf},
  year      = {2021},
  date      = {2021-02-01},
  urldate   = {2021-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Wu, Catherine O.; Diao, Babacar; Ungi, Tamas; Sedghi, Alireza; Kikinis, Ron; Mousavi, Parvin; Fichtinger, Gabor
Development of an open-source system for prostate biopsy training in Senegal Conference
SPIE Medical Imaging 2021: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 11598, 2021.
@conference{CWu2021a,
  title     = {Development of an open-source system for prostate biopsy training in Senegal},
  author    = {Catherine O. Wu and Babacar Diao and Tamas Ungi and Alireza Sedghi and Ron Kikinis and Parvin Mousavi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2021a-poster_0.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2021a_0.pdf},
  year      = {2021},
  date      = {2021-02-01},
  urldate   = {2021-02-01},
  booktitle = {SPIE Medical Imaging 2021: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {11598},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
SPIE Medical Imaging, 2021.
@conference{ODriscoll2021a,
  title     = {Object detection to compute performance metrics for skill assessment in central venous catheterization},
  author    = {Olivia O’Driscoll and R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  url       = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/11598/1159816/Object-detection-to-compute-performance-metrics-for-skill-assessment-in/10.1117/12.2581889.short?SSO=1
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2021a.pdf},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  booktitle = {SPIE Medical Imaging},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
Imaging Network of Ontario Symposium, 2021.
@conference{ODriscoll2021b,
  title     = {Surgical tool tracking with object detection for performance assessment in central venous catheterization},
  author    = {Olivia O’Driscoll and R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2021b.pdf},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Connolly, Laura; Sunderland, Kyle R.; Lasso, Andras; Degeut, Anton; Ungi, Tamas; Rudan, John; Taylor, Russell H.; Mousavi, Parvin; Fichtinger, Gabor
A platform for robot-assisted Intraoperative imaging in breast conserving surgery Conference
Imaging Network of Ontario Symposium, Imaging Network of Ontario Symposium, Online, 2021.
@conference{Connolly2021b,
title = {A platform for robot-assisted Intraoperative imaging in breast conserving surgery},
author = {Laura Connolly and Kyle R. Sunderland and Andras Lasso and Anton Deguet and Tamas Ungi and John Rudan and Russell H. Taylor and Parvin Mousavi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2021a_1.pdf},
year = {2021},
date = {2021-01-01},
urldate = {2021-01-01},
booktitle = {Imaging Network of Ontario Symposium},
publisher = {Imaging Network of Ontario Symposium},
address = {Online},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Gauvin, Gabrielle; Yeo, Caitlin T; Ungi, Tamas; Merchant, Shaila; Lasso, Andras; Jabs, Doris; Vaughan, Thomas; Rudan, John; Walker, Ross; Fichtinger, Gabor; Engel, C. Jay
Real-time electromagnetic navigation for breast-conserving surgery using NaviKnife technology: A matched case-control study Journal Article
In: The Breast Journal, vol. 26, no. 3, pp. 399-405, 2020.
@article{Gauvin2019,
  title     = {Real-time electromagnetic navigation for breast-conserving surgery using NaviKnife technology: A matched case-control study},
  author    = {Gabrielle Gauvin and Caitlin T Yeo and Tamas Ungi and Shaila Merchant and Andras Lasso and Doris Jabs and Thomas Vaughan and John Rudan and Ross Walker and Gabor Fichtinger and C. Jay Engel},
  doi       = {10.1111/tbj.13480},
  year      = {2020},
  date      = {2020-09-01},
  urldate   = {2020-09-01},
  journal   = {The Breast Journal},
  volume    = {26},
  number    = {3},
  pages     = {399-405},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Hisey, R.; Chen, Brian; Ungi, Tamas; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor
Reinforcement learning approach for video-based task recognition in central venous catheterization Conference
Imaging Network of Ontario Symposium, 2020.
@conference{Hisey2020a,
title = {Reinforcement learning approach for video-based task recognition in central venous catheterization},
author = {Rebecca Hisey and Brian Chen and Tamas Ungi and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/RHisey_ImNO2020.pdf},
year = {2020},
date = {2020-06-01},
urldate = {2020-06-01},
booktitle = {Imaging Network of Ontario Symposium},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hisey, R.; Chen, Brian; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
Recognizing workflow tasks in central venous catheterization using convolutional neural networks and reinforcement learning Conference
International Conference on Computer Assisted Radiology and Surgery, 2020.
@conference{Hisey2020b,
title = {Recognizing workflow tasks in central venous catheterization using convolutional neural networks and reinforcement learning},
author = {Rebecca Hisey and Brian Chen and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/RHisey_CARS_2020_0.pdf},
year = {2020},
date = {2020-06-01},
urldate = {2020-06-01},
booktitle = {International Conference on Computer Assisted Radiology and Surgery},
pages = {94-95},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Ungi, Tamas; Greer, Hastings; Sunderland, Kyle R.; Wu, Victoria; Baum, Zachary M C; Schlenger, Christopher; Oetgen, Matthew; Cleary, Kevin; Aylward, Stephen; Fichtinger, Gabor
Automatic spine ultrasound segmentation for scoliosis visualization and measurement Journal Article
In: IEEE Transactions on Biomedical Engineering, vol. 67, no. 11, pp. 3234 - 3241, 2020.
@article{Ungi2020,
title = {Automatic spine ultrasound segmentation for scoliosis visualization and measurement},
author = {Tamas Ungi and Hastings Greer and Kyle R. Sunderland and Victoria Wu and Zachary M C Baum and Christopher Schlenger and Matthew Oetgen and Kevin Cleary and Stephen Aylward and Gabor Fichtinger},
url = {https://ieeexplore.ieee.org/document/9034149
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ungi2020.pdf},
doi = {10.1109/TBME.2020.2980540},
year = {2020},
date = {2020-03-01},
urldate = {2020-03-01},
journal = {IEEE Transactions on Biomedical Engineering},
volume = {67},
number = {11},
pages = {3234-3241},
abstract = {<p>\emph{Objective:} Integrate tracked ultrasound and AI methods to provide a safer and more accessible alternative to X-ray for scoliosis measurement. We propose automatic ultrasound segmentation for 3-dimensional spine visualization and scoliosis measurement to address difficulties in using ultrasound for spine imaging. \emph{Methods:} We trained a convolutional neural network for spine segmentation on ultrasound scans using data from eight healthy adult volunteers. We tested the trained network on eight pediatric patients. We evaluated image segmentation and 3-dimensional volume reconstruction for scoliosis measurement. \emph{Results:} As expected, fuzzy segmentation metrics reduced when trained networks were translated from healthy volunteers to patients. Recall decreased from 0.72 to 0.64 (8.2% decrease), and precision from 0.31 to 0.27 (3.7% decrease). However, after finding optimal thresholds for prediction maps, binary segmentation metrics performed better on patient data. Recall decreased from 0.98 to 0.97 (1.6% decrease), and precision from 0.10 to 0.06 (4.5% decrease). Segmentation prediction maps were reconstructed to 3-dimensional volumes and scoliosis was measured in all patients. Measurement in these reconstructions took less than 1 minute and had a maximum error of 2.2° compared to X-ray. \emph{Conclusion:} automatic spine segmentation makes scoliosis measurement both efficient and accurate in tracked ultrasound scans. \emph{Significance:} Automatic segmentation may overcome the limitations of tracked ultrasound that so far prevented its use as an alternative of X-ray in scoliosis measurement.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Barr, Keiran; Laframboise, Jacob; Ungi, Tamas; Hookey, Lawrence; Fichtinger, Gabor
Automated segmentation of computed tomography colonography images using a 3D U-Net Conference
SPIE Medical Imaging, 2020.
@conference{KBarr2020,
title = {Automated segmentation of computed tomography colonography images using a 3D U-Net},
author = {Keiran Barr and Jacob Laframboise and Tamas Ungi and Lawrence Hookey and Gabor Fichtinger},
doi = {10.1117/12.2549749},
year = {2020},
date = {2020-03-01},
urldate = {2020-03-01},
booktitle = {SPIE Medical Imaging},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Wu, Catherine O.; Sunderland, Kyle R.; Filippov, Mihail; Sainsbury, Ben; Fichtinger, Gabor; Ungi, Tamas
Workflow for creation and evaluation of virtual nephrolithotomy training models Conference
SPIE Medical Imaging Conference 2020, vol. 11315, 2020.
@conference{CWu2020,
  title     = {Workflow for creation and evaluation of virtual nephrolithotomy training models},
  author    = {Catherine O. Wu and Kyle R. Sunderland and Mihail Filippov and Ben Sainsbury and Gabor Fichtinger and Tamas Ungi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2020a-manuscript.pdf},
  doi       = {10.1117/12.2549354},
  year      = {2020},
  date      = {2020-03-01},
  urldate   = {2020-03-01},
  booktitle = {SPIE Medical Imaging Conference 2020},
  volume    = {11315},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Janssen, Natasja; Kaufmann, Martin; Santilli, Alice; Jamzad, Amoon; Kaitlin, Vanderbeck; Ren, Kevin; Ungi, Tamas; Mousavi, Parvin; Rudan, John; McKay, Doug; Wang, Amy; Fichtinger, Gabor
Navigated tissue characterization during skin cancer surgery Journal Article
In: Int J Comput Assist Radiol Surg, 2020.
@article{Janssen2020a,
title = {Navigated tissue characterization during skin cancer surgery},
author = {Natasja Janssen and Martin Kaufmann and Alice Santilli and Amoon Jamzad and Vanderbeck Kaitlin and Kevin Ren and Tamas Ungi and Parvin Mousavi and John Rudan and Doug McKay and Amy Wang and Gabor Fichtinger},
url = {https://doi.org/10.1007/s11548-020-02200-4},
doi = {10.1007/s11548-020-02200-4},
year = {2020},
date = {2020-01-01},
journal = {Int J Comput Assist Radiol Surg},
abstract = {<p><strong>Purpose: </strong>Basal cell carcinoma (BCC) is the most commonly diagnosed skin cancer and is treated by surgical resection. Incomplete tumor removal requires surgical revision, leading to significant healthcare costs and impaired cosmesis. We investigated the clinical feasibility of a surgical navigation system for BCC surgery, based on molecular tissue characterization using rapid evaporative ionization mass spectrometry (REIMS).</p>
<p><strong>Methods: </strong>REIMS enables direct tissue characterization by analysis of cell-specific molecules present within surgical smoke, produced during electrocautery tissue resection. A tissue characterization model was built by acquiring REIMS spectra of BCC, healthy skin and fat from ex vivo skin cancer specimens. This model was used for tissue characterization during navigated skin cancer surgery. Navigation was enabled by optical tracking and real-time visualization of the cautery relative to a contoured resection volume. The surgical smoke was aspirated into a mass spectrometer and directly analyzed with REIMS. Classified BCC was annotated at the real-time position of the cautery. Feasibility of the navigation system, and tissue classification accuracy for ex vivo and intraoperative surgery were evaluated.</p>
<p><strong>Results: </strong>Fifty-four fresh excision specimens were used to build the ex vivo model of BCC, normal skin and fat, with 92% accuracy. While 3 surgeries were successfully navigated without breach of sterility, the intraoperative performance of the ex vivo model was low (< 50%). Hypotheses are: (1) the model was trained on heterogeneous mass spectra that did not originate from a single tissue type, (2) during surgery mixed tissue types were resected and thus presented to the model, and (3) the mass spectra were not validated by pathology.</p>
<p><strong>Conclusion: </strong>REIMS-navigated skin cancer surgery has the potential to detect and localize remaining tumor intraoperatively. Future work will be focused on improving our model by using a precise pencil cautery tip for burning localized tissue types, and having pathology-validated mass spectra.</p>},
internal-note = {NOTE(review): abstract holds raw HTML and unescaped % and < characters; fine for the web renderer, but escape them if this file is ever compiled with LaTeX. Entry lacks the urldate field present in sibling entries; author "Vanderbeck Kaitlin" looks first/last swapped relative to the listing line above - verify},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
<p><strong>Methods: </strong>REIMS enables direct tissue characterization by analysis of cell-specific molecules present within surgical smoke, produced during electrocautery tissue resection. A tissue characterization model was built by acquiring REIMS spectra of BCC, healthy skin and fat from ex vivo skin cancer specimens. This model was used for tissue characterization during navigated skin cancer surgery. Navigation was enabled by optical tracking and real-time visualization of the cautery relative to a contoured resection volume. The surgical smoke was aspirated into a mass spectrometer and directly analyzed with REIMS. Classified BCC was annotated at the real-time position of the cautery. Feasibility of the navigation system, and tissue classification accuracy for ex vivo and intraoperative surgery were evaluated.</p>
<p><strong>Results: </strong>Fifty-four fresh excision specimens were used to build the ex vivo model of BCC, normal skin and fat, with 92% accuracy. While 3 surgeries were successfully navigated without breach of sterility, the intraoperative performance of the ex vivo model was low (< 50%). Hypotheses are: (1) the model was trained on heterogeneous mass spectra that did not originate from a single tissue type, (2) during surgery mixed tissue types were resected and thus presented to the model, and (3) the mass spectra were not validated by pathology.</p>
<p><strong>Conclusion: </strong>REIMS-navigated skin cancer surgery has the potential to detect and localize remaining tumor intraoperatively. Future work will be focused on improving our model by using a precise pencil cautery tip for burning localized tissue types, and having pathology-validated mass spectra.</p>
Wu, Victoria; Ungi, Tamas; Sunderland, Kyle R.; Pigeau, Grace; Schonewille, Abigael; Fichtinger, Gabor
Automatic segmentation of spinal ultrasound landmarks with U-net using multiple consecutive images for input Conference
SPIE Medical Imaging, 2020.
@conference{Wu2020a,
title = {Automatic segmentation of spinal ultrasound landmarks with U-net using multiple consecutive images for input},
author = {Victoria Wu and Tamas Ungi and Kyle R. Sunderland and Grace Pigeau and Abigael Schonewille and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2020a-manuscript.pdf},
internal-note = {NOTE(review): url points at CWu2020a-manuscript.pdf, the same file referenced by entry CWu2020 (a different paper) - likely a copy-paste error; verify the correct manuscript link},
doi = {10.1117/12.2549584},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {SPIE Medical Imaging},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Wu, Victoria; Ungi, Tamas; Sunderland, Kyle R.; Pigeau, Grace; Schonewille, Abigael; Fichtinger, Gabor
Using multiple frame U-net for automated segmentation of spinal ultrasound images Conference
18th Annual Imaging Network Ontario (ImNO) Symposium, 2020.
@conference{Wu2020b,
  title     = {Using multiple frame U-net for automated segmentation of spinal ultrasound images},
  author    = {Victoria Wu and Tamas Ungi and Kyle R. Sunderland and Grace Pigeau and Abigael Schonewille and Gabor Fichtinger},
  url       = {https://www.imno.ca/sites/default/files/ImNO2020Proceedings.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wu2020b.pdf},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  booktitle = {18th Annual Imaging Network Ontario (ImNO) Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Sunderland, Kyle R.; Zevin, Boris; Fichtinger, Gabor
Open source platform for automated collection of training data to support video-based feedback in surgical simulators Conference
SPIE Medical Imaging, SPIE, Houston, United States, 2020.
@conference{Laframboise2020a,
  title     = {Open source platform for automated collection of training data to support video-based feedback in surgical simulators},
  author    = {Jacob Laframboise and Tamas Ungi and Kyle R. Sunderland and Boris Zevin and Gabor Fichtinger},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  booktitle = {SPIE Medical Imaging},
  publisher = {SPIE},
  address   = {Houston, United States},
  abstract  = {<p><strong>Purpose:</strong> Surgical training could be improved by automatic detection of workflow steps. A platform to collect and organize tracking and video data would enable rapid development of deep learning solutions for surgical training. The purpose of this research is to demonstrate 3D Slicer / PLUS Toolkit as a platform for video annotation by identifying and annotating tools interacting with tissues in simulated hernia repair. <strong>Methods:</strong> Tracking data from an optical tracker and video data from a camera are collected by PLUS and 3D Slicer. To demonstrate the platform in use, we identify tissues during a surgical procedure using a neural network. The tracking data is used to identify what tool is in use. The solution is deployed with a custom Slicer module. <strong>Results:</strong> This platform allowed the collection and storage of enough tracked video data for training a convolutional neural network (CNN) to detect interactions with tissues and tools. The CNN was trained on this data and applied to new data with a testing accuracy of 98%. The model’s predictions can be weighted over several frames with a custom Slicer module to improve accuracy. <strong>Conclusion:</strong> We found the 3D Slicer and PLUS Toolkit platform to be a viable platform for training and deploying a solution that combines automatic video processing and optical tool tracking. We designed a proof of concept model to identify tissues with a trained CNN in real time along with tracking of surgical tools.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Connolly, Laura; Jamzad, Amoon; Kaufmann, Martin; Rubino, Rachel; Sedghi, Alireza; Ungi, Tamas; Asselin, Mark; Yam, Scott; Rudan, John; Nicol, Christopher; Fichtinger, Gabor; Mousavi, Parvin
Classification of tumor signatures from electrosurgical vapors using mass spectrometry and machine learning: a feasibility study Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE SPIE, Houston, Texas, United States, 2020.
@conference{Connolly2020a,
title = {Classification of tumor signatures from electrosurgical vapors using mass spectrometry and machine learning: a feasibility study},
author = {Laura Connolly and Amoon Jamzad and Martin Kaufmann and Rachel Rubino and Alireza Sedghi and Tamas Ungi and Mark Asselin and Scott Yam and John Rudan and Christopher Nicol and Gabor Fichtinger and Parvin Mousavi},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2020a.pdf},
doi = {10.1117/12.2549343},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
volume = {11315},
publisher = {SPIE},
address = {Houston, Texas, United States},
organization = {SPIE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Wiercigroch, Julia; Hashtrudi-Zaad, Keyvan; Ungi, Tamas; Bisleri, Gianluigi; Fichtinger, Gabor
Force and torque feedback in endoscopic vessel harvesting Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE SPIE, Houston, Texas, United States, 2020.
@conference{Wiercigroch2020a,
title = {Force and torque feedback in endoscopic vessel harvesting},
author = {Julia Wiercigroch and Keyvan Hashtrudi-Zaad and Tamas Ungi and Gianluigi Bisleri and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wiercigroch2020a.pdf},
doi = {10.1117/12.2550160},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
volume = {11315},
publisher = {SPIE},
address = {Houston, Texas, United States},
organization = {SPIE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Lasso, Andras; Pinter, Csaba; Choueib, Saleh; Ungi, Tamas; Fichtinger, Gabor
Enhance medical software applications with immersive virtual reality experience Conference
Techna Symposium, Toronto, ON, Canada, 2019.
@conference{Lasso2019,
  title     = {Enhance medical software applications with immersive virtual reality experience},
  author    = {Andras Lasso and Csaba Pinter and Saleh Choueib and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Lasso2019.pdf},
  year      = {2019},
  date      = {2019-10-01},
  urldate   = {2019-10-01},
  booktitle = {Techna Symposium},
  address   = {Toronto, ON, Canada},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Vaughan, Thomas; Brastianos, H; Ungi, Tamas; Lasso, Andras; Falkson, Conrad; Fichtinger, Gabor
Needle Navigation and Catheter Reconstruction for Breast Brachytherapy Using Open Source Software Journal Article
In: Acta Polytechnica Hungarica, vol. 16, no. 8, pp. 99-118, 2019.
@article{Vaughan2019a,
title = {Needle Navigation and Catheter Reconstruction for Breast Brachytherapy Using Open Source Software},
author = {Thomas Vaughan and H Brastianos and Tamas Ungi and Andras Lasso and Conrad Falkson and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Vaughan2019a_0.pdf},
year = {2019},
date = {2019-09-01},
urldate = {2019-09-01},
journal = {Acta Polytechnica Hungarica},
volume = {16},
number = {8},
pages = {99-118},
abstract = {<p>\emph{Abstract:} Interstitial breast brachytherapy is a method to deliver radiation therapy directly to the site of cancer. It is a challenging procedure because of issues in localizing the seroma, needles, and catheters within the soft tissue. In this paper we present two open-source technologies based on electromagnetic tracking: a navigation system to help target needles using a tracked needle guide, and software for electromagnetic reconstruction of catheter paths. These technologies were validated in phantom studies. We found that the navigation system helped a radiation oncologist to target needles more accurately than under ultrasound guidance (60 needles under each condition, 3.8 vs 3.3 mm placement error).</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wiercigroch, Julia; Baum, Zachary M C; Ungi, Tamas; Fritz, Jan; Fichtinger, Gabor
Validation of a low-cost adjustable, handheld needle guide for spine interventions Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Wiercigroch2019,
  title     = {Validation of a low-cost adjustable, handheld needle guide for spine interventions},
  author    = {Julia Wiercigroch and Zachary M C Baum and Tamas Ungi and Jan Fritz and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wiercigroch2019a.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Lia, H.; Baum, Zachary M C; Vaughan, Thomas; Ungi, Tamas; McGregor, T.; Fichtinger, Gabor
Usability and accuracy of an electromagnetically tracked partial nephrectomy navigation system Conference
17th Annual Imaging Network of Ontario Symposium (ImNO), 2019.
@conference{Lia2019a,
  title     = {Usability and accuracy of an electromagnetically tracked partial nephrectomy navigation system},
  author    = {H. Lia and Zachary M C Baum and Thomas Vaughan and Tamas Ungi and T. McGregor and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Lia2019a.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  booktitle = {17th Annual Imaging Network of Ontario Symposium (ImNO)},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Hisey, R.; Ungi, Tamas; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor
Comparison of convolutional neural networks for central venous catheterization tool detection Conference
Imaging Network of Ontario Symposium, Toronto, Ontario, 2019.
@conference{Hisey2019,
title = {Comparison of convolutional neural networks for central venous catheterization tool detection},
author = {Rebecca Hisey and Tamas Ungi and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/RHisey_ImNO2019_0.pdf},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {Imaging Network of Ontario Symposium},
address = {Toronto, Ontario},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Holden, M.; Tan, Pearl; Hookey, Lawrence; Fichtinger, Gabor
Analyzing the curvature of the colon in different patient positions Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Laframboise2019a,
  title     = {Analyzing the curvature of the colon in different patient positions},
  author    = {Jacob Laframboise and Tamas Ungi and Andras Lasso and Mark Asselin and M. Holden and Pearl Tan and Lawrence Hookey and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Laframboise2019a.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Connolly, Laura; Ungi, Tamas; Lasso, Andras; Vaughan, Thomas; Asselin, Mark; Mousavi, Parvin; Yam, Scott; Fichtinger, Gabor
Mechanically-Controlled Spectroscopic Imaging for Tissue Classification Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Connolly2019a,
title = {Mechanically-Controlled Spectroscopic Imaging for Tissue Classification},
author = {Laura Connolly and Tamas Ungi and Andras Lasso and Thomas Vaughan and Mark Asselin and Parvin Mousavi and Scott Yam and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2019a_3.pdf},
doi = {10.1117/12.2512481},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Church, Ben; Lasso, Andras; Ungi, Tamas; Schlenger, Christopher; Borschneck, Daniel P.; Mousavi, Parvin; Fichtinger, Gabor
Step-wise identification of ultrasound-visible anatomical landmarks for 3D visualization of scoliotic spine Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Baum2019a,
  title     = {Step-wise identification of ultrasound-visible anatomical landmarks for 3D visualization of scoliotic spine},
  author    = {Zachary M C Baum and Ben Church and Andras Lasso and Tamas Ungi and Christopher Schlenger and Daniel P. Borschneck and Parvin Mousavi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2019a_0.pdf},
  year      = {2019},
  date      = {2019-03-01},
  urldate   = {2019-03-01},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
House, Rachael; Kunz, M.; Valiquette, Chantal; Lasso, Andras; Ungi, Tamas; Rudan, John; Martou, Glykeria; Fichtinger, Gabor
Monitoring volume changes for breast reconstruction surgery using three dimensional optical surface scanning Conference
Canadian Society of Plastic Surgeons (CSPS) Annual Meeting, St. John's, Newfoundland, 2019.
@conference{House2019a,
title = {Monitoring volume changes for breast reconstruction surgery using three dimensional optical surface scanning},
author = {Rachael House and M. Kunz and Chantal Valiquette and Andras Lasso and Tamas Ungi and John Rudan and Glykeria Martou and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/House2019a-manuscript.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {Canadian Society of Plastic Surgeons (CSPS) Annual Meeting},
address = {St. John's, Newfoundland},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Isen, Jonah; Hisey, R.; Ungi, Tamas; Fichtinger, Gabor
Utilizing a convolutional neural network for tool detection in central venous catheterization Conference
33rd International Congress & Exhibition on Computer Assisted Radiology and Surgery (CARS), Int J CARS, Rennes, France, 2019.
@conference{Isen2019b,
title = {Utilizing a convolutional neural network for tool detection in central venous catheterization},
author = {Jonah Isen and Rebecca Hisey and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Isen2019b.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {33rd International Congress \& Exhibition on Computer Assisted Radiology and Surgery (CARS)},
publisher = {Int J CARS},
address = {Rennes, France},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Holden, M.; Tan, Pearl; Hookey, Lawrence; Fichtinger, Gabor
Quantifying the effect of patient position on the curvature of colons Conference
17th Annual Imaging Network Ontario Symposium (ImNO), Imaging Network Ontario (ImNO), London, Ontario, 2019.
@conference{Laframboise2019b,
  title     = {Quantifying the effect of patient position on the curvature of colons},
  author    = {Jacob Laframboise and Tamas Ungi and Andras Lasso and Mark Asselin and M. Holden and Pearl Tan and Lawrence Hookey and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Laframboise2019b.pdf},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  booktitle = {17th Annual Imaging Network Ontario Symposium (ImNO)},
  publisher = {Imaging Network Ontario (ImNO)},
  address   = {London, Ontario},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Asselin, Mark; Kaufmann, Martin; Wiercigroch, Julia; Ungi, Tamas; Lasso, Andras; Rudan, John; Fichtinger, Gabor
Navigated real-time molecular analysis in the operating theatre, demonstration of concept Conference
SPIE Medical Imaging 2019, 2019.
@conference{Asselin2019a,
title = {Navigated real-time molecular analysis in the operating theatre, demonstration of concept},
author = {Mark Asselin and Martin Kaufmann and Julia Wiercigroch and Tamas Ungi and Andras Lasso and John Rudan and Gabor Fichtinger},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {SPIE Medical Imaging 2019},
abstract = {<p><strong>PURPOSE</strong>: In the operating theatre surgeons are accustomed to using spatially navigated tools in conjunction with<br />
standard clinical imaging during a procedure. This gives them a good idea where they are in the patients’ anatomy but<br />
doesn’t provide information about the type of tissue they are dissecting. In this paper we demonstrate an integrated<br />
system consisting of a spatially navigated surgical electrocautery combined with real-time molecular analysis of the<br />
dissected tissue using mass spectrometry.<br />
<strong>METHODS</strong>: Using the 3D Slicer software package, we have integrated a commercially available neurosurgical<br />
navigation system with an intra-operative mass spectrometer (colloquially referred to as the intelligent knife, or iKnife)<br />
that analyzes the charged ions in the smoke created during cauterization. We demonstrate this system using a simulated<br />
patient comprised of an MRI scan from a brain cancer patient deformably registered to a plastic skull model. On the<br />
skull model we placed porcine and bovine tissues to simulate cancerous and healthy tissue, respectively. We built a<br />
PCA/LDA model to distinguish between these tissue types. The tissue classifications were displayed in a spatially<br />
localized manner in the pre-operative imaging, in both 2D and 3D views.<br />
<strong>RESULTS</strong>: We have demonstrated the feasibility of performing spatially navigated intra-operative analysis of tissues by<br />
mass spectrometry. We show that machine learning can classify our sample tissues, with an average computed<br />
confidence of 99.37 % for porcine tissue and 99.36% for bovine tissue.<br />
<strong>CONCLUSION</strong>: In this paper we demonstrate a proof of concept system for navigated intra-operative molecular<br />
analysis. This system may enable intra-operative awareness of spatially localized tissue classification during dissection,<br />
information that is especially useful in tumor surgeries where margins may not be visible to the unassisted eye.<br />
<strong>Keywords</strong>: image guided therapy, intra-operative mass spectrometry, iKnife, 3D Slicer, open-source, rapid evaporative<br />
ionization mass spectrometry (REIMS)</p>},
internal-note = {NOTE(review): abstract contains raw HTML (<br />) and unescaped % characters spanning many physical lines; fine for web rendering, but escape % as \% if this file is ever compiled with LaTeX. Entry has no url or doi field - consider adding the SPIE DOI if available},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
standard clinical imaging during a procedure. This gives them a good idea where they are in the patients’ anatomy but<br />
doesn’t provide information about the type of tissue they are dissecting. In this paper we demonstrate an integrated<br />
system consisting of a spatially navigated surgical electrocautery combined with real-time molecular analysis of the<br />
dissected tissue using mass spectrometry.<br />
<strong>METHODS</strong>: Using the 3D Slicer software package, we have integrated a commercially available neurosurgical<br />
navigation system with an intra-operative mass spectrometer (colloquially referred to as the intelligent knife, or iKnife)<br />
that analyzes the charged ions in the smoke created during cauterization. We demonstrate this system using a simulated<br />
patient comprised of an MRI scan from a brain cancer patient deformably registered to a plastic skull model. On the<br />
skull model we placed porcine and bovine tissues to simulate cancerous and healthy tissue, respectively. We built a<br />
PCA/LDA model to distinguish between these tissue types. The tissue classifications were displayed in a spatially<br />
localized manner in the pre-operative imaging, in both 2D and 3D views.<br />
<strong>RESULTS</strong>: We have demonstrated the feasibility of performing spatially navigated intra-operative analysis of tissues by<br />
mass spectrometry. We show that machine learning can classify our sample tissues, with an average computed<br />
confidence of 99.37 % for porcine tissue and 99.36% for bovine tissue.<br />
<strong>CONCLUSION</strong>: In this paper we demonstrate a proof of concept system for navigated intra-operative molecular<br />
analysis. This system may enable intra-operative awareness of spatially localized tissue classification during dissection,<br />
information that is especially useful in tumor surgeries where margins may not be visible to the unassisted eye.<br />
<strong>Keywords</strong>: image guided therapy, intra-operative mass spectrometry, iKnife, 3D Slicer, open-source, rapid evaporative<br />
ionization mass spectrometry (REIMS)</p>
Wu, Victoria; Ungi, Tamas; Fichtinger, Gabor
Using Deep Learning for Transverse Process Detection in Spinal Ultrasounds Conference
17th Annual Imaging Network Ontario Symposium (ImNO), Imaging Network Ontario (ImNO), London, Ontario, 2019.
@conference{Wu2019a,
  title     = {Using Deep Learning for Transverse Process Detection in Spinal Ultrasounds},
  author    = {Victoria Wu and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wu2019a.pdf},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  booktitle = {17th Annual Imaging Network Ontario Symposium (ImNO)},
  publisher = {Imaging Network Ontario (ImNO)},
  address   = {London, Ontario},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Perrin, Sydney; Baum, Zachary M C; Asselin, Mark; Underwood, Grace; Choueib, Saleh; Lia, H.; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
Reproducibility of freehand calibrations for ultrasound-guided needle navigation Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Perrin2019a,
  title     = {Reproducibility of freehand calibrations for ultrasound-guided needle navigation},
  author    = {Sydney Perrin and Zachary M C Baum and Mark Asselin and Grace Underwood and Saleh Choueib and H. Lia and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Perrin2019a.pdf},
  year      = {2019},
  date      = {2019-01-01},
  urldate   = {2019-01-01},
  booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {10951},
  address   = {San Diego, California},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}