Biography
Gabor Fichtinger received BSc and MSc degrees in Electrical Engineering, and a Doctoral degree in Computer Science, from the Technical University of Budapest, Hungary, in 1986, 1988, and 1990, respectively. He has a balanced academic, industrial, and clinical background in the development and clinical inauguration of image-guided surgery and interventional navigation systems. His specialty is image-guided needle-placement procedures, primarily for cancer diagnosis and therapy and musculoskeletal conditions. Dr. Fichtinger is a Professor of Computer Science, with cross appointments in Electrical and Computer Engineering, Mechanical and Materials Engineering, Surgery and Pathology at Queen’s University, Canada, with adjunct appointments at the Johns Hopkins University, USA, Western University, Canada and the Medical University of Vienna, Austria. Dr. Fichtinger holds a Cancer Care Ontario Research Chair in Cancer Imaging.
Affiliations
- Professor and Canada Research Chair (Tier 1) in Computer-Integrated Surgery, School of Computing, w/ cross appointments in Surgery, Pathology and Molecular Medicine, Mechanical and Materials Engineering, Electrical and Computer Engineering, Queen's University, Kingston, Ontario, Canada
- Fellow of RSC (Royal Society of Canada)
- Fellow of IEEE (Institute of Electrical and Electronics Engineers)
- Fellow of AIMBE (American Institute for Medical and Biological Engineering)
- Fellow of MICCAI (Medical Image Computing and Computer Assisted Interventions)
- Adjunct Professor of Medical Physics and Biomedical Engineering, Medical University of Vienna, Austria
- Adjunct Professor of Computer Science and Radiology and Radiological Science, Johns Hopkins University, Baltimore, MD, USA
- Adjunct Research Professor of Medical Biophysics, Western University, London, ON, Canada
- Affiliated Faculty, The Techna Institute, University Health Network and University of Toronto, Canada
- Honorary University Professor, Obuda University, Budapest, Hungary
Publications
Kim, Andrew S.; Yeung, Chris; Szabo, Robert; Sunderland, Kyle; Hisey, Rebecca; Morton, David; Kikinis, Ron; Diao, Babacar; Mousavi, Parvin; Ungi, Tamas; Fichtinger, Gabor
Percutaneous nephrostomy needle guidance using real-time 3D anatomical visualization with live ultrasound segmentation Proceedings
SPIE, 2024.
@inproceedings{Kim2024,
title = {Percutaneous nephrostomy needle guidance using real-time 3D anatomical visualization with live ultrasound segmentation},
author = {Andrew S. Kim and Chris Yeung and Robert Szabo and Kyle Sunderland and Rebecca Hisey and David Morton and Ron Kikinis and Babacar Diao and Parvin Mousavi and Tamas Ungi and Gabor Fichtinger},
editor = {Maryam E. Rettmann and Jeffrey H. Siewerdsen},
booktitle = {Medical Imaging 2024: Image-Guided Procedures, Robotic Interventions, and Modeling},
doi = {10.1117/12.3006533},
year = {2024},
date = {2024-03-29},
urldate = {2024-03-29},
publisher = {SPIE},
abstract = {
PURPOSE: Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software. METHODS: Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the trials were analyzed for needle tip distance to the center of the target calyx, needle insertion time, and success rate. Participants also completed a survey on their experience. RESULTS: Using the visualization aid showed significantly higher accuracy, while needle insertion time and success rate were not statistically significant at our sample size. Participants mostly responded positively to the visualization aid, and 80% found it easier to use than ultrasound needle guidance. CONCLUSION: We found that our visualization aid produced increased accuracy and an overall positive experience. We demonstrated that our system is functional and stable and believe that the workflow with this system can be applied to other procedures. This visualization aid system is effective on phantoms and is ready for translation with clinical data.},
keywords = {},
pubstate = {published},
tppubtype = {proceedings}
}
PURPOSE: Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software. METHODS: Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the trials were analyzed for needle tip distance to the center of the target calyx, needle insertion time, and success rate. Participants also completed a survey on their experience. RESULTS: Using the visualization aid showed significantly higher accuracy, while needle insertion time and success rate were not statistically significant at our sample size. Participants mostly responded positively to the visualization aid, and 80% found it easier to use than ultrasound needle guidance. CONCLUSION: We found that our visualization aid produced increased accuracy and an overall positive experience. We demonstrated that our system is functional and stable and believe that the workflow with this system can be applied to other procedures. This visualization aid system is effective on phantoms and is ready for translation with clinical data.
Connolly, Laura; Fooladgar, Fahimeh; Jamzad, Amoon; Kaufmann, Martin; Syeda, Ayesha; Ren, Kevin; Abolmaesumi, Purang; Rudan, John F; McKay, Doug; Fichtinger, Gabor; others,
ImSpect: Image-driven self-supervised learning for surgical margin evaluation with mass spectrometry Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, pp. 1–8, 2024.
@article{connolly2024imspect,
title = {ImSpect: Image-driven self-supervised learning for surgical margin evaluation with mass spectrometry},
author = {Laura Connolly and Fahimeh Fooladgar and Amoon Jamzad and Martin Kaufmann and Ayesha Syeda and Kevin Ren and Purang Abolmaesumi and John F Rudan and Doug McKay and Gabor Fichtinger and others},
doi = {10.1007/s11548-024-03106-1},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
pages = {1--8},
publisher = {Springer},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Radcliffe, Olivia; Connolly, Laura; Ungi, Tamas; Yeo, Caitlin; Rudan, John F.; Fichtinger, Gabor; Mousavi, Parvin
Navigated surgical resection cavity inspection for breast conserving surgery Proceedings
2023.
@proceedings{Radcliffe2023,
title = {Navigated surgical resection cavity inspection for breast conserving surgery},
author = {Olivia Radcliffe and Laura Connolly and Tamas Ungi and Caitlin Yeo and John F. Rudan and Gabor Fichtinger and Parvin Mousavi},
doi = {10.1117/12.2654015},
year = {2023},
date = {2023-04-03},
publisher = {SPIE},
abstract = {Up to 40% of Breast Conserving Surgery (BCS) patients must undergo repeat surgery because cancer is left behind in the resection cavity. The mobility of the breast resection cavity makes it difficult to localize residual cancer and, therefore, cavity shaving is a common technique for cancer removal. Cavity shaving involves removing an additional layer of tissue from the entire resection cavity, often resulting in unnecessary healthy tissue loss. In this study, we demonstrated a navigation system and open-source software module that facilitates visualization of the breast resection cavity for targeted localization of residual cancer.},
keywords = {},
pubstate = {published},
tppubtype = {proceedings}
}
Cernelev, Pavel-Dumitru; Moga, Kristof; Groves, Leah; Haidegger, Tamás; Fichtinger, Gabor; Ungi, Tamas
Determining boundaries of accurate tracking for electromagnetic sensors Conference
SPIE, 2023.
@conference{Cernelev2023,
  title     = {Determining boundaries of accurate tracking for electromagnetic sensors},
  author    = {Pavel-Dumitru Cernelev and Kristof Moga and Leah Groves and Tamás Haidegger and Gabor Fichtinger and Tamas Ungi},
  editor    = {Cristian A. Linte and Jeffrey H. Siewerdsen},
  doi       = {10.1117/12.2654428},
  year      = {2023},
  date      = {2023-04-03},
  urldate   = {2023-04-03},
  publisher = {SPIE},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor; Korzeniowski, Martin
Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction Conference
Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting, Canadian Association for Radiation Oncologists, 2022.
@conference{Kitner2022ab,
  title     = {Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction},
  author    = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger and Martin Korzeniowski},
  year      = {2022},
  date      = {2022-09-01},
  urldate   = {2022-09-01},
  booktitle = {Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting},
  publisher = {Canadian Association for Radiation Oncologists},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Fooladgar, Fahimeh; Jamzad, Amoon; Connolly, Laura; Santilli, Alice; Kaufmann, Martin; Ren, Kevin; Abolmaesumi, Purang; Rudan, John; McKay, Doug; Fichtinger, Gabor; Mousavi, Parvin
Uncertainty estimation for margin detection in cancer surgery using mass spectrometry Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, 2022.
@article{Fooladgar2022,
title = {Uncertainty estimation for margin detection in cancer surgery using mass spectrometry},
author = {Fahimeh Fooladgar and Amoon Jamzad and Laura Connolly and Alice Santilli and Martin Kaufmann and Kevin Ren and Purang Abolmaesumi and John Rudan and Doug McKay and Gabor Fichtinger and Parvin Mousavi},
doi = {10.1007/s11548-022-02764-3},
year = {2022},
date = {2022-09-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Connolly, Laura; Deguet, Anton; Leonard, Simon; Tokuda, Junichi; Ungi, Tamas; Krieger, Axel; Kazanzides, Peter; Mousavi, Parvin; Fichtinger, Gabor; Taylor, Russell H.
Bridging 3D Slicer and ROS2 for Image-Guided Robotic Interventions Journal Article
In: Sensors, vol. 22, 2022.
@article{Connolly2022c,
title = {Bridging 3D Slicer and ROS2 for Image-Guided Robotic Interventions},
author = {Laura Connolly and Anton Deguet and Simon Leonard and Junichi Tokuda and Tamas Ungi and Axel Krieger and Peter Kazanzides and Parvin Mousavi and Gabor Fichtinger and Russell H. Taylor},
doi = {10.3390/s22145336},
year = {2022},
date = {2022-07-01},
journal = {Sensors},
volume = {22},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline Conference
Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting, Canadian Organization of Medical Physicists, 2022.
@conference{Kitner2022a,
title = {Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline},
author = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
booktitle = {Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting},
publisher = {Canadian Organization of Medical Physicists},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hu, Zoe; Fauerbach, Paola V. Nasute; Yeung, Chris; Ungi, Tamas; Rudan, John; Engel, C. Jay; Mousavi, Parvin; Fichtinger, Gabor; Jabs, Doris
Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 17, no. 9, pp. 1663–1672, 2022.
@article{Hu2022,
title = {Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation},
author = {Zoe Hu and Paola V. Nasute Fauerbach and Chris Yeung and Tamas Ungi and John Rudan and C. Jay Engel and Parvin Mousavi and Gabor Fichtinger and Doris Jabs},
doi = {10.1007/s11548-022-02658-4},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {17},
number = {9},
pages = {1663--1672},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Connolly, Laura; Jamzad, Amoon; Nikniazi, Arash; Poushimin, Rana; Nunzi, Jean Michel; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
Feasibility of combined optical and acoustic imaging for surgical cavity scanning Conference
SPIE Medical Imaging 2022: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 12034, San Diego (online), 2022.
@conference{Connolly2022,
title = {Feasibility of combined optical and acoustic imaging for surgical cavity scanning},
author = {Laura Connolly and Amoon Jamzad and Arash Nikniazi and Rana Poushimin and Jean Michel Nunzi and John Rudan and Gabor Fichtinger and Parvin Mousavi},
doi = {10.1117/12.2611964},
year = {2022},
date = {2022-04-01},
booktitle = {SPIE Medical Imaging 2022: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {12034},
address = {San Diego (online)},
abstract = {PURPOSE: Over 30% of breast conserving surgery patients must undergo repeat surgery to address incomplete tumor resection. We hypothesize that the addition of a robotic cavity scanning system can improve the success rates of these procedures by performing additional, intraoperative imaging to detect left-over cancer cells. In this study, we assess the feasibility of a combined optical and acoustic imaging approach for this cavity scanning system. METHODS: Dual-layer tissue phantoms are imaged with both throughput broadband spectroscopy and an endocavity ultrasound probe. The absorbance and transmittance of the incident light from the broadband source is used to characterize each tissue sample optically. Additionally, a temporally enhanced ultrasound approach is used to distinguish the heterogeneity of the tissue sample by classifying individual pixels in the ultrasound image with a support vector machine. The goal of this combined approach is to use optical characterization to classify the tissue surface, and acoustic characterization to classify the sample heterogeneity. RESULTS: Both optical and acoustic characterization demonstrated promising preliminary results. The class of each tissue sample is distinctly separable based on the transmittance and absorption of the broadband light. Additionally, an SVM trained on the temporally enhance ultrasound signals for each tissue type, showed 82% linear separability of labelled temporally enhanced ultrasound sequences in our test set. CONCLUSIONS: By combining broadband and ultrasound imaging, we demonstrate a potential non-destructive imaging approach for this robotic cavity scanning system. With this approach, our system can detect both surface level tissue characteristics and depth information. Applying this to breast conserving surgery can help inform the surgeon about the tissue composition of the resection cavity after initial tumor resection.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Klosa, Elizabeth; Hisey, R.; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Identifying tissues for task recognition in training of open inguinal hernia repairs Conference
Imaging Network of Ontario Symposium, 2022.
@conference{Klosa2022b,
  title     = {Identifying tissues for task recognition in training of open inguinal hernia repairs},
  author    = {Elizabeth Klosa and R. Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Klosa2022b.pdf},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of using object detection for performance assessment in central venous catherization Conference
Imaging Network of Ontario Symposium, 2022.
@conference{ODriscoll2022b,
  title     = {Feasibility of using object detection for performance assessment in central venous catherization},
  author    = {Olivia O’Driscoll and R. Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2021b.pdf},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automated Catheter Segmentation in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy Conference
Imaging Network Ontario (IMNO) 2022 Symposium, Imaging Network of Ontario, Online, 2022.
@conference{Kitner2022ac,
  title     = {Automated Catheter Segmentation in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy},
  author    = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
  year      = {2022},
  date      = {2022-02-01},
  urldate   = {2022-02-01},
  booktitle = {Imaging Network Ontario (IMNO) 2022 Symposium},
  publisher = {Imaging Network of Ontario},
  address   = {Online},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Klosa, Elizabeth; Hisey, R.; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Tissue segmentation for workflow recognition in open inguinal hernia repair training Conference
SPIE Medical Imaging, SPIE Medical Imaging SPIE Medical Imaging, San Diego, 2022.
@conference{Klosa2022a,
  title        = {Tissue segmentation for workflow recognition in open inguinal hernia repair training},
  author       = {Elizabeth Klosa and R. Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Klosa2022a.pdf},
  year         = {2022},
  date         = {2022-02-01},
  urldate      = {2022-02-01},
  booktitle    = {SPIE Medical Imaging},
  publisher    = {SPIE Medical Imaging},
  address      = {San Diego},
  organization = {SPIE Medical Imaging},
  abstract     = {PURPOSE: As medical education adopts a competency-based training method, experts are spending substantial amounts of time instructing and assessing trainees’ competence. In this study, we look to develop a computer-assisted training platform that can provide instruction and assessment of open inguinal hernia repairs without needing an expert observer. We recognize workflow tasks based on the tool-tissue interactions, suggesting that we first need a method to identify tissues. This study aims to train a neural network in identifying tissues in a low-cost phantom as we work towards identifying the tool-tissue interactions needed for task recognition. METHODS: Eight simulated tissues were segmented throughout five videos from experienced surgeons who performed open inguinal hernia repairs on phantoms. A U-Net was trained using leave-one-user-out cross validation. The average F-score, false positive rate and false negative rate were calculated for each tissue to evaluate the U-Net’s performance. RESULTS: Higher F-scores and lower false negative and positive rates were recorded for the skin, hernia sac, spermatic cord, and nerves, while slightly lower metrics were recorded for the subcutaneous tissue, Scarpa’s fascia, external oblique aponeurosis and superficial epigastric vessels. CONCLUSION: The U-Net performed better in recognizing tissues that were relatively larger in size and more prevalent, while struggling to recognize smaller tissues only briefly visible. Since workflow recognition does not require perfect segmentation, we believe our U-Net is sufficient in recognizing the tissues of an inguinal hernia repair phantom. Future studies will explore combining our segmentation U-Net with tool detection as we work towards workflow recognition.},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of object detection for skill assessment in central venous catheterization Conference
SPIE Medical Imaging, SPIE Medical Imaging SPIE Medical Imaging, San Diego, 2022.
@conference{ODriscoll2022a,
  title        = {Feasibility of object detection for skill assessment in central venous catheterization},
  author       = {Olivia O’Driscoll and R. Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2022a.pdf},
  year         = {2022},
  date         = {2022-02-01},
  urldate      = {2022-02-01},
  booktitle    = {SPIE Medical Imaging},
  publisher    = {SPIE Medical Imaging},
  address      = {San Diego},
  organization = {SPIE Medical Imaging},
  abstract     = {<p><strong>Purpose: </strong>Computer-assisted surgical skill assessment methods have traditionally relied on tracking tool motion with physical sensors. These tracking systems can be expensive, bulky, and impede tool function. Recent advances in object detection networks have made it possible to quantify tool motion using only a camera. These advances open the door for a low-cost alternative to current physical tracking systems for surgical skill assessment. This study determines the feasibility of using metrics computed with object detection by comparing them to widely accepted metrics computed using traditional tracking methods in central venous catheterization. <strong>Methods:</strong> Both video and tracking data were recorded from participants performing central venous catheterization on a venous access phantom. A Faster Region-Based Convolutional Neural Network was trained to recognize the ultrasound probe and syringe on the video data. Tracking-based metrics were computed using the Perk Tutor extension of 3D Slicer. The path length and usage time for each tool were then computed using both the video and tracking data. The metrics from object detection and tracking were compared using Spearman rank correlation. <strong>Results: </strong>The path lengths had a rank correlation coefficient of 0.22 for the syringe (p<0.03) and 0.35 (p<0.001) for the ultrasound probe. For the usage times, the correlation coefficient was 0.37 (p<0.001) for the syringe and 0.34 (p<0.001) for the ultrasound probe. <strong>Conclusions</strong>: The video-based metrics correlated significantly with the tracked metrics, suggesting that object detection could be a feasible skill assessment method for central venous catheterization.</p>},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Kaufmann, Martin; Vaysse, Pierre-Maxence; Savage, Adele; Amgheib, Ala; Marton, András; Manoli, Eftychios; Fichtinger, Gabor; Pringle, Steven D.; Rudan, John; Heeren, Ron M. A.; Takáts, Zoltán; Balog, Júlia; Siegel, Tiffany Porta
Harmonization of Rapid Evaporative Ionization Mass Spectrometry Workflows across Four Sites and Testing Using Reference Material and Local Food-Grade Meats Journal Article
In: Metabolites, 2022.
@article{Kaufmann2022b,
  title     = {Harmonization of Rapid Evaporative Ionization Mass Spectrometry Workflows across Four Sites and Testing Using Reference Material and Local Food-Grade Meats},
  author    = {Martin Kaufmann and Pierre-Maxence Vaysse and Adele Savage and Ala Amgheib and András Marton and Eftychios Manoli and Gabor Fichtinger and Steven D. Pringle and John Rudan and Ron M. A. Heeren and Zoltán Takáts and Júlia Balog and Tiffany Porta Siegel},
  year      = {2022},
  date      = {2022-01-01},
  journal   = {Metabolites},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Connolly, Laura; Jamzad, Amoon; Nikniazi, Arash; Poushimin, Rana; Lasso, Andras; Sunderland, Kyle R.; Ungi, Tamas; Nunzi, Jean Michel; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
An open-source testbed for developing image-guided robotic tumor-bed inspection Conference
Imaging Network of Ontario (ImNO) Symposium, 2022.
@conference{connolly2022b,
  title     = {An open-source testbed for developing image-guided robotic tumor-bed inspection},
  author    = {Laura Connolly and Amoon Jamzad and Arash Nikniazi and Rana Poushimin and Andras Lasso and Kyle R. Sunderland and Tamas Ungi and Jean Michel Nunzi and John Rudan and Gabor Fichtinger and Parvin Mousavi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/01/Connolly2022b.pdf},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  booktitle = {Imaging Network of Ontario (ImNO) Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Poole, Meredith; Ungi, Tamas; Fichtinger, Gabor; Zevin, Boris
Training in soft tissue resection using real-time visual computer navigation feedback from the Surgery Tutor: A randomized controlled trial Journal Article
In: Surgery, 2021, ISSN: 0039-6060.
@article{Poole2021,
title = {Training in soft tissue resection using real-time visual computer navigation feedback from the Surgery Tutor: A randomized controlled trial},
author = {Meredith Poole and Tamas Ungi and Gabor Fichtinger and Boris Zevin},
url = {https://www.sciencedirect.com/science/article/pii/S0039606021011855},
doi = {10.1016/j.surg.2021.11.037},
issn = {0039-6060},
year = {2021},
date = {2021-12-01},
urldate = {2021-12-01},
journal = {Surgery},
abstract = {<p>Background In competency-based medical education, surgery trainees are often required to learn procedural skills in a simulated setting before proceeding to the clinical environment. The Surgery Tutor computer navigation platform allows for real-time proctor-less assessment of open soft tissue resection skills; however, the use of this platform as an aid in acquisition of procedural skills is yet to be explored. Methods In this prospective randomized controlled trial, 20 final year medical students were randomized to receive either training with real-time computer navigation feedback (Intervention, n = 10) or simulation training without navigation feedback (Control, n = 10) during resection of simulated non-palpable soft tissue tumors. Real-time computer navigation feedback allowed participants to visualize the position of their scalpel relative to the tumor. Computer navigation feedback was removed for postintervention assessment. Primary outcome was positive margin rate. Secondary outcomes were procedure time, mass of tissue excised, number of scalpel motions, and distance traveled by the scalpel. Results Training with real-time computer navigation resulted in a significantly lower positive margin rate as compared to training without navigation feedback (0% vs 40%, P = .025). All other performance metrics were not significantly different between the 2 groups. Participants in the intervention group displayed significant improvement in positive margin rate from baseline to final assessment (80% vs 0%, P < .01), whereas participants in the Control group did not. Conclusion Real-time visual computer navigation feedback from the Surgery Tutor resulted in superior acquisition of procedural skills as compared to training without navigation feedback.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
System for central venous catheterization training using computer vision-based workflow feedback Journal Article
In: IEEE Transactions on Biomedical Engineering, 2021.
@article{Hisey2021b,
  title     = {System for central venous catheterization training using computer vision-based workflow feedback},
  author    = {R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  year      = {2021},
  date      = {2021-10-01},
  urldate   = {2021-10-01},
  journal   = {IEEE Transactions on Biomedical Engineering},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Connolly, Laura; Deguet, Anton; Sunderland, Kyle R.; Lasso, Andras; Ungi, Tamas; Rudan, John; Taylor, Russell H.; Mousavi, Parvin; Fichtinger, Gabor
An open-source platform for cooperative semi-autonomous robotic surgery Conference
IEEE International Conference on Autonomous Systems, IEEE IEEE, Montreal, Quebec, 2021.
@conference{Connolly2021,
title = {An open-source platform for cooperative semi-autonomous robotic surgery},
author = {Laura Connolly and Anton Deguet and Kyle R. Sunderland and Andras Lasso and Tamas Ungi and John Rudan and Russell H. Taylor and Parvin Mousavi and Gabor Fichtinger},
doi = {10.1109/ICAS49788.2021.9551149},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
booktitle = {IEEE International Conference on Autonomous Systems},
publisher = {IEEE},
address = {Montreal, Quebec},
organization = {IEEE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Connolly, Laura; Jamzad, Amoon; Kaufmann, Martin; Farquharson, Catriona E.; Ren, Kevin; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
Combined Mass Spectrometry and Histopathology Imaging for Perioperative Tissue Assessment in Cancer Surgery Journal Article
In: Journal of Imaging, vol. 7, no. 203, 2021.
@article{Connolly2021c,
title = {Combined Mass Spectrometry and Histopathology Imaging for Perioperative Tissue Assessment in Cancer Surgery},
author = {Laura Connolly and Amoon Jamzad and Martin Kaufmann and Catriona E. Farquharson and Kevin Ren and John Rudan and Gabor Fichtinger and Parvin Mousavi},
doi = {10.3390/jimaging7100203},
year = {2021},
date = {2021-10-01},
journal = {Journal of Imaging},
volume = {7},
number = {203},
abstract = {<p><span style="color:rgb(34, 34, 34); font-family:arial; font-size:12px">Mass spectrometry is an effective imaging tool for evaluating biological tissue to detect cancer. With the assistance of deep learning, this technology can be used as a perioperative tissue assessment tool that will facilitate informed surgical decisions. To achieve such a system requires the development of a database of mass spectrometry signals and their corresponding pathology labels. Assigning correct labels, in turn, necessitates precise spatial registration of histopathology and mass spectrometry data. This is a challenging task due to the domain differences and noisy nature of images. In this study, we create a registration framework for mass spectrometry and pathology images as a contribution to the development of perioperative tissue assessment. In doing so, we explore two opportunities in deep learning for medical image registration, namely, unsupervised, multi-modal deformable image registration and evaluation of the registration. We test this system on prostate needle biopsy cores that were imaged with desorption electrospray ionization mass spectrometry (DESI) and show that we can successfully register DESI and histology images to achieve accurate alignment and, consequently, labelling for future training. This automation is expected to improve the efficiency and development of a deep learning architecture that will benefit the use of mass spectrometry imaging for cancer diagnosis.</span></p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Barr, Colton; Hisey, R.; Ungi, Tamas; Fichtinger, Gabor
Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization Conference
43rd Conference of the IEEE Engineering Medicine and Biology Society, 2021.
@conference{CBarr2021b,
  title     = {Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization},
  author    = {Colton Barr and R. Hisey and Tamas Ungi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CBarr2021a.pdf},
  year      = {2021},
  date      = {2021-10-01},
  urldate   = {2021-10-01},
  booktitle = {43rd Conference of the IEEE Engineering Medicine and Biology Society},
  abstract  = {<p>Central Line Tutor is a system that facilitates real-time feedback during training for central venous catheterization. One limitation of Central Line Tutor is its reliance on expensive, cumbersome electromagnetic tracking to facilitate various training aids, including ultrasound task identification and segmentation of neck vasculature. The purpose of this study is to validate deep learning methods for vessel segmentation and ultrasound pose classification in order to mitigate the system’s reliance on electromagnetic tracking. A large dataset of segmented and classified ultrasound images was generated from participant data captured using Central Line Tutor. A U-Net architecture was used to perform vessel segmentation, while a shallow Convolutional Neural Network (CNN) architecture was designed to classify the pose of the ultrasound probe. A second classifier architecture was also tested that used the U-Net output as the CNN input. The mean testing set Intersect over Union score for U-Net cross-validation was 0.746 ± 0.052. The mean test set classification accuracy for the CNN was 92.0% ± 3.0, while the U-Net + CNN achieved 92.7% ± 2.1%. This study highlights the potential for deep learning on ultrasound images to replace the current electromagnetic tracking-based methods for vessel segmentation and ultrasound pose classification, and represents an important step towards removing the electromagnetic tracker altogether. Removing the need for an external tracking system would significantly reduce the cost of Central Line Tutor and make it far more accessible to the medical trainees that would benefit from it most.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Fichtinger, Gabor; Mousavi, Parvin; Ungi, Tamas; Fenster, Aaron; Abolmaesumi, Purang; Kronreif, Gernot; Ruiz-Alzola, Juan; Ndoye, Alain; Diao, Babacar; Kikinis, Ron
Design of an Ultrasound-Navigated Prostate Cancer Biopsy System for Nationwide Implementation in Senegal Journal Article
In: Journal of Imaging, vol. 7, no. 8, pp. 154, 2021, ISSN: 2313-433X.
@article{Fichtinger2021,
  title     = {Design of an Ultrasound-Navigated Prostate Cancer Biopsy System for Nationwide Implementation in Senegal},
  author    = {Gabor Fichtinger and Parvin Mousavi and Tamas Ungi and Aaron Fenster and Purang Abolmaesumi and Gernot Kronreif and Juan Ruiz-Alzola and Alain Ndoye and Babacar Diao and Ron Kikinis},
  url       = {https://www.mdpi.com/2313-433X/7/8/154},
  doi       = {10.3390/jimaging7080154},
  issn      = {2313-433X},
  year      = {2021},
  date      = {2021-08-01},
  urldate   = {2021-08-01},
  journal   = {Journal of Imaging},
  volume    = {7},
  number    = {8},
  pages     = {154},
  abstract  = {<p>This paper presents the design of NaviPBx, an ultrasound-navigated prostate cancer biopsy system. NaviPBx is designed to support an affordable and sustainable national healthcare program in Senegal. It uses spatiotemporal navigation and multiparametric transrectal ultrasound to guide biopsies. NaviPBx integrates concepts and methods that have been independently validated previously in clinical feasibility studies and deploys them together in a practical prostate cancer biopsy system. NaviPBx is based entirely on free open-source software and will be shared as a free open-source program with no restriction on its use. NaviPBx is set to be deployed and sustained nationwide through the Senegalese Military Health Service. This paper reports on the results of the design process of NaviPBx. Our approach concentrates on “frugal technology”, intended to be affordable for low–middle income (LMIC) countries. Our project promises the wide-scale application of prostate biopsy and will foster time-efficient development and programmatic implementation of ultrasound-guided diagnostic and therapeutic interventions in Senegal and beyond.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Fauerbach, Paola V. Nasute; Tyryshkin, Kathrin; Rodrigo, Silvia Perez; Rudan, John; Fichtinger, Gabor; Reedijk, Michael; Varma, Sonal; Berman, David M.
Lack of definitive presurgical pathological diagnosis is associated with inadequate surgical margins in breast-conserving surgery Journal Article
In: European Journal of Surgical Oncology, 2021, ISSN: 0748-7983.
@article{NasuteFauerbach2021,
  title     = {Lack of definitive presurgical pathological diagnosis is associated with inadequate surgical margins in breast-conserving surgery},
  author    = {Paola V. Nasute Fauerbach and Kathrin Tyryshkin and Silvia Perez Rodrigo and John Rudan and Gabor Fichtinger and Michael Reedijk and Sonal Varma and David M. Berman},
  url       = {https://www.sciencedirect.com/science/article/pii/S0748798321005424},
  doi       = {10.1016/j.ejso.2021.05.047},
  issn      = {0748-7983},
  year      = {2021},
  date      = {2021-06-01},
  urldate   = {2021-06-01},
  journal   = {European Journal of Surgical Oncology},
  abstract  = {<p>Purpose To determine the impact of definitive presurgical diagnosis on surgical margins in breast-conserving surgery (BCS) for primary carcinomas; clinicopathological features were also analyzed. Methods This retrospective study included women who underwent BCS for primary carcinomas in 2016 and 2017. Definitive presurgical diagnosis was defined as having a presurgical core needle biopsy (CNB) and not being upstaged between biopsy and surgery. Biopsy data and imaging findings including breast density were retrieved. Inadequate surgical margins (IM) were defined per latest ASCO and ASTRO guidelines. Univariable and multivariable analyses were performed. Results 360 women (median age, 66) met inclusion criteria with 1 having 2 cancers. 82.5% (298/361) were invasive cancers while 17.5% (63/361) were ductal carcinoma in situ (DCIS). Most biopsies were US-guided (284/346, 82.0%), followed by mammographic (60/346, 17.3%), and MRI-guided (2/346, 0.6%). US and mammographic CNB yielded median samples of 2 and 4, respectively, with a 14G needle. 15 patients (4.2%) lacked presurgical CNB. The IM rate was 30.0%. In multivariable analysis, large invasive cancers (>20 mm), dense breasts, and DCIS were associated with IM (p = 0.029, p = 0.010, and p = 0.013, respectively). Most importantly, lack of definitive presurgical diagnosis was a risk factor for IM (OR, 2.35; 95% CI: 1.23–4.51, p = 0.010). In contrast, neither patient age (<50) nor aggressive features (e.g., LVI) were associated with IM. Conclusion Lack of a definitive presurgical diagnosis was associated with a two-fold increase of IM in BCS; other risk factors were dense breasts, large invasive cancers, and DCIS.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
Central Line Tutor: using computer vision workflow recognition in a central venous catheterization training system Conference
Imaging Network of Ontario Symposium, 2021.
@conference{Hisey2021a,
  title     = {Central Line Tutor: using computer vision workflow recognition in a central venous catheterization training system},
  author    = {R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/RHisey_ImNO2021.pdf},
  year      = {2021},
  date      = {2021-02-01},
  urldate   = {2021-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Wu, Catherine O.; Diao, Babacar; Ungi, Tamas; Sedghi, Alireza; Kikinis, Ron; Mousavi, Parvin; Fichtinger, Gabor
Development of an open-source system for prostate biopsy training in Senegal Conference
SPIE Medical Imaging 2021: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 11598, 2021.
@conference{CWu2021a,
  title     = {Development of an open-source system for prostate biopsy training in Senegal},
  author    = {Catherine O. Wu and Babacar Diao and Tamas Ungi and Alireza Sedghi and Ron Kikinis and Parvin Mousavi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2021a-poster_0.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2021a_0.pdf},
  year      = {2021},
  date      = {2021-02-01},
  urldate   = {2021-02-01},
  booktitle = {SPIE Medical Imaging 2021: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {11598},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Barr, Colton; Hisey, R.; Ungi, Tamas; Fichtinger, Gabor
Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization Conference
Imaging Network of Ontario Symposium, 2021.
@conference{CBarr2021a,
  title     = {Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization},
  author    = {Colton Barr and R. Hisey and Tamas Ungi and Gabor Fichtinger},
  year      = {2021},
  date      = {2021-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
Object detection to compute performance metrics for skill assessment in central venous catheterization Conference
SPIE Medical Imaging, 2021.
@conference{ODriscoll2021a,
  title     = {Object detection to compute performance metrics for skill assessment in central venous catheterization},
  author    = {Olivia O’Driscoll and R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  url       = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/11598/1159816/Object-detection-to-compute-performance-metrics-for-skill-assessment-in/10.1117/12.2581889.short?SSO=1
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2021a.pdf},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  booktitle = {SPIE Medical Imaging},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
O’Driscoll, Olivia; Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
Surgical tool tracking with object detection for performance assessment in central venous catheterization Conference
Imaging Network of Ontario Symposium, 2021.
@conference{ODriscoll2021b,
  title     = {Surgical tool tracking with object detection for performance assessment in central venous catheterization},
  author    = {Olivia O’Driscoll and R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2021b.pdf},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Connolly, Laura; Sunderland, Kyle R.; Lasso, Andras; Deguet, Anton; Ungi, Tamas; Rudan, John; Taylor, Russell H.; Mousavi, Parvin; Fichtinger, Gabor
A platform for robot-assisted Intraoperative imaging in breast conserving surgery Conference
Imaging Network of Ontario Symposium, Imaging Network of Ontario Symposium, Online, 2021.
@conference{Connolly2021b,
  title     = {A platform for robot-assisted Intraoperative imaging in breast conserving surgery},
  author    = {Laura Connolly and Kyle R. Sunderland and Andras Lasso and Anton Deguet and Tamas Ungi and John Rudan and Russell H. Taylor and Parvin Mousavi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2021a_1.pdf},
  year      = {2021},
  date      = {2021-01-01},
  urldate   = {2021-01-01},
  booktitle = {Imaging Network of Ontario Symposium},
  publisher = {Imaging Network of Ontario Symposium},
  address   = {Online},
  internal-note = {NOTE(review): author "Anton Degeut" corrected to "Anton Deguet" (JHU collaborator of R. H. Taylor) -- confirm spelling},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Gauvin, Gabrielle; Yeo, Caitlin T; Ungi, Tamas; Merchant, Shaila; Lasso, Andras; Jabs, Doris; Vaughan, Thomas; Rudan, John; Walker, Ross; Fichtinger, Gabor; Engel, C. Jay
Real-time electromagnetic navigation for breast-conserving surgery using NaviKnife technology: A matched case-control study Journal Article
In: The Breast Journal, vol. 26, no. 3, pp. 399-405, 2020.
@article{Gauvin2019,
  title     = {Real-time electromagnetic navigation for breast-conserving surgery using NaviKnife technology: A matched case-control study},
  author    = {Gabrielle Gauvin and Caitlin T Yeo and Tamas Ungi and Shaila Merchant and Andras Lasso and Doris Jabs and Thomas Vaughan and John Rudan and Ross Walker and Gabor Fichtinger and C. Jay Engel},
  doi       = {10.1111/tbj.13480},
  year      = {2020},
  date      = {2020-09-01},
  urldate   = {2020-09-01},
  journal   = {The Breast Journal},
  volume    = {26},
  number    = {3},
  pages     = {399--405},
  internal-note = {NOTE(review): citation key says 2019 but publication year is 2020; key kept unchanged to avoid breaking existing citations},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Hisey, R.; Chen, Brian; Ungi, Tamas; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor
Reinforcement learning approach for video-based task recognition in central venous catheterization Conference
Imaging Network of Ontario Symposium, 2020.
@conference{Hisey2020a,
  title     = {Reinforcement learning approach for video-based task recognition in central venous catheterization},
  author    = {R. Hisey and Brian Chen and Tamas Ungi and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/RHisey_ImNO2020.pdf},
  year      = {2020},
  date      = {2020-06-01},
  urldate   = {2020-06-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Hisey, R.; Chen, Brian; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
Recognizing workflow tasks in central venous catheterization using convolutional neural networks and reinforcement learning Conference
International Conference on Computer Assisted Radiology and Surgery, 2020.
@conference{Hisey2020b,
  title     = {Recognizing workflow tasks in central venous catheterization using convolutional neural networks and reinforcement learning},
  author    = {R. Hisey and Brian Chen and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/RHisey_CARS_2020_0.pdf},
  year      = {2020},
  date      = {2020-06-01},
  urldate   = {2020-06-01},
  booktitle = {International Conference on Computer Assisted Radiology and Surgery},
  pages     = {94--95},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Pinter, Csaba; Olding, Tim; Schreiner, L. John; Fichtinger, Gabor
Using Fuzzy Logics to Determine Optimal Oversampling Factor for Voxelizing 3D Surfaces in Radiation Therapy Journal Article
In: Soft Computing, 2020.
@article{Pinter2020a,
  title     = {Using Fuzzy Logics to Determine Optimal Oversampling Factor for Voxelizing 3D Surfaces in Radiation Therapy},
  author    = {Csaba Pinter and Tim Olding and L. John Schreiner and Gabor Fichtinger},
  url       = {https://link.springer.com/article/10.1007/s00500-020-05126-w
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Pinter2020a_0.pdf},
  doi       = {10.1007/s00500-020-05126-w},
  year      = {2020},
  date      = {2020-06-01},
  urldate   = {2020-06-01},
  journal   = {Soft Computing},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Wu, Catherine O.; Sunderland, Kyle R.; Filippov, Mihail; Sainsbury, Ben; Fichtinger, Gabor; Ungi, Tamas
Workflow for creation and evaluation of virtual nephrolithotomy training models Conference
SPIE Medical Imaging Conference 2020, vol. 11315, 2020.
@conference{CWu2020,
  title     = {Workflow for creation and evaluation of virtual nephrolithotomy training models},
  author    = {Catherine O. Wu and Kyle R. Sunderland and Mihail Filippov and Ben Sainsbury and Gabor Fichtinger and Tamas Ungi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2020a-manuscript.pdf},
  doi       = {10.1117/12.2549354},
  year      = {2020},
  date      = {2020-03-01},
  urldate   = {2020-03-01},
  booktitle = {SPIE Medical Imaging Conference 2020},
  volume    = {11315},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Pinter, Csaba; Lasso, Andras; Choueib, Saleh; Asselin, Mark; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Jolley, Matthew A.; Fichtinger, Gabor
SlicerVR for Medical Intervention Training and Planning in Immersive Virtual Reality Journal Article
In: IEEE Transactions on Medical Robotics and Bionics, vol. 2, no. 2, pp. 108-117, 2020.
@article{Pinter2020,
  title     = {SlicerVR for Medical Intervention Training and Planning in Immersive Virtual Reality},
  author    = {Csaba Pinter and Andras Lasso and Saleh Choueib and Mark Asselin and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Matthew A. Jolley and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Pinter2020a_0.pdf},
  doi       = {10.1109/TMRB.2020.2983199},
  year      = {2020},
  date      = {2020-03-01},
  urldate   = {2020-03-01},
  journal   = {IEEE Transactions on Medical Robotics and Bionics},
  volume    = {2},
  number    = {2},
  pages     = {108--117},
  internal-note = {NOTE(review): garbled names "Jean-ChristopheC." and "MatthewA" repaired; url is the same PDF as entry Pinter2020a (different paper) -- verify link},
  abstract  = {<p>Virtual reality (VR) provides immersive visualization that has proved to be useful in a variety of medical applications. Currently, however, no free open-source software platform exists that would provide comprehensive support for translational clinical researchers in prototyping experimental VR scenarios in training, planning or guiding medical interventions. By integrating VR functions in 3D Slicer, an established medical image analysis and visualization platform, SlicerVR enables virtual reality experience by a single click. It provides functions to navigate and manipulate the virtual scene, as well as various settings to abate the feeling of motion sickness. SlicerVR allows for shared collaborative VR experience both locally and remotely. We present illustrative scenarios created with SlicerVR in a wide spectrum of applications, including echocardiography, neurosurgery, spine surgery, brachytherapy, intervention training and personalized patient education. SlicerVR is freely available under BSD type license as an extension to 3D Slicer and it has been downloaded over 7,800 times at the time of writing this article.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Ungi, Tamas; Greer, Hastings; Sunderland, Kyle R.; Wu, Victoria; Baum, Zachary M C; Schlenger, Christopher; Oetgen, Matthew; Cleary, Kevin; Aylward, Stephen; Fichtinger, Gabor
Automatic spine ultrasound segmentation for scoliosis visualization and measurement Journal Article
In: IEEE Transactions on Biomedical Engineering, vol. 67, no. 11, pp. 3234 - 3241, 2020.
@article{Ungi2020,
  title     = {Automatic spine ultrasound segmentation for scoliosis visualization and measurement},
  author    = {Tamas Ungi and Hastings Greer and Kyle R. Sunderland and Victoria Wu and Zachary M C Baum and Christopher Schlenger and Matthew Oetgen and Kevin Cleary and Stephen Aylward and Gabor Fichtinger},
  url       = {https://ieeexplore.ieee.org/document/9034149
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ungi2020.pdf},
  doi       = {10.1109/TBME.2020.2980540},
  year      = {2020},
  date      = {2020-03-01},
  urldate   = {2020-03-01},
  journal   = {IEEE Transactions on Biomedical Engineering},
  volume    = {67},
  number    = {11},
  pages     = {3234--3241},
  abstract  = {<p>\emph{Objective:} Integrate tracked ultrasound and AI methods to provide a safer and more accessible alternative to X-ray for scoliosis measurement. We propose automatic ultrasound segmentation for 3-dimensional spine visualization and scoliosis measurement to address difficulties in using ultrasound for spine imaging. \emph{Methods:} We trained a convolutional neural network for spine segmentation on ultrasound scans using data from eight healthy adult volunteers. We tested the trained network on eight pediatric patients. We evaluated image segmentation and 3-dimensional volume reconstruction for scoliosis measurement. \emph{Results:} As expected, fuzzy segmentation metrics reduced when trained networks were translated from healthy volunteers to patients. Recall decreased from 0.72 to 0.64 (8.2% decrease), and precision from 0.31 to 0.27 (3.7% decrease). However, after finding optimal thresholds for prediction maps, binary segmentation metrics performed better on patient data. Recall decreased from 0.98 to 0.97 (1.6% decrease), and precision from 0.10 to 0.06 (4.5% decrease). Segmentation prediction maps were reconstructed to 3-dimensional volumes and scoliosis was measured in all patients. Measurement in these reconstructions took less than 1 minute and had a maximum error of 2.2° compared to X-ray. \emph{Conclusion:} automatic spine segmentation makes scoliosis measurement both efficient and accurate in tracked ultrasound scans. \emph{Significance:} Automatic segmentation may overcome the limitations of tracked ultrasound that so far prevented its use as an alternative of X-ray in scoliosis measurement.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Barr, Keiran; Laframboise, Jacob; Ungi, Tamas; Hookey, Lawrence; Fichtinger, Gabor
Automated segmentation of computed tomography colonography images using a 3D U-Net Conference
SPIE Medical Imaging, 2020.
@conference{KBarr2020,
  title     = {Automated segmentation of computed tomography colonography images using a 3D U-Net},
  author    = {Keiran Barr and Jacob Laframboise and Tamas Ungi and Lawrence Hookey and Gabor Fichtinger},
  doi       = {10.1117/12.2549749},
  year      = {2020},
  date      = {2020-03-01},
  urldate   = {2020-03-01},
  booktitle = {SPIE Medical Imaging},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Wu, Victoria; Ungi, Tamas; Sunderland, Kyle R.; Pigeau, Grace; Schonewille, Abigael; Fichtinger, Gabor
Using multiple frame U-net for automated segmentation of spinal ultrasound images Conference
18th Annual Imaging Network Ontario (ImNO) Symposium, 2020.
@conference{Wu2020b,
  title     = {Using multiple frame U-net for automated segmentation of spinal ultrasound images},
  author    = {Victoria Wu and Tamas Ungi and Kyle R. Sunderland and Grace Pigeau and Abigael Schonewille and Gabor Fichtinger},
  url       = {https://www.imno.ca/sites/default/files/ImNO2020Proceedings.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wu2020b.pdf},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  booktitle = {18th Annual Imaging Network Ontario (ImNO) Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Sunderland, Kyle R.; Zevin, Boris; Fichtinger, Gabor
Open source platform for automated collection of training data to support video-based feedback in surgical simulators Conference
SPIE Medical Imaging, SPIE, Houston, United States, 2020.
@conference{Laframboise2020a,
  title     = {Open source platform for automated collection of training data to support video-based feedback in surgical simulators},
  author    = {Jacob Laframboise and Tamas Ungi and Kyle R. Sunderland and Boris Zevin and Gabor Fichtinger},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  booktitle = {SPIE Medical Imaging},
  publisher = {SPIE},
  address   = {Houston, United States},
  abstract  = {<p><strong>Purpose:</strong> Surgical training could be improved by automatic detection of workflow steps. A platform to collect and organize tracking and video data would enable rapid development of deep learning solutions for surgical training. The purpose of this research is to demonstrate 3D Slicer / PLUS Toolkit as a platform for video annotation by identifying and annotating tools interacting with tissues in simulated hernia repair. <strong>Methods:</strong> Tracking data from an optical tracker and video data from a camera are collected by PLUS and 3D Slicer. To demonstrate the platform in use, we identify tissues during a surgical procedure using a neural network. The tracking data is used to identify what tool is in use. The solution is deployed with a custom Slicer module. <strong>Results:</strong> This platform allowed the collection and storage of enough tracked video data for training a convolutional neural network (CNN) to detect interactions with tissues and tools. The CNN was trained on this data and applied to new data with a testing accuracy of 98%. The model’s predictions can be weighted over several frames with a custom Slicer module to improve accuracy. <strong>Conclusion:</strong> We found the 3D Slicer and PLUS Toolkit platform to be a viable platform for training and deploying a solution that combines automatic video processing and optical tool tracking. We designed a proof of concept model to identify tissues with a trained CNN in real time along with tracking of surgical tools.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Choi, Perry S.; Nam, Hannah H.; Lasso, Andras; Herz, Christian; Drouin, Simon; Harrild, David M.; Quartermain, Michael; Fichtinger, Gabor; Mascio, Christopher E.; Emani, Sitaram; Jolley, Matthew A.
3D Modeling of Surgically Implanted Stent-Based Valves in the Mitral Position in Children Journal Article
In: The Annals of Thoracic Surgery, 2020.
@article{Choi2020,
  title     = {3D Modeling of Surgically Implanted Stent-Based Valves in the Mitral Position in Children},
  author    = {Perry S. Choi and Hannah H. Nam and Andras Lasso and Christian Herz and Simon Drouin and David M. Harrild and Michael Quartermain and Gabor Fichtinger and Christopher E. Mascio and Sitaram Emani and Matthew A. Jolley},
  url       = {https://doi.org/10.1016/j.athoracsur.2020.02.020},
  doi       = {10.1016/j.athoracsur.2020.02.020},
  year      = {2020},
  date      = {2020-01-01},
  urldate   = {2020-01-01},
  journal   = {The Annals of Thoracic Surgery},
  internal-note = {NOTE(review): garbled names "HannahH" and "MatthewA" repaired; stray trailing newline removed from url value},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Barr, Colton; Lasso, Andras; Asselin, Mark; Pieper, Steve; Robertson, Faith C.; Gormley, William B.; Fichtinger, Gabor
Towards portable image guidance and automatic patient registration using an RGB-D camera and video projector Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE SPIE, Houston, Texas, United States, 2020.
@conference{BarrC2020,
  title        = {Towards portable image guidance and automatic patient registration using an RGB-D camera and video projector},
  author       = {Colton Barr and Andras Lasso and Mark Asselin and Steve Pieper and Faith C. Robertson and William B. Gormley and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Barr2020.pdf},
  doi          = {10.1117/12.2549723},
  year         = {2020},
  date         = {2020-01-01},
  urldate      = {2020-01-01},
  booktitle    = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
  volume       = {11315},
  publisher    = {SPIE},
  address      = {Houston, Texas, United States},
  organization = {SPIE},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Wiercigroch, Julia; Hashtrudi-Zaad, Keyvan; Ungi, Tamas; Bisleri, Gianluigi; Fichtinger, Gabor
Force and torque feedback in endoscopic vessel harvesting Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE SPIE, Houston, Texas, United States, 2020.
@conference{Wiercigroch2020a,
  title        = {Force and torque feedback in endoscopic vessel harvesting},
  author       = {Julia Wiercigroch and Keyvan Hashtrudi-Zaad and Tamas Ungi and Gianluigi Bisleri and Gabor Fichtinger},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wiercigroch2020a.pdf},
  doi          = {10.1117/12.2550160},
  year         = {2020},
  date         = {2020-01-01},
  urldate      = {2020-01-01},
  booktitle    = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
  volume       = {11315},
  publisher    = {SPIE},
  address      = {Houston, Texas, United States},
  organization = {SPIE},
  internal-note = {review: booktitle typo "Inverventions" corrected to "Interventions" to match sibling entries in the same proceedings},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Connolly, Laura; Jamzad, Amoon; Kaufmann, Martin; Rubino, Rachel; Sedghi, Alireza; Ungi, Tamas; Asselin, Mark; Yam, Scott; Rudan, John; Nicol, Christopher; Fichtinger, Gabor; Mousavi, Parvin
Classification of tumor signatures from electrosurgical vapors using mass spectrometry and machine learning: a feasibility study Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE SPIE, Houston, Texas, United States, 2020.
@conference{Connolly2020a,
  title        = {Classification of tumor signatures from electrosurgical vapors using mass spectrometry and machine learning: a feasibility study},
  author       = {Laura Connolly and Amoon Jamzad and Martin Kaufmann and Rachel Rubino and Alireza Sedghi and Tamas Ungi and Mark Asselin and Scott Yam and John Rudan and Christopher Nicol and Gabor Fichtinger and Parvin Mousavi},
  url          = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2020a.pdf},
  doi          = {10.1117/12.2549343},
  year         = {2020},
  date         = {2020-01-01},
  urldate      = {2020-01-01},
  booktitle    = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
  volume       = {11315},
  publisher    = {SPIE},
  address      = {Houston, Texas, United States},
  organization = {SPIE},
  keywords     = {},
  pubstate     = {published},
  tppubtype    = {conference}
}
Wu, Victoria; Ungi, Tamas; Sunderland, Kyle R.; Pigeau, Grace; Schonewille, Abigael; Fichtinger, Gabor
Automatic segmentation of spinal ultrasound landmarks with U-net using multiple consecutive images for input Conference
SPIE Medical Imaging, 2020.
@conference{Wu2020a,
title = {Automatic segmentation of spinal ultrasound landmarks with U-net using multiple consecutive images for input},
author = {Victoria Wu and Tamas Ungi and Kyle R. Sunderland and Grace Pigeau and Abigael Schonewille and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2020a-manuscript.pdf},
internal-note = {NOTE(review): url points to CWu2020a-manuscript.pdf, the same file linked from entry CWu2020 (a different paper by a different first author) -- verify the PDF link is correct},
doi = {10.1117/12.2549584},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {SPIE Medical Imaging},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Janssen, Natasja; Kaufmann, Martin; Santilli, Alice; Jamzad, Amoon; Vanderbeck, Kaitlin; Ren, Kevin; Ungi, Tamas; Mousavi, Parvin; Rudan, John; McKay, Doug; Wang, Amy; Fichtinger, Gabor
Navigated tissue characterization during skin cancer surgery Journal Article
In: Int J Comput Assist Radiol Surg, 2020.
@article{Janssen2020a,
title = {Navigated tissue characterization during skin cancer surgery},
author = {Natasja Janssen and Martin Kaufmann and Alice Santilli and Amoon Jamzad and Vanderbeck Kaitlin and Kevin Ren and Tamas Ungi and Parvin Mousavi and John Rudan and Doug McKay and Amy Wang and Gabor Fichtinger},
doi = {10.1007/s11548-020-02200-4},
year = {2020},
date = {2020-01-01},
journal = {Int J Comput Assist Radiol Surg},
abstract = {<p><strong>Purpose: </strong>Basal cell carcinoma (BCC) is the most commonly diagnosed skin cancer and is treated by surgical resection. Incomplete tumor removal requires surgical revision, leading to significant healthcare costs and impaired cosmesis. We investigated the clinical feasibility of a surgical navigation system for BCC surgery, based on molecular tissue characterization using rapid evaporative ionization mass spectrometry (REIMS).</p>
<p><strong>Methods: </strong>REIMS enables direct tissue characterization by analysis of cell-specific molecules present within surgical smoke, produced during electrocautery tissue resection. A tissue characterization model was built by acquiring REIMS spectra of BCC, healthy skin and fat from ex vivo skin cancer specimens. This model was used for tissue characterization during navigated skin cancer surgery. Navigation was enabled by optical tracking and real-time visualization of the cautery relative to a contoured resection volume. The surgical smoke was aspirated into a mass spectrometer and directly analyzed with REIMS. Classified BCC was annotated at the real-time position of the cautery. Feasibility of the navigation system, and tissue classification accuracy for ex vivo and intraoperative surgery were evaluated.</p>
<p><strong>Results: </strong>Fifty-four fresh excision specimens were used to build the ex vivo model of BCC, normal skin and fat, with 92% accuracy. While 3 surgeries were successfully navigated without breach of sterility, the intraoperative performance of the ex vivo model was low (< 50%). Hypotheses are: (1) the model was trained on heterogeneous mass spectra that did not originate from a single tissue type, (2) during surgery mixed tissue types were resected and thus presented to the model, and (3) the mass spectra were not validated by pathology.</p>
<p><strong>Conclusion: </strong>REIMS-navigated skin cancer surgery has the potential to detect and localize remaining tumor intraoperatively. Future work will be focused on improving our model by using a precise pencil cautery tip for burning localized tissue types, and having pathology-validated mass spectra.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
<p><strong>Methods: </strong>REIMS enables direct tissue characterization by analysis of cell-specific molecules present within surgical smoke, produced during electrocautery tissue resection. A tissue characterization model was built by acquiring REIMS spectra of BCC, healthy skin and fat from ex vivo skin cancer specimens. This model was used for tissue characterization during navigated skin cancer surgery. Navigation was enabled by optical tracking and real-time visualization of the cautery relative to a contoured resection volume. The surgical smoke was aspirated into a mass spectrometer and directly analyzed with REIMS. Classified BCC was annotated at the real-time position of the cautery. Feasibility of the navigation system, and tissue classification accuracy for ex vivo and intraoperative surgery were evaluated.</p>
<p><strong>Results: </strong>Fifty-four fresh excision specimens were used to build the ex vivo model of BCC, normal skin and fat, with 92% accuracy. While 3 surgeries were successfully navigated without breach of sterility, the intraoperative performance of the ex vivo model was low (< 50%). Hypotheses are: (1) the model was trained on heterogeneous mass spectra that did not originate from a single tissue type, (2) during surgery mixed tissue types were resected and thus presented to the model, and (3) the mass spectra were not validated by pathology.</p>
<p><strong>Conclusion: </strong>REIMS-navigated skin cancer surgery has the potential to detect and localize remaining tumor intraoperatively. Future work will be focused on improving our model by using a precise pencil cautery tip for burning localized tissue types, and having pathology-validated mass spectra.</p>
Lasso, Andras; Pinter, Csaba; Choueib, Saleh; Ungi, Tamas; Fichtinger, Gabor
Enhance medical software applications with immersive virtual reality experience Conference
Techna Symposium, Toronto, ON, Canada, 2019.
@conference{Lasso2019,
  author    = {Andras Lasso and Csaba Pinter and Saleh Choueib and Tamas Ungi and Gabor Fichtinger},
  title     = {Enhance medical software applications with immersive virtual reality experience},
  booktitle = {Techna Symposium},
  address   = {Toronto, ON, Canada},
  year      = {2019},
  date      = {2019-10-01},
  urldate   = {2019-10-01},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Lasso2019.pdf},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Vaughan, Thomas; Brastianos, H; Ungi, Tamas; Lasso, Andras; Falkson, Conrad; Fichtinger, Gabor
Needle Navigation and Catheter Reconstruction for Breast Brachytherapy Using Open Source Software Journal Article
In: Acta Polytechnica Hungarica, vol. 16, no. 8, pp. 99-118, 2019.
@article{Vaughan2019a,
title = {Needle Navigation and Catheter Reconstruction for Breast Brachytherapy Using Open Source Software},
author = {Thomas Vaughan and H Brastianos and Tamas Ungi and Andras Lasso and Conrad Falkson and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Vaughan2019a_0.pdf},
year = {2019},
date = {2019-09-01},
urldate = {2019-09-01},
journal = {Acta Polytechnica Hungarica},
volume = {16},
number = {8},
pages = {99-118},
abstract = {<p>Interstitial breast brachytherapy is a method to deliver radiation therapy directly to the site of cancer. It is a challenging procedure because of issues in localizing the seroma, needles, and catheters within the soft tissue. In this paper we present two open-source technologies based on electromagnetic tracking: a navigation system to help target needles using a tracked needle guide, and software for electromagnetic reconstruction of catheter paths. These technologies were validated phantom studies. We found that the navigation system helped a radiation oncologist to target needles more accurately than under ultrasound guidance (60 needles under each condition, 3.8 vs 3.3 mm placement error).</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Pinter, Csaba; Lasso, Andras; Asselin, Mark; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Fichtinger, Gabor
SlicerVR for image-guided therapy planning in immersive virtual reality Conference
The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK, London, UK, 2019.
@conference{Pinter2019a,
title = {SlicerVR for image-guided therapy planning in immersive virtual reality},
author = {Csaba Pinter and Andras Lasso and Mark Asselin and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2019a_0.pdf},
year = {2019},
date = {2019-06-01},
urldate = {2019-06-01},
booktitle = {The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK},
pages = {91-92},
address = {London, UK},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Connolly, Laura; Ungi, Tamas; Lasso, Andras; Vaughan, Thomas; Asselin, Mark; Mousavi, Parvin; Yam, Scott; Fichtinger, Gabor
Mechanically-Controlled Spectroscopic Imaging for Tissue Classification Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Connolly2019a,
title = {Mechanically-Controlled Spectroscopic Imaging for Tissue Classification},
author = {Laura Connolly and Tamas Ungi and Andras Lasso and Thomas Vaughan and Mark Asselin and Parvin Mousavi and Scott Yam and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2019a_3.pdf},
doi = {10.1117/12.2512481},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}