Biography
Dr. Tamas Ungi is a Senior Research Scientist at Queen’s University in Kingston, Ontario, Canada, where he holds adjunct faculty appointments at the School of Computing and the Department of Surgery. He received his MD degree in 2006 and PhD in Radiology in 2011, both from the University of Szeged in Hungary. Dr. Ungi’s research focuses on the translation of new technologies to clinical research, with a particular interest in image-guided medical interventions, skills education, and real-time applications of artificial intelligence in ultrasound-guided procedures.
Publications
Kim, Andrew S.; Yeung, Chris; Szabo, Robert; Sunderland, Kyle; Hisey, Rebecca; Morton, David; Kikinis, Ron; Diao, Babacar; Mousavi, Parvin; Ungi, Tamas; Fichtinger, Gabor
Percutaneous nephrostomy needle guidance using real-time 3D anatomical visualization with live ultrasound segmentation Proceedings
SPIE, 2024.
@proceedings{Kim2024,
title = {Percutaneous nephrostomy needle guidance using real-time 3D anatomical visualization with live ultrasound segmentation},
author = {Andrew S. Kim and Chris Yeung and Robert Szabo and Kyle Sunderland and Rebecca Hisey and David Morton and Ron Kikinis and Babacar Diao and Parvin Mousavi and Tamas Ungi and Gabor Fichtinger},
editor = {Maryam E. Rettmann and Jeffrey H. Siewerdsen},
doi = {10.1117/12.3006533},
year = {2024},
date = {2024-03-29},
urldate = {2024-03-29},
publisher = {SPIE},
abstract = {
PURPOSE: Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software. METHODS: Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the trials were analyzed for needle tip distance to the center of the target calyx, needle insertion time, and success rate. Participants also completed a survey on their experience. RESULTS: Using the visualization aid showed significantly higher accuracy, while needle insertion time and success rate were not statistically significant at our sample size. Participants mostly responded positively to the visualization aid, and 80% found it easier to use than ultrasound needle guidance. CONCLUSION: We found that our visualization aid produced increased accuracy and an overall positive experience. We demonstrated that our system is functional and stable and believe that the workflow with this system can be applied to other procedures. This visualization aid system is effective on phantoms and is ready for translation with clinical data.},
keywords = {},
pubstate = {published},
tppubtype = {proceedings}
}
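The entry above couples live ultrasound segmentation with tracked 3D volume reconstruction. As a rough illustration of the reconstruction step only (not the authors' implementation, which builds on free, open-source tools such as 3D Slicer and PLUS), the following minimal Python sketch scatters the segmented pixels of one frame into a world-aligned voxel grid through a tracked image-to-world transform; the mask, pose, grid size, and insert_frame helper are all illustrative placeholders.

# Minimal sketch of tracked-ultrasound volume reconstruction: segmented
# 2D pixels are mapped through a tracked image-to-world transform and
# accumulated into a voxel grid. Placeholder data only; the published
# system uses 3D Slicer, PLUS, and a deep learning segmentation model.
import numpy as np

def insert_frame(volume, mask, image_to_world, origin, spacing):
    """Scatter the nonzero pixels of a segmented frame into the volume."""
    rows, cols = np.nonzero(mask)
    # Homogeneous pixel coordinates (x = column, y = row, z = 0 in plane).
    pixels = np.stack([cols, rows, np.zeros_like(rows), np.ones_like(rows)])
    world = (image_to_world @ pixels)[:3].T            # (N, 3) world points
    idx = np.round((world - origin) / spacing).astype(int)
    inside = np.all((idx >= 0) & (idx < np.array(volume.shape)), axis=1)
    volume[tuple(idx[inside].T)] += 1.0                # accumulate hits
    return volume

volume = np.zeros((64, 64, 64))                        # reconstruction grid
mask = np.zeros((128, 128), dtype=np.uint8)            # fake segmentation
mask[40:80, 50:90] = 1
pose = np.eye(4); pose[:3, 3] = [10.0, 10.0, 32.0]     # fake tracker pose
volume = insert_frame(volume, mask, pose, origin=np.zeros(3), spacing=1.0)
print("occupied voxels:", int(np.count_nonzero(volume)))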
Elkind, Emese; Barr, Keiran; Barr, Colton; Moga, Kristof; Garamvolgy, Tivadar; Haidegger, Tamas; Ungi, Tamas; Fichtinger, Gabor
Modifying Radix Lenses to Survive Low-Cost Sterilization: An Exploratory Study Conference
Imaging Network of Ontario (ImNO) Symposium, 2024.
@conference{Elkind2024,
title = {Modifying Radix Lenses to Survive Low-Cost Sterilization: An Exploratory Study},
author = {Emese Elkind and Keiran Barr and Colton Barr and Kristof Moga and Tivadar Garamvolgy and Tamas Haidegger and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/10/EmeseElkindImNO2024-2.docx},
year = {2024},
date = {2024-03-19},
urldate = {2024-03-19},
booktitle = {Imaging Network of Ontario (ImNO) Symposium},
publisher = {Imaging Network of Ontario (ImNO) Symposium},
abstract = {INTRODUCTION: A major challenge with deploying infrared camera-tracked surgical navigation solutions, such as NousNav [1], in low-resource settings is the high cost and unavailability of disposable retroreflective infrared markers. Developing an accessible method to reuse and sterilize retroreflective markers could lead to significant increase in the uptake of this technology. As none of the known infrared markers can endure standard autoclaving and most places do not have access to gas sterilization, attention is focused on cold liquid sterilisation methods commonly used in laparoscopy and other optical tools that cannot be sterilized in a conventional autoclave.
METHODS: We propose to modify the NDI Radix™ Lens [2], a single-use retroreflective spherical marker manufactured by Northern Digital, Waterloo, Canada. Radix lenses are uniquely promising candidates for liquid sterilization given their smooth, spherical surface. This quality also makes them easier to clean perioperatively compared to other retroreflective infrared marker designs. Initial experiments show that liquid sterilization agents degrade the marker’s retroreflective gold coating (Fig. 1). Hence the objective of this project is to develop a method to protect the Radix lenses with a layer of coating material that does not allow the sanitizing agent to degrade the coating, enabling the lens to survive multiple sanitation cycles while retaining sufficient tracking accuracy. We employed two cold liquid sterilisation agents: household bleach, which is a common ingredient of liquid sterilisation solutions, and Sekusept™ Aktiv (Ecolab, Saint Paul, MN, USA), which is widely known for sterilizing laparoscopy instruments. Store-bought nail polish and Zink-Alu Spray were used to coat the lenses. Data were obtained by recording five tests each with five rounds of sterilization, each tested with six trials, for a total of 150 recordings. The five tests were as follows: 1) Radix lens coated with nail polish and bleached, 2) uncoated and bleached, 3) coated with nail polish and sanitised, 4) uncoated and sanitised, and 5) coated with Zink-Alu Spray and sanitised. To assess the impact of the sterilization on the lens’s fiducial localization error, two metal marker frames equipped with four sockets designed for the Radix lenses were used. The reference marker frame was secured to a flat table while the other marker frame moved along a fixed path on the table. The position and orientation of the marker clusters were streamed into 3D Slicer using the Public Library for Ultrasound Toolkit (PLUS). A plane was then fit to the recorded marker poses in 3D Slicer using Iterative Closest Point and the marker registration error was computed. Distance from the camera, angle of view, and distance from the edges of the field of view were held constant.
RESULTS: With each round of sterilization, the error of coated lenses was lower than that of the unprotected lenses, and the error showed a slightly increasing trend (Fig. 2). The lenses appeared fainter in the tracking software, but all lenses remained trackable and visible despite the significant removal of reflective coating.
When reflective coating was fully rubbed off the lenses, the tracking software could still localize the markers; however, the lenses did appear much fainter in the tracking software. We observed that the reflective coating rubs off the lens in routine handling, and recoating with Zink-Alu spray can partially restore marker visibility. Using protective nail polish coating prevented the reflective coating from rubbing off altogether.
CONCLUSIONS: This exploratory study represents a promising step toward achieving low-cost sterilization of retroreflective infrared markers. Studies with the NousNav system need to be undertaken to measure whether the extent of degradation in tracking accuracy is tolerable as a side effect of marker sterilization. Before using coated Radix lenses on human subjects, it must be verified that the protective coating (common nail polish in our study) is fully biocompatible and remains undamaged by the cold sterilization agent (Sekusept™ Aktiv in our study).
REFERENCES: [1] NousNav: A low-cost neuronavigation system for deployment in lower-resource settings, International Journal of Computer Assisted Radiology and Surgery, 2022 Sep;17(9):1745-1750. [2] NDI Radix™ Lens (https://www.ndigital.com/optical-measurement-technology/radix-lens/) },
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
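The methods above fit a plane to the recorded marker poses and compute a registration error. A minimal sketch of that idea, assuming positions have already been extracted to an (N, 3) array: fit the plane by SVD and report the RMS out-of-plane distance. The study itself streams poses via PLUS into 3D Slicer and uses an Iterative Closest Point workflow; the synthetic data here only stand in for a tracked marker path.

# Minimal sketch: least-squares plane fit to tracked marker positions
# via SVD, reporting RMS out-of-plane error. Synthetic data replace
# the poses that the study streamed from PLUS into 3D Slicer.
import numpy as np

def plane_fit_rms(points):
    """Fit a plane through (N, 3) points; return normal and RMS distance."""
    centroid = points.mean(axis=0)
    _, _, vt = np.linalg.svd(points - centroid)
    normal = vt[-1]                            # direction of least variance
    distances = (points - centroid) @ normal   # signed out-of-plane offsets
    return normal, np.sqrt(np.mean(distances ** 2))

rng = np.random.default_rng(0)
xy = rng.uniform(-50, 50, size=(200, 2))       # marker path on a table (mm)
noise = rng.normal(0, 0.3, size=200)           # simulated tracking error (mm)
points = np.column_stack([xy, noise])
normal, rms = plane_fit_rms(points)
print(f"plane normal {np.round(normal, 3)}, RMS error {rms:.3f} mm")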
Hashtrudi-Zaad, Kian; Ungi, Tamas; Yeung, Chris; Baum, Zachary; Cernelev, Pavel-Dumitru; Hage, Anthony N; Schlenger, Christopher; Fichtinger, Gabor
Expert-guided optimization of ultrasound segmentation models for 3D spine imaging Journal Article
In: pp. 680-685, 2024.
@article{hashtrudi-zaad2024,
title = {Expert-guided optimization of ultrasound segmentation models for 3D spine imaging},
author = {Kian Hashtrudi-Zaad and Tamas Ungi and Chris Yeung and Zachary Baum and Pavel-Dumitru Cernelev and Anthony N Hage and Christopher Schlenger and Gabor Fichtinger},
year = {2024},
date = {2024-01-01},
pages = {680-685},
publisher = {IEEE},
abstract = {We explored ultrasound for imaging bones, specifically the spine, as a safer and more accessible alternative to conventional X-ray. We aimed to improve how well deep learning segmentation models filter bone signals from ultrasound frames with the goal of using these segmented images for reconstructing the 3-dimensional spine volume. Our dataset consisted of spatially tracked ultrasound scans from 25 patients. Image frames from these scans were also manually annotated to provide training data for image segmentation deep learning. To find the optimal automatic segmentation method, we assessed five different artificial neural network models and their variations by hyperparameter tuning. Our main contribution is a new approach for model selection, employing an Elo rating system to efficiently rank trained models based on their visual performance as assessed by clinical users. This method addresses the …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
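The abstract's main contribution is ranking trained segmentation models with an Elo rating system driven by pairwise expert preferences. A minimal sketch of a standard Elo update follows; the model names, comparison outcomes, and K-factor are illustrative assumptions, since the paper's exact pairing scheme is not reproduced here.

# Minimal sketch of Elo-style ranking of segmentation models from
# pairwise expert preferences. Names, outcomes, and K-factor are
# illustrative; the paper's exact scheme may differ.
def elo_update(r_a, r_b, score_a, k=32.0):
    """Update two ratings after one comparison; score_a is 1 if A won."""
    expected_a = 1.0 / (1.0 + 10 ** ((r_b - r_a) / 400.0))
    r_a += k * (score_a - expected_a)
    r_b += k * ((1.0 - score_a) - (1.0 - expected_a))
    return r_a, r_b

ratings = {"unet": 1500.0, "unet++": 1500.0, "segresnet": 1500.0}
# Each tuple: (model A, model B, 1 if the expert preferred A's contour).
comparisons = [("unet", "unet++", 0), ("unet++", "segresnet", 1),
               ("unet", "segresnet", 0), ("unet++", "unet", 1)]
for a, b, outcome in comparisons:
    ratings[a], ratings[b] = elo_update(ratings[a], ratings[b], outcome)
for name, rating in sorted(ratings.items(), key=lambda kv: -kv[1]):
    print(f"{name:10s} {rating:7.1f}")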
Yeung, Chris; Ungi, Tamas; Hu, Zoe; Jamzad, Amoon; Kaufmann, Martin; Walker, Ross; Merchant, Shaila; Engel, Cecil Jay; Jabs, Doris; Rudan, John; Mousavi, Parvin; Fichtinger, Gabor
From quantitative metrics to clinical success: assessing the utility of deep learning for tumor segmentation in breast surgery Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, pp. 1-9, 2024.
@article{yeung2024,
title = {From quantitative metrics to clinical success: assessing the utility of deep learning for tumor segmentation in breast surgery},
author = {Chris Yeung and Tamas Ungi and Zoe Hu and Amoon Jamzad and Martin Kaufmann and Ross Walker and Shaila Merchant and Cecil Jay Engel and Doris Jabs and John Rudan and Parvin Mousavi and Gabor Fichtinger},
url = {https://link.springer.com/article/10.1007/s11548-024-03133-y},
year = {2024},
date = {2024-01-01},
urldate = {2024-01-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
pages = {1-9},
publisher = {Springer International Publishing},
abstract = {Purpose
Preventing positive margins is essential for ensuring favorable patient outcomes following breast-conserving surgery (BCS). Deep learning has the potential to enable this by automatically contouring the tumor and guiding resection in real time. However, evaluation of such models with respect to pathology outcomes is necessary for their successful translation into clinical practice.
Methods
Sixteen deep learning models based on established architectures in the literature are trained on 7318 ultrasound images from 33 patients. Models are ranked by an expert based on their contours generated from images in our test set. Generated contours from each model are also analyzed using recorded cautery trajectories of five navigated BCS cases to predict margin status. Predicted margins are compared with pathology reports.
Results
The best-performing model using both quantitative evaluation and our visual …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
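The methods above compare recorded cautery trajectories against the tumor to predict margin status. A minimal geometric sketch of that comparison: take the smallest distance from any cautery tip sample to a tumor point cloud and threshold it. The point sets and the 2 mm threshold are synthetic assumptions, not values from the paper.

# Minimal sketch: predict margin status from the minimum distance
# between recorded cautery tip positions and tumor surface points.
# Coordinates and threshold are synthetic placeholders.
import numpy as np

def min_cautery_distance(cautery_path, tumor_points):
    """Smallest distance from any cautery sample to any tumor point."""
    diffs = cautery_path[:, None, :] - tumor_points[None, :, :]
    return np.sqrt((diffs ** 2).sum(axis=2)).min()

rng = np.random.default_rng(1)
tumor = rng.normal(0, 5, size=(500, 3))               # tumor surface cloud
path = rng.normal(0, 5, size=(300, 3)) + [12, 0, 0]   # cautery trajectory
d = min_cautery_distance(path, tumor)
print("positive margin" if d < 2.0 else f"negative margin ({d:.1f} mm)")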
Barr, Colton; Groves, Leah; Ungi, Tamas; Siemens, D Robert; Diao, Babacar; Kikinis, Ron; Mousavi, Parvin; Fichtinger, Gabor
Extracting 3D Prostate Geometry from 2D Optically-Tracked Transrectal Ultrasound Images Journal Article
In: pp. 32-37, 2024.
@article{barr2024,
title = {Extracting 3D Prostate Geometry from 2D Optically-Tracked Transrectal Ultrasound Images},
author = {Colton Barr and Leah Groves and Tamas Ungi and D Robert Siemens and Babacar Diao and Ron Kikinis and Parvin Mousavi and Gabor Fichtinger},
year = {2024},
date = {2024-01-01},
pages = {32-37},
publisher = {IEEE},
abstract = {The technical challenges of traditional transrectal ultrasound-guided prostate biopsy, combined with the limited availability of more advanced prostate imaging techniques, have exacerbated existing differences in prostate cancer outcomes between high-resource and low-resource healthcare settings. The objective of this paper is to improve the tools available to clinicians in low-resource settings by working towards an inexpensive ultrasound-guided prostate biopsy navigation system. The principal contributions detailed here are the design, implementation, and testing of a system capable of generating a 3D model of the prostate from spatially-tracked 2D ultrasound images. The system uses open-source software, low-cost materials, and deep learning to segment and localize cross-sections of the prostate in order to produce a patient-specific 3D prostate model. A user study was performed to evaluate the …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
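The paper reconstructs patient-specific 3D prostate geometry from spatially tracked 2D ultrasound segmentations. One sub-step can be sketched simply: lift segmented 2D contour points into 3D with per-frame tracking transforms, then summarize the shape, here with a convex hull from scipy. The circular contours and poses below are synthetic, and the published pipeline (deep learning segmentation inside open-source software) is considerably more involved.

# Minimal sketch: lift tracked 2D contours into 3D and estimate organ
# volume with a convex hull. Contours and poses are synthetic.
import numpy as np
from scipy.spatial import ConvexHull

def lift_contour(contour_xy, image_to_world):
    """Map (N, 2) in-plane contour points through a 4x4 tracked pose."""
    n = len(contour_xy)
    homog = np.column_stack([contour_xy, np.zeros(n), np.ones(n)])
    return (image_to_world @ homog.T)[:3].T

theta = np.linspace(0, 2 * np.pi, 50, endpoint=False)
cloud = []
for z in np.linspace(-10, 10, 9):                 # sweep of tracked frames
    r = np.sqrt(max(1e-6, 15.0 ** 2 - z ** 2))    # circular cross-section
    contour = np.column_stack([r * np.cos(theta), r * np.sin(theta)])
    pose = np.eye(4); pose[2, 3] = z              # fake tracker pose
    cloud.append(lift_contour(contour, pose))
points = np.vstack(cloud)
print(f"estimated volume: {ConvexHull(points).volume / 1000:.1f} mL")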
d'Albenzio, Gabriella; Hisey, Rebecca; Srikanthan, Dilakshan; Ungi, Tamas; Lasso, Andras; Aghayan, Davit; Fichtinger, Gabor; Palomar, Rafael
Using NURBS for virtual resections in liver surgery planning: a comparative usability study Journal Article
In: vol. 12927, pp. 235-241, 2024.
@article{fichtinger2024f,
title = {Using NURBS for virtual resections in liver surgery planning: a comparative usability study},
author = {Gabriella d'Albenzio and Rebecca Hisey and Dilakshan Srikanthan and Tamas Ungi and Andras Lasso and Davit Aghayan and Gabor Fichtinger and Rafael Palomar},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12927/129270Z/Using-NURBS-for-virtual-resections-in-liver-surgery-planning/10.1117/12.3006486.short},
year = {2024},
date = {2024-01-01},
volume = {12927},
pages = {235-241},
publisher = {SPIE},
abstract = {PURPOSE
Accurate preoperative planning is crucial for liver resection surgery due to the complex anatomical structures and variations among patients. The use of virtual resections utilizing deformable surfaces presents a promising approach for effective liver surgery planning. However, the range of available surface definitions poses the question of which definition is most appropriate.
METHODS
The study compares the use of NURBS and Bézier surfaces for the definition of virtual resections through a usability study, where 25 participants (19 biomedical researchers and 6 liver surgeons) completed tasks using varying numbers of control points driving surface deformations and different surface types. Specifically, participants aim to perform virtual liver resections using 16 and 9 control points for NURBS and Bézier surfaces. The goal is to assess whether they can attain an optimal resection plan, effectively …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
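The study contrasts NURBS (16 control points) with Bézier surfaces (9 control points) for defining virtual resections. As a minimal sketch of the simpler case, the following evaluates a tensor-product Bézier patch over a 3x3 control grid; the control points are synthetic, and the interactive deformation studied in the paper is out of scope here.

# Minimal sketch: evaluate a tensor-product Bezier surface from a 3x3
# control grid (9 control points, the study's Bezier condition).
import numpy as np
from math import comb

def bezier_surface(control, u, v):
    """Evaluate a Bezier patch at parameters u, v in [0, 1]."""
    n, m = control.shape[0] - 1, control.shape[1] - 1
    bu = np.array([comb(n, i) * u**i * (1 - u)**(n - i) for i in range(n + 1)])
    bv = np.array([comb(m, j) * v**j * (1 - v)**(m - j) for j in range(m + 1)])
    return np.einsum("i,j,ijk->k", bu, bv, control)

# Synthetic 3x3 grid of 3D control points over a gently curved sheet.
grid = np.array([[[x, y, 10.0 * np.sin(x / 20.0)] for y in (0, 50, 100)]
                 for x in (0, 50, 100)], dtype=float)
print("surface midpoint:", np.round(bezier_surface(grid, 0.5, 0.5), 2))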
Hintz, Lucas; Nanziri, Sarah C; Dance, Sarah; Jawed, Kochai; Oetgen, Matthew; Ungi, Tamas; Fichtinger, Gabor; Schlenger, Christopher; Cleary, Kevin
3D volume reconstruction for pediatric scoliosis evaluation using motion-tracked ultrasound Journal Article
In: vol. 12928, pp. 223-227, 2024.
@article{fichtinger2024g,
title = {3D volume reconstruction for pediatric scoliosis evaluation using motion-tracked ultrasound},
author = {Lucas Hintz and Sarah C Nanziri and Sarah Dance and Kochai Jawed and Matthew Oetgen and Tamas Ungi and Gabor Fichtinger and Christopher Schlenger and Kevin Cleary},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12928/1292811/3D-volume-reconstruction-for-pediatric-scoliosis-evaluation-using-motion-tracked/10.1117/12.3008629.short},
year = {2024},
date = {2024-01-01},
volume = {12928},
pages = {223-227},
publisher = {SPIE},
abstract = {We have evaluated AI-segmented 3D spine ultrasound for scoliosis measurement in a feasibility study of pediatric patients enrolled over two months in the orthopedic clinic at Children’s National Hospital. Patients who presented to clinic for scoliosis evaluation were invited to participate and their spines were scanned using the method. Our system consists of three Optitrack cameras which track a Clarius wireless ultrasound probe and infrared marked waistbelt. Proprietary SpineUs software uses neural networks to build a volumetric reproduction of the spine in real-time using a laptop computer. We can approximate the maximal lateral curvature using the transverse process angle of the virtual reconstruction; these angles were compared to those from the radiographic exams for each patient from the same visit. Scans and radiographs from five patients were examined and demonstrate a linear correlation between …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kaufmann, Martin; Jamzad, Amoon; Ungi, Tamas; Rodgers, Jessica; Koster, Teaghan; Yeung, Chris; Janssen, Natasja; McMullen, Julie; Solberg, Kathryn; Cheesman, Joanna; Ren, Kevin Yi Mi; Varma, Sonal; Merchant, Shaila; Engel, Cecil Jay; Walker, G Ross; Gallo, Andrea; Jabs, Doris; Mousavi, Parvin; Fichtinger, Gabor; Rudan, John
Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery Journal Article
In: vol. 31, iss. 1, pp. S10-S10, 2024.
@article{fichtinger2024i,
title = {Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery},
author = {Martin Kaufmann and Amoon Jamzad and Tamas Ungi and Jessica Rodgers and Teaghan Koster and Chris Yeung and Natasja Janssen and Julie McMullen and Kathryn Solberg and Joanna Cheesman and Kevin Yi Mi Ren and Sonal Varma and Shaila Merchant and Cecil Jay Engel and G Ross Walker and Andrea Gallo and Doris Jabs and Parvin Mousavi and Gabor Fichtinger and John Rudan},
url = {https://scholar.google.com/scholar?cluster=16985799098796735653&hl=en&oi=scholarr},
year = {2024},
date = {2024-01-01},
volume = {31},
issue = {1},
pages = {S10-S10},
publisher = {SPRINGER},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kaufmann, Martin; Jamzad, Amoon; Ungi, Tamas; Rodgers, Jessica R; Koster, Teaghan; Yeung, Chris; Ehrlich, Josh; Santilli, Alice; Asselin, Mark; Janssen, Natasja; McMullen, Julie; Solberg, Kathryn; Cheesman, Joanna; Carlo, Alessia Di; Ren, Kevin Yi Mi; Varma, Sonal; Merchant, Shaila; Engel, Cecil Jay; Walker, G Ross; Gallo, Andrea; Jabs, Doris; Mousavi, Parvin; Fichtinger, Gabor; Rudan, John F
Abstract PO2-23-07: Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery Journal Article
In: Cancer Research, vol. 84, iss. 9_Supplement, pp. PO2-23-07-PO2-23-07, 2024.
@article{fichtinger2024c,
title = {Abstract PO2-23-07: Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery},
author = {Martin Kaufmann and Amoon Jamzad and Tamas Ungi and Jessica R Rodgers and Teaghan Koster and Chris Yeung and Josh Ehrlich and Alice Santilli and Mark Asselin and Natasja Janssen and Julie McMullen and Kathryn Solberg and Joanna Cheesman and Alessia Di Carlo and Kevin Yi Mi Ren and Sonal Varma and Shaila Merchant and Cecil Jay Engel and G Ross Walker and Andrea Gallo and Doris Jabs and Parvin Mousavi and Gabor Fichtinger and John F Rudan},
url = {https://aacrjournals.org/cancerres/article/84/9_Supplement/PO2-23-07/743683},
year = {2024},
date = {2024-01-01},
journal = {Cancer Research},
volume = {84},
issue = {9_Supplement},
pages = {PO2-23-07-PO2-23-07},
publisher = {The American Association for Cancer Research},
abstract = {Positive resection margins occur in approximately 25% of breast cancer (BCa) surgeries, requiring re-operation. Margin status is not routinely available during surgery; thus, technologies that identify residual cancer on the specimen or cavity are needed to provide intraoperative decision support that may reduce positive margin rates. Rapid evaporative ionization mass spectrometry (REIMS) is an emerging technique that chemically profiles the plume generated by tissue cauterization to classify the ablated tissue as either cancerous or non-cancerous, on the basis of detected lipid species. Although REIMS can distinguish cancer and non-cancerous breast tissue by the signals generated, it does not indicate the location of the classified tissue in real-time. Our objective was to combine REIMS with spatio-temporal navigation (navigated REIMS), and to compare performance of navigated REIMS with conventional …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Cernelev, Pavel-Dumitru; Moga, Kristof; Groves, Leah; Haidegger, Tamás; Fichtinger, Gabor; Ungi, Tamas
Determining boundaries of accurate tracking for electromagnetic sensors Conference
SPIE, 2023.
@conference{Cernelev2023,
title = {Determining boundaries of accurate tracking for electromagnetic sensors},
author = {Pavel-Dumitru Cernelev and Kristof Moga and Leah Groves and Tamás Haidegger and Gabor Fichtinger and Tamas Ungi},
editor = {Cristian A. Linte and Jeffrey H. Siewerdsen},
doi = {10.1117/12.2654428},
year = {2023},
date = {2023-04-03},
urldate = {2023-04-03},
publisher = {SPIE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Radcliffe, Olivia; Connolly, Laura; Ungi, Tamas; Yeo, Caitlin; Rudan, John F.; Fichtinger, Gabor; Mousavi, Parvin
Navigated surgical resection cavity inspection for breast conserving surgery Proceedings
2023.
@proceedings{Radcliffe2023,
title = {Navigated surgical resection cavity inspection for breast conserving surgery},
author = {Olivia Radcliffe and Laura Connolly and Tamas Ungi and Caitlin Yeo and John F. Rudan and Gabor Fichtinger and Parvin Mousavi},
doi = {10.1117/12.2654015},
year = {2023},
date = {2023-04-03},
abstract = {Up to 40% of Breast Conserving Surgery (BCS) patients must undergo repeat surgery because cancer is left behind in the resection cavity. The mobility of the breast resection cavity makes it difficult to localize residual cancer and, therefore, cavity shaving is a common technique for cancer removal. Cavity shaving involves removing an additional layer of tissue from the entire resection cavity, often resulting in unnecessary healthy tissue loss. In this study, we demonstrated a navigation system and open-source software module that facilitates visualization of the breast resection cavity for targeted localization of residual cancer.},
keywords = {},
pubstate = {published},
tppubtype = {proceedings}
}
Klosa, Elizabeth; Hisey, Rebecca; Hashtrudi-Zaad, Kian; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Comparing methods of identifying tissues for workflow recognition of simulated open hernia repair Conference
2023.
@conference{Klosa2023,
title = {Comparing methods of identifying tissues for workflow recognition of simulated open hernia repair},
author = {Elizabeth Klosa and Rebecca Hisey and Kian Hashtrudi-Zaad and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
url = {https://imno.ca/sites/default/files/ImNO2023Proceedings.pdf},
year = {2023},
date = {2023-03-24},
urldate = {2023-03-24},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Ndiaye, Fatou Bintou; Groves, Leah; Hisey, Rebecca; Ungi, Tamas; Diop, Idy; Mousavi, Parvin; Fichtinger, Gabor; Camara, Mamadou Samba
Design and realization of a computer-assisted nephrostomy guidance system Journal Article
In: pp. 1-6, 2023.
@article{fichtinger2023l,
title = {Design and realization of a computer-assisted nephrostomy guidance system},
author = {Fatou Bintou Ndiaye and Leah Groves and Rebecca Hisey and Tamas Ungi and Idy Diop and Parvin Mousavi and Gabor Fichtinger and Mamadou Samba Camara},
url = {https://ieeexplore.ieee.org/abstract/document/10253146/},
year = {2023},
date = {2023-01-01},
pages = {1-6},
publisher = {IEEE},
abstract = {Background and purpose
Computerized nephrostomy techniques exist today. Although relatively safe, the procedure is made difficult for inexperienced users by several factors. A computer-assisted nephrostomy guidance system has been studied to increase the success rate of this intervention and reduce the work and difficulties encountered by its operators.
Methods
To design the system, two methods were studied, and the system was ultimately designed based on method 2. SmartSysNephro is composed of a hardware part in which the user's manipulations are visualized and assisted by the computer. The nephrostomy procedure that the user simulates is monitored by webcam. The data from this Intel RealSense webcam were used to propose a CNN YOLO model.
Results
The results obtained show that the objectives set were broadly achieved. The SmartSysNephro system gives real-time warning …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Hashtrudi-Zaad, Kian; Hisey, Rebecca; Klosa, Elizabeth; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Using object detection for surgical tool recognition in simulated open inguinal hernia repair surgery Journal Article
In: vol. 12466, pp. 96-101, 2023.
@article{fichtinger2023p,
title = {Using object detection for surgical tool recognition in simulated open inguinal hernia repair surgery},
author = {Kian Hashtrudi-Zaad and Rebecca Hisey and Elizabeth Klosa and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12466/124660E/Using-object-detection-for-surgical-tool-recognition-in-simulated-open/10.1117/12.2654393.short},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {96-101},
publisher = {SPIE},
abstract = {Following the shift from time-based medical education to a competency-based approach, a computer-assisted training platform would help relieve some of the new time burden placed on physicians. A vital component of these platforms is the computation of competency metrics which are based on surgical tool motion. Recognizing the class and motion of surgical tools is one step in the development of a training platform. Object detection can achieve tool recognition. While previous literature has reported on tool recognition in minimally invasive surgeries, open surgeries have not received the same attention. Open Inguinal Hernia Repair (OIHR), a common surgery that general surgery residents must learn, is an example of such surgeries. We present a method for object detection to recognize surgical tools in simulated OIHR. Images were extracted from six video recordings of OIHR performed on phantoms. Tools …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Yeung, Chris; Ehrlich, Joshua; Jamzad, Amoon; Kaufmann, Martin; Rudan, John; Engel, Cecil Jay; Mousavi, Parvin; Ungi, Tamas; Fichtinger, Gabor
Cautery trajectory analysis for evaluation of resection margins in breast-conserving surgery Journal Article
In: vol. 12466, pp. 495-501, 2023.
@article{fichtinger2023q,
title = {Cautery trajectory analysis for evaluation of resection margins in breast-conserving surgery},
author = {Chris Yeung and Joshua Ehrlich and Amoon Jamzad and Martin Kaufmann and John Rudan and Cecil Jay Engel and Parvin Mousavi and Tamas Ungi and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12466/1246622/Cautery-trajectory-analysis-for-evaluation-of-resection-margins-in-breast/10.1117/12.2654497.short},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {495-501},
publisher = {SPIE},
abstract = {After breast-conserving surgery, positive margins occur when breast cancer cells are found on the resection margin, leading to a higher chance of recurrence and the need for repeat surgery. The NaviKnife is an electromagnetic tracking-based surgical navigation system that helps to provide visual and spatial feedback to the surgeon. In this study, we conduct a gross evaluation of this navigation system with respect to resection margins. The trajectory of the surgical cautery relative to ultrasound-visible tumor will be visualized, and its distance and location from the tumor will be compared with pathology reports. Six breast-conserving surgery cases that resulted in positive margins were performed using the NaviKnife system. Trackers were placed on the surgical tools and their positions in three-dimensional space were recorded throughout the procedure. The closest distance between the cautery and the tumor …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Mousavi, Parvin; Fichtinger, Gabor
Multi-catheter modelling in reconstructed 3D transrectal ultrasound images from prostate brachytherapy Journal Article
In: vol. 12466, pp. 126-135, 2023.
@article{fichtinger2023b,
title = {Multi-catheter modelling in reconstructed 3D transrectal ultrasound images from prostate brachytherapy},
author = {Nicole Kitner and Jessica R Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and Parvin Mousavi and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12466/124660I/Multi-catheter-modelling-in-reconstructed-3D-transrectal-ultrasound-images-from/10.1117/12.2654019.short},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {126-135},
publisher = {SPIE},
abstract = {High-dose-rate brachytherapy is an accepted standard-of-care treatment for prostate cancer. In this procedure, catheters are inserted using three-dimensional (3D) transrectal ultrasound image-guidance. Their positions are manually segmented for treatment planning and delivery. The transverse ultrasound sweep, which is subject to tip and depth error for catheter localization, is a commonly used ultrasound imaging option available for image acquisition. We propose a two-step pipeline that uses a deep-learning network and curve fitting to automatically localize and model catheters in transversely reconstructed 3D ultrasound images. In the first step, a 3D U-Net was trained to automatically segment all catheters in a 3D ultrasound image. Following this step, curve fitting was implemented to detect the shapes of individual catheters using polynomial fitting. Of the 343 catheters (from 20 patients) in the testing data, the …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
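The two-step pipeline above segments catheters with a 3D U-Net and then models each one by curve fitting. A minimal sketch of the second step only: fit x(z) and y(z) polynomials to the voxel coordinates of a single segmented catheter. The synthetic voxels stand in for U-Net output, and the quadratic degree is an assumption.

# Minimal sketch of the curve-fitting step: model one catheter as
# polynomials x(z), y(z) over its segmented voxel coordinates.
import numpy as np

def fit_catheter(voxels_xyz, degree=2):
    """Fit x(z) and y(z) polynomials to an (N, 3) catheter point set."""
    x, y, z = voxels_xyz.T
    return np.polyfit(z, x, degree), np.polyfit(z, y, degree)

rng = np.random.default_rng(2)
z = np.linspace(0, 80, 120)                        # insertion depth (mm)
voxels = np.column_stack([0.002 * z**2 + rng.normal(0, 0.2, z.size),
                          0.5 + 0.01 * z + rng.normal(0, 0.2, z.size), z])
px, py = fit_catheter(voxels)
z_eval = np.array([0.0, 40.0, 80.0])
centerline = np.column_stack([np.polyval(px, z_eval),
                              np.polyval(py, z_eval), z_eval])
print(np.round(centerline, 2))                     # modelled catheter path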
Klosa, Elizabeth; Hisey, Rebecca; Hashtrudi-Zaad, Kian; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Identifying tool-tissue interactions to distinguish steps in simulated open inguinal hernia repair Journal Article
In: vol. 12466, pp. 479-486, 2023.
@article{fichtinger2023s,
title = {Identifying tool-tissue interactions to distinguish steps in simulated open inguinal hernia repair},
author = {Elizabeth Klosa and Rebecca Hisey and Kian Hashtrudi-Zaad and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12466/1246620/Identifying-tool-tissue-interactions-to-distinguish-steps-in-simulated-open/10.1117/12.2654394.short},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {479-486},
publisher = {SPIE},
abstract = {As medical education adopts a competency-based training approach, assessment of skills and timely provision of formative feedback is required. Provision of such assessment and feedback places a substantial time burden on surgeons. To reduce this time burden, we look to develop a computer-assisted training platform to provide both instruction and feedback to residents learning open Inguinal Hernia Repairs (IHR). To provide feedback on residents’ technical skills, we must first find a method of workflow recognition of the IHR. We thus aim to recognize and distinguish between workflow steps of an open IHR based on the presence and frequencies of different tool-tissue interactions occurring during each step. Based on ground truth tissue segmentations and tool bounding boxes, we identify the visible tissues within a bounding box. This provides an estimation of which tissues a tool is interacting with. The …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
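The abstract estimates tool-tissue interactions by checking which tissue segmentations fall inside a tool's bounding box. A minimal sketch of that lookup follows; the label map, class names, and box coordinates are toy placeholders rather than the study's ground-truth annotations.

# Minimal sketch: count tissue labels inside a tool bounding box on a
# segmentation mask to estimate tool-tissue interactions.
import numpy as np

TISSUES = {0: "background", 1: "skin", 2: "hernia sac", 3: "spermatic cord"}

def tissues_in_box(label_mask, box):
    """Return {tissue name: pixel count} inside box = (x1, y1, x2, y2)."""
    x1, y1, x2, y2 = box
    patch = label_mask[y1:y2, x1:x2]
    labels, counts = np.unique(patch, return_counts=True)
    return {TISSUES[int(l)]: int(c) for l, c in zip(labels, counts) if l != 0}

mask = np.zeros((200, 200), dtype=np.uint8)
mask[50:120, 40:150] = 1                            # skin region
mask[80:110, 90:130] = 2                            # hernia sac region
print(tissues_in_box(mask, (85, 75, 140, 115)))     # tool bounding box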
Radcliffe, Olivia; Connolly, Laura; Ungi, Tamas; Yeo, Caitlin; Rudan, John F; Fichtinger, Gabor; Mousavi, Parvin
Navigated surgical resection cavity inspection for breast conserving surgery Journal Article
In: vol. 12466, pp. 234-241, 2023.
@article{fichtinger2023t,
title = {Navigated surgical resection cavity inspection for breast conserving surgery},
author = {Olivia Radcliffe and Laura Connolly and Tamas Ungi and Caitlin Yeo and John F Rudan and Gabor Fichtinger and Parvin Mousavi},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12466/124660Z/Navigated-surgical-resection-cavity-inspection-for-breast-conserving-surgery/10.1117/12.2654015.short},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {234-241},
publisher = {SPIE},
abstract = {Up to 40% of Breast Conserving Surgery (BCS) patients must undergo repeat surgery because cancer is left behind in the resection cavity. The mobility of the breast resection cavity makes it difficult to localize residual cancer and, therefore, cavity shaving is a common technique for cancer removal. Cavity shaving involves removing an additional layer of tissue from the entire resection cavity, often resulting in unnecessary healthy tissue loss. In this study, we demonstrated a navigation system and open-source software module that facilitates visualization of the breast resection cavity for targeted localization of residual cancer.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Orosz, Gábor; Szabó, Róbert Zsolt; Ungi, Tamás; Barr, Colton; Yeung, Chris; Fichtinger, Gábor; Gál, János; Haidegger, Tamás
Lung Ultrasound Imaging and Image Processing with Artificial Intelligence Methods for Bedside Diagnostic Examinations Journal Article
In: Acta Polytechnica Hungarica, vol. 20, iss. 8, 2023.
@article{fichtinger2023d,
title = {Lung Ultrasound Imaging and Image Processing with Artificial Intelligence Methods for Bedside Diagnostic Examinations},
author = {Gábor Orosz and Róbert Zsolt Szabó and Tamás Ungi and Colton Barr and Chris Yeung and Gábor Fichtinger and János Gál and Tamás Haidegger},
url = {https://acta.uni-obuda.hu/Orosz_Szabo_Ungi_Barr_Yeung_Fichtinger_Gal_Haidegger_137.pdf},
year = {2023},
date = {2023-01-01},
journal = {Acta Polytechnica Hungarica},
volume = {20},
issue = {8},
abstract = {Artificial Intelligence-assisted radiology has shown to offer significant benefits in clinical care. Physicians often face challenges in identifying the underlying causes of acute respiratory failure. One method employed by experts is the utilization of bedside lung ultrasound, although it has a significant learning curve. In our study, we explore the potential of a Machine Learning-based automated decision-support system to assist inexperienced practitioners in interpreting lung ultrasound scans. This system incorporates medical ultrasound, advanced data processing techniques, and a neural network implementation to achieve its objective. The article provides a comprehensive overview of the steps involved in data preparation and the implementation of the neural network. The accuracy and error rate of the most effective model are presented, accompanied by illustrative examples of their predictions. Furthermore, the paper concludes with an evaluation of the results, identification of limitations, and recommendations for future enhancements.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Barr, Keiran; Hookey, Lawrence; Ungi, Tamas; Fichtinger, Gabor; Holden, Matthew
Analyzing colonoscopy training learning curves using comparative hand tracking assessment Journal Article
In: vol. 12466, pp. 466-472, 2023.
@article{fichtinger2023w,
title = {Analyzing colonoscopy training learning curves using comparative hand tracking assessment},
author = {Keiran Barr and Lawrence Hookey and Tamas Ungi and Gabor Fichtinger and Matthew Holden},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12466/124661Y/Analyzing-colonoscopy-training-learning-curves-using-comparative-hand-tracking-assessment/10.1117/12.2654309.short},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {466-472},
publisher = {SPIE},
abstract = {A competency-based approach for colonoscopy training is particularly important, since the amount of practice required for proficiency varies widely between trainees. Though numerous objective proficiency assessment frameworks have been validated in the literature, these frameworks rely on expert observers. This process is time-consuming, and as a result, there has been increased interest in automated proficiency rating of colonoscopies. This work aims to investigate sixteen automatically computed performance metrics, and whether they can measure improvements in novices following a series of practice attempts. This involves calculating motion-tracking parameters for three groups: untrained novices, those same novices after undergoing training exercises, and experts. Both groups had electromagnetic tracking markers fixed to their hands and the scope tip. Each participant performed eight testing …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
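The study computes sixteen motion metrics from electromagnetically tracked hand and scope positions. Two representative metrics (total path length and mean speed) can be sketched from a timestamped position stream; the trajectory below is synthetic and the full metric set is not reproduced.

# Minimal sketch of two representative motion metrics from timestamped
# tracker positions: total path length and mean speed.
import numpy as np

def motion_metrics(t, positions):
    """Path length (mm) and mean speed (mm/s) of an (N, 3) trajectory."""
    steps = np.linalg.norm(np.diff(positions, axis=0), axis=1)
    return steps.sum(), steps.sum() / (t[-1] - t[0])

rng = np.random.default_rng(3)
t = np.linspace(0, 60, 600)                          # 10 Hz for 60 s
pos = np.cumsum(rng.normal(0, 0.5, size=(600, 3)), axis=0)
length, speed = motion_metrics(t, pos)
print(f"path length {length:.0f} mm, mean speed {speed:.1f} mm/s")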
Pose-Díez-de-la-Lastra, Alicia; Ungi, Tamas; Morton, David; Fichtinger, Gabor; Pascau, Javier
Real-time integration between Microsoft HoloLens 2 and 3D Slicer with demonstration in pedicle screw placement planning Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 18, iss. 11, pp. 2023-2032, 2023.
@article{fichtinger2023f,
title = {Real-time integration between Microsoft HoloLens 2 and 3D Slicer with demonstration in pedicle screw placement planning},
author = {Alicia Pose-Díez-de-la-Lastra and Tamas Ungi and David Morton and Gabor Fichtinger and Javier Pascau},
url = {https://link.springer.com/article/10.1007/s11548-023-02977-0},
year = {2023},
date = {2023-01-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {18},
issue = {11},
pages = {2023-2032},
publisher = {Springer International Publishing},
abstract = {Purpose
To date, there has been a lack of software infrastructure to connect 3D Slicer to any augmented reality (AR) device. This work describes a novel connection approach using Microsoft HoloLens 2 and OpenIGTLink, with a demonstration in pedicle screw placement planning.
Methods
We developed an AR application in Unity that is wirelessly rendered onto Microsoft HoloLens 2 using Holographic Remoting. Simultaneously, Unity connects to 3D Slicer using the OpenIGTLink communication protocol. Geometrical transform and image messages are transferred between both platforms in real time. Through the AR glasses, a user visualizes a patient’s computed tomography overlaid onto virtual 3D models showing anatomical structures. We technically evaluated the system by measuring message transference latency between the platforms. Its functionality was assessed in pedicle screw placement planning …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
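The system above exchanges transform and image messages between Unity/HoloLens 2 and 3D Slicer over OpenIGTLink. As a rough idea of what streaming a transform over that protocol looks like, here is a sketch using the open-source pyigtl package; this is an assumption for illustration (the paper itself connects Unity to Slicer's OpenIGTLink interface, not pyigtl), and the device name and update rate are arbitrary.

# Minimal sketch: stream a 4x4 transform over OpenIGTLink using the
# open-source pyigtl package (an illustrative stand-in, not the
# authors' code). Device name and rate are arbitrary.
import time
import numpy as np
import pyigtl  # pip install pyigtl

server = pyigtl.OpenIGTLinkServer(port=18944)      # Slicer's default port
pose = np.eye(4)
while not server.is_connected():                   # wait for a peer
    time.sleep(0.1)
for step in range(100):
    pose[0, 3] = 0.5 * step                        # animate a translation (mm)
    message = pyigtl.TransformMessage(pose, device_name="NeedleToTracker")
    server.send_message(message)
    time.sleep(0.05)                               # ~20 Hz update rate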
Szabó, Róbert Zsolt; Orosz, Gábor; Ungi, Tamás; Barr, Colton; Yeung, Chris; Incze, Roland; Fichtinger, Gabor; Gál, János; Haidegger, Tamás
Automation of lung ultrasound imaging and image processing for bedside diagnostic examinations Journal Article
In: pp. 000779-000784, 2023.
@article{fichtinger2023h,
title = {Automation of lung ultrasound imaging and image processing for bedside diagnostic examinations},
author = {Róbert Zsolt Szabó and Gábor Orosz and Tamás Ungi and Colton Barr and Chris Yeung and Roland Incze and Gabor Fichtinger and János Gál and Tamás Haidegger},
url = {https://ieeexplore.ieee.org/abstract/document/10158672/},
year = {2023},
date = {2023-01-01},
pages = {000779-000784},
publisher = {IEEE},
abstract = {The causes of acute respiratory failure can be difficult to identify for physicians. Experts can differentiate these causes using bedside lung ultrasound, but lung ultrasound has a considerable learning curve. We investigate if an automated decision-support system could help novices interpret lung ultrasound scans. The system utilizes medical ultrasound, data processing, and a neural network implementation to achieve this goal. The article details the steps taken in the data preparation, and the implementation of the neural network. The best model’s accuracy and error rate are presented, along with examples of its predictions. The paper concludes with an evaluation of the results, identification of limitations, and suggestions for future improvements.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Morton, David; Connolly, Laura; Groves, Leah; Sunderland, Kyle; Jamzad, Amoon; Rudan, John F; Fichtinger, Gabor; Ungi, Tamas; Mousavi, Parvin
Tracked tissue sensing for tumor bed inspection Journal Article
In: vol. 12466, pp. 378-385, 2023.
@article{fichtinger2023x,
title = {Tracked tissue sensing for tumor bed inspection},
author = {David Morton and Laura Connolly and Leah Groves and Kyle Sunderland and Amoon Jamzad and John F Rudan and Gabor Fichtinger and Tamas Ungi and Parvin Mousavi},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12466/124661K/Tracked-tissue-sensing-for-tumor-bed-inspection/10.1117/12.2654217.short},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {378-385},
publisher = {SPIE},
abstract = {Up to 30% of breast-conserving surgery patients require secondary surgery to remove cancerous tissue missed in the initial intervention. We hypothesize that tracked tissue sensing can improve the success rate of breast-conserving surgery. Tissue sensor tracking allows the surgeon to intraoperatively scan the tumor bed for leftover cancerous tissue. In this study, we characterize the performance of our tracked optical scanning testbed using an experimental pipeline. We assess the Dice similarity coefficient, accuracy, and latency of the testbed.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Austin, Catherine; Hisey, Rebecca; O'Driscoll, Olivia; Ungi, Tamas; Fichtinger, Gabor
Using uncertainty quantification to improve reliability of video-based skill assessment metrics in central venous catheterization Journal Article
In: vol. 12466, pp. 84-88, 2023.
@article{fichtinger2023y,
title = {Using uncertainty quantification to improve reliability of video-based skill assessment metrics in central venous catheterization},
author = {Catherine Austin and Rebecca Hisey and Olivia O'Driscoll and Tamas Ungi and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12466/124660C/Using-uncertainty-quantification-to-improve-reliability-of-video-based-skill/10.1117/12.2654419.short},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {84-88},
publisher = {SPIE},
abstract = {Computed-based skill assessment relies on accurate metrics to provide comprehensive feedback to trainees. Improving the accuracy of video-based metrics computed using object detection is generally done by improving the performance of the object detection network, however increasing its performance requires resources that cannot always be obtained. This study aims to improve the accuracy of metrics in central venous catheterization without requiring a high performing object detection network by removing false positive predictions identified using uncertainty quantification. The uncertainty for each bounding box was calculated using an entropy equation. The uncertainties were then compared to an uncertainty threshold computed using the optimal point of a Receiver Operating Characteristic curve. Predictions were removed if the uncertainty fell below the predefined threshold. 50 videos were recorded and …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
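The abstract filters false-positive detections using an entropy-based uncertainty and an ROC-derived threshold. A minimal sketch of entropy filtering follows; the class probabilities, labels, and fixed threshold are illustrative assumptions, since the paper derives its threshold from a Receiver Operating Characteristic curve.

# Minimal sketch: drop object-detection outputs whose class-probability
# entropy exceeds a threshold. Detections and threshold are illustrative.
import numpy as np

def entropy(probs):
    """Shannon entropy (bits) of one detection's class distribution."""
    p = np.asarray(probs, dtype=float)
    p = p[p > 0]
    return float(-(p * np.log2(p)).sum())

detections = [
    {"label": "needle",    "probs": [0.95, 0.03, 0.02]},  # confident
    {"label": "syringe",   "probs": [0.40, 0.35, 0.25]},  # ambiguous
    {"label": "guidewire", "probs": [0.70, 0.20, 0.10]},
]
THRESHOLD = 1.0  # bits; the paper instead derives this from an ROC curve
kept = [d["label"] for d in detections if entropy(d["probs"]) < THRESHOLD]
print("kept detections:", kept)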
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor; Korzeniowski, Martin
Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction Conference
Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting, Canadian Association for Radiation Oncologists, 2022.
@conference{Kitner2022ab,
title = {Automated catheter localization in ultrasound images from High-dose-rate prostate brachytherapy using deep learning and feature extraction},
author = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger and Martin Korzeniowski},
year = {2022},
date = {2022-09-01},
urldate = {2022-09-01},
booktitle = {Canadian Association for Radiation Oncologists (CARO) Annual Scientific Meeting},
publisher = {Canadian Association for Radiation Oncologists},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Connolly, Laura; Deguet, Anton; Leonard, Simon; Tokuda, Junichi; Ungi, Tamas; Krieger, Axel; Kazanzides, Peter; Mousavi, Parvin; Fichtinger, Gabor; Taylor, Russell H.
Bridging 3D Slicer and ROS2 for Image-Guided Robotic Interventions Journal Article
In: Sensors, vol. 22, 2022.
@article{Connolly2022c,
title = {Bridging 3D Slicer and ROS2 for Image-Guided Robotic Interventions},
author = {Laura Connolly and Anton Deguet and Simon Leonard and Junichi Tokuda and Tamas Ungi and Axel Krieger and Peter Kazanzides and Parvin Mousavi and Gabor Fichtinger and Russell H. Taylor},
doi = {10.3390/s22145336},
year = {2022},
date = {2022-07-01},
journal = {Sensors},
volume = {22},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline Conference
Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting, Canadian Organization of Medical Physicists, 2022.
@conference{Kitner2022a,
title = {Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline},
author = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
year = {2022},
date = {2022-06-01},
urldate = {2022-06-01},
booktitle = {Canadian Organization of Medical Physicists (COMP) Annual Scientific Meeting},
publisher = {Canadian Organization of Medical Physicists},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hu, Zoe; Fauerbach, Paola V. Nasute; Yeung, Chris; Ungi, Tamas; Rudan, John; Engel, C. Jay; Mousavi, Parvin; Fichtinger, Gabor; Jabs, Doris
Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 17, no. 9, pp. 1663–1672, 2022.
@article{Hu2022,
title = {Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation},
author = {Zoe Hu and Paola V. Nasute Fauerbach and Chris Yeung and Tamas Ungi and John Rudan and C. Jay Engel and Parvin Mousavi and Gabor Fichtinger and Doris Jabs},
doi = {10.1007/s11548-022-02658-4},
year = {2022},
date = {2022-05-01},
urldate = {2022-05-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {17},
number = {9},
pages = {1663–1672},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R.; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Joshi, C. P.; Mousavi, Parvin; Fichtinger, Gabor
Automated Catheter Segmentation in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy Conference
Imaging Network Ontario (IMNO) 2022 Symposium, Imaging Network of Ontario, Online, 2022.
@conference{Kitner2022ac,
title = {Automated Catheter Segmentation in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy},
author = {Nicole Kitner and Jessica R. Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and C. P. Joshi and Parvin Mousavi and Gabor Fichtinger},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
booktitle = {Imaging Network Ontario (IMNO) 2022 Symposium},
publisher = {Imaging Network of Ontario},
address = {Online},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Klosa, Elizabeth; Hisey, Rebecca; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Tissue segmentation for workflow recognition in open inguinal hernia repair training Conference
SPIE Medical Imaging, San Diego, 2022.
@conference{Klosa2022a,
title = {Tissue segmentation for workflow recognition in open inguinal hernia repair training},
author = {Elizabeth Klosa and Rebecca Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Klosa2022a.pdf},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
booktitle = {SPIE Medical Imaging},
publisher = {SPIE Medical Imaging},
address = {San Diego},
organization = {SPIE Medical Imaging},
abstract = {PURPOSE: As medical education adopts a competency-based training method, experts are spending substantial amounts of time instructing and assessing trainees’ competence. In this study, we look to develop a computer-assisted training platform that can provide instruction and assessment of open inguinal hernia repairs without needing an expert observer. We recognize workflow tasks based on the tool-tissue interactions, suggesting that we first need a method to identify tissues. This study aims to train a neural network in identifying tissues in a low-cost phantom as we work towards identifying the tool-tissue interactions needed for task recognition. METHODS: Eight simulated tissues were segmented throughout five videos from experienced surgeons who performed open inguinal hernia repairs on phantoms. A U-Net was trained using leave-one-user-out cross validation. The average F-score, false positive rate and false negative rate were calculated for each tissue to evaluate the U-Net’s performance. RESULTS: Higher F-scores and lower false negative and positive rates were recorded for the skin, hernia sac, spermatic cord, and nerves, while slightly lower metrics were recorded for the subcutaneous tissue, Scarpa’s fascia, external oblique aponeurosis and superficial epigastric vessels. CONCLUSION: The U-Net performed better in recognizing tissues that were relatively larger in size and more prevalent, while struggling to recognize smaller tissues only briefly visible. Since workflow recognition does not require perfect segmentation, we believe our U-Net is sufficient in recognizing the tissues of an inguinal hernia repair phantom. Future studies will explore combining our segmentation U-Net with tool detection as we work towards workflow recognition.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
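The per-tissue metrics reported in this abstract (F-score, false positive rate, false negative rate) follow directly from pixel-wise confusion counts between predicted and ground-truth label maps. A short sketch, assuming integer label maps of identical shape; the function name and label encoding are hypothetical:

import numpy as np

def per_class_metrics(pred, truth, n_classes):
    """F-score, FPR and FNR per class from two integer label maps."""
    out = {}
    for c in range(n_classes):
        tp = np.sum((pred == c) & (truth == c))
        fp = np.sum((pred == c) & (truth != c))
        fn = np.sum((pred != c) & (truth == c))
        tn = np.sum((pred != c) & (truth != c))
        out[c] = {
            "f_score": 2 * tp / (2 * tp + fp + fn) if (2 * tp + fp + fn) else 0.0,
            "fpr": fp / (fp + tn) if (fp + tn) else 0.0,
            "fnr": fn / (fn + tp) if (fn + tp) else 0.0,
        }
    return out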
O’Driscoll, Olivia; Hisey, Rebecca; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of object detection for skill assessment in central venous catheterization Conference
SPIE Medical Imaging, San Diego, 2022.
@conference{ODriscoll2022a,
title = {Feasibility of object detection for skill assessment in central venous catheterization},
author = {Olivia O’Driscoll and Rebecca Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2022a.pdf},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
booktitle = {SPIE Medical Imaging},
publisher = {SPIE Medical Imaging},
address = {San Diego},
organization = {SPIE Medical Imaging},
abstract = {Purpose: Computer-assisted surgical skill assessment methods have traditionally relied on tracking tool motion with physical sensors. These tracking systems can be expensive, bulky, and impede tool function. Recent advances in object detection networks have made it possible to quantify tool motion using only a camera. These advances open the door for a low-cost alternative to current physical tracking systems for surgical skill assessment. This study determines the feasibility of using metrics computed with object detection by comparing them to widely accepted metrics computed using traditional tracking methods in central venous catheterization. Methods: Both video and tracking data were recorded from participants performing central venous catheterization on a venous access phantom. A Faster Region-Based Convolutional Neural Network was trained to recognize the ultrasound probe and syringe on the video data. Tracking-based metrics were computed using the Perk Tutor extension of 3D Slicer. The path length and usage time for each tool were then computed using both the video and tracking data. The metrics from object detection and tracking were compared using Spearman rank correlation. Results: The path lengths had a rank correlation coefficient of 0.22 (p<0.03) for the syringe and 0.35 (p<0.001) for the ultrasound probe. For the usage times, the correlation coefficient was 0.37 (p<0.001) for the syringe and 0.34 (p<0.001) for the ultrasound probe. Conclusions: The video-based metrics correlated significantly with the tracked metrics, suggesting that object detection could be a feasible skill assessment method for central venous catheterization.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
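The comparison described here, computing the same motion metric from two data sources and rank-correlating them, is easy to sketch. Per-trial tool positions are assumed (bounding-box centroids from video, sensor positions from tracking); the placeholder random data below merely stands in for both:

import numpy as np
from scipy.stats import spearmanr

def path_length(points):
    """Sum of frame-to-frame Euclidean distances; points has shape (n_frames, dims)."""
    return float(np.linalg.norm(np.diff(points, axis=0), axis=1).sum())

rng = np.random.default_rng(0)
video_metric = [path_length(rng.random((100, 2))) for _ in range(20)]    # placeholder 2D centroids
tracked_metric = [path_length(rng.random((100, 3))) for _ in range(20)]  # placeholder 3D positions
rho, p = spearmanr(video_metric, tracked_metric)
print(f"Spearman rho = {rho:.2f}, p = {p:.3f}")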
O’Driscoll, Olivia; Hisey, Rebecca; Holden, M.; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Feasibility of using object detection for performance assessment in central venous catheterization Conference
Imaging Network of Ontario Symposium, 2022.
@conference{ODriscoll2022b,
title = {Feasibility of using object detection for performance assessment in central venous catheterization},
author = {Olivia O’Driscoll and Rebecca Hisey and M. Holden and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/ODriscoll2021b.pdf},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
booktitle = {Imaging Network of Ontario Symposium},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Klosa, Elizabeth; Hisey, Rebecca; Nazari, Tahmina; Wiggers, Theo; Zevin, Boris; Ungi, Tamas; Fichtinger, Gabor
Identifying tissues for task recognition in training of open inguinal hernia repairs Conference
Imaging Network of Ontario Symposium, 2022.
@conference{Klosa2022b,
title = {Identifying tissues for task recognition in training of open inguinal hernia repairs},
author = {Elizabeth Klosa and Rebecca Hisey and Tahmina Nazari and Theo Wiggers and Boris Zevin and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Klosa2022b.pdf},
year = {2022},
date = {2022-02-01},
urldate = {2022-02-01},
booktitle = {Imaging Network of Ontario Symposium},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Lee, H. Y.; Hisey, Rebecca; Holden, Matthew; Liu, John; Ungi, Tamas; Fichtinger, Gabor; Law, Christine
Evaluating Faster R-CNN for cataract surgery tool detection using microscopy video Conference
Imaging Network of Ontario Symposium, 2022.
@conference{Lee2022a,
title = {Evaluating Faster R-CNN for cataract surgery tool detection using microscopy video},
author = {H. Y. Lee and Rebecca Hisey and Matthew Holden and John Liu and Tamas Ungi and Gabor Fichtinger and Christine Law},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Imaging Network of Ontario Symposium},
abstract = {Introduction: Traditional methods of cataract surgery skill assessment rely on human expert supervision. This exposes the trainee to interobserver variability and inconsistent feedback. Alternative measures such as sensor-based instrument motion analysis promise objective assessment [1]. However, sensor-based systems are logistically complicated and expensive to obtain. Previous studies have demonstrated a strong correlation between sensor-based metrics and two-dimensional motion metrics obtained from object detection [2]. Reliable object detection is the foundation for computing such performance metrics. Therefore, the objective of this study is to evaluate the performance of an object detection network, namely Faster Region-Based Convolutional Neural Network (FRCNN), in recognition of cataract surgery tools in microscopy video. Methods: Microscope video was recorded for 25 trials of cataract surgery on an artificial eye. The trials were performed by a cohort consisting of one senior surgeon and four junior surgeons and manually annotated for bounding box locations of the cataract surgery tools (Figure 1). The surgical tools used included: forceps, diamond keratomes, viscoelastic cannulas, and cystotome needles. An FRCNN [3] was trained on a total of 130,614 frames for object detection. We used five-fold cross-validation, using a leave-one-user-out method. In this manner, all videos from one surgeon were reserved for testing and the frames from the remaining 20 videos were divided among training and validation. Network performance was evaluated via mean average precision (mAP), which is defined as the area under the precision/recall curve. Samples were considered correctly identified when the intersection over union (IoU) between the ground truth and predicted bounding boxes was greater than 0.5. Results: The overall mAP of the network was 0.63. Tool-specific mAPs ranged between 0.49 and 0.96 (Table 1). The high accuracy in detection of the cystotome needle is likely due to the distinct size and shape of the tool tip. The diamond keratome had the lowest mAP of any of the tools recognized; however, this may be attributed to variations in the appearance of the tool tip (Figure 2). Conclusions: The FRCNN was able to recognize the surgical tools used in cataract surgery with reasonably high accuracy. Now that we know the network can sufficiently recognize the surgical tools, our next goal is to use this network to compute motion-based performance metrics. Future work seeks to validate these performance metrics against those obtained from sensor-based tracking and against expert evaluations. This serves as a first step towards providing consistent and accessible feedback for future trainees learning cataract surgery.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
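The evaluation criterion in this abstract, counting a detection as correct when its intersection over union with the ground truth exceeds 0.5, reduces to a few lines. Boxes are assumed to be (x1, y1, x2, y2) tuples; this is a generic sketch, not the authors' code:

def iou(box_a, box_b):
    """Intersection over union of two axis-aligned boxes given as (x1, y1, x2, y2)."""
    ax1, ay1, ax2, ay2 = box_a
    bx1, by1, bx2, by2 = box_b
    iw = max(0.0, min(ax2, bx2) - max(ax1, bx1))
    ih = max(0.0, min(ay2, by2) - max(ay1, by1))
    inter = iw * ih
    union = (ax2 - ax1) * (ay2 - ay1) + (bx2 - bx1) * (by2 - by1) - inter
    return inter / union if union > 0 else 0.0

def is_correct(pred_box, gt_box, threshold=0.5):
    """A detection counts as correct when IoU with ground truth exceeds the threshold."""
    return iou(pred_box, gt_box) > threshold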
Connolly, Laura; Jamzad, Amoon; Nikniazi, Arash; Poushimin, Rana; Lasso, Andras; Sunderland, Kyle R.; Ungi, Tamas; Nunzi, Jean Michel; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
An open-source testbed for developing image-guided robotic tumor-bed inspection Conference
Imaging Network of Ontario (ImNO) Symposium, 2022.
@conference{connolly2022b,
title = {An open-source testbed for developing image-guided robotic tumor-bed inspection},
author = {Laura Connolly and Amoon Jamzad and Arash Nikniazi and Rana Poushimin and Andras Lasso and Kyle R. Sunderland and Tamas Ungi and Jean Michel Nunzi and John Rudan and Gabor Fichtinger and Parvin Mousavi},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/01/Connolly2022b.pdf},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Imaging Network of Ontario (ImNO) Symposium},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Pandey, Prashant; Hohlmann, Benjamin; Brößner, Peter; Hacihaliloglu, Ilker; Barr, Keiran; Ungi, Tamas; Zettinig, Oliver; Prevost, Raphael; Dardenne, Guillaume; Fanti, Zian; Wein, Wolfgang; Stindel, Eric; Cosio, Fernando Arambula; Guy, Pierre; Fichtinger, Gabor; Radermacher, Klaus; Hodgson, Antony J
Standardized evaluation of current ultrasound bone segmentation algorithms on multiple datasets Journal Article
In: Proceedings of The 20th Annual Meeting of the Interna, vol. 5, pp. 148-153, 2022.
@article{fichtinger2022h,
title = {Standardized evaluation of current ultrasound bone segmentation algorithms on multiple datasets},
author = {Prashant Pandey and Benjamin Hohlmann and Peter Brößner and Ilker Hacihaliloglu and Keiran Barr and Tamas Ungi and Oliver Zettinig and Raphael Prevost and Guillaume Dardenne and Zian Fanti and Wolfgang Wein and Eric Stindel and Fernando Arambula Cosio and Pierre Guy and Gabor Fichtinger and Klaus Radermacher and Antony J Hodgson},
url = {https://easychair.org/publications/download/4mhjf},
year = {2022},
date = {2022-01-01},
journal = {Proceedings of The 20th Annual Meeting of the Interna},
volume = {5},
pages = {148-153},
abstract = {Ultrasound (US) bone segmentation is an important component of US-guided orthopaedic procedures. While there are many published segmentation techniques, there is no direct way to compare their performance. We present a solution to this, by curating a multi-institutional set of US images and corresponding segmentations, and systematically evaluating six previously-published bone segmentation algorithms using consistent metric definitions. We find that learning-based segmentation methods outperform traditional algorithms that rely on hand-crafted image features, as measured by their Dice scores, RMS distance errors and segmentation success rates. However, there is no single best performing algorithm across the datasets, emphasizing the need for carefully evaluating techniques on large, heterogenous datasets. The datasets and evaluation framework described can be used to accelerate development of new segmentation algorithms.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
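Of the metrics used in this standardized evaluation, the Dice score is the most compact to state. A minimal sketch over binary masks (the mask representation is an assumption):

import numpy as np

def dice(pred, truth):
    """Dice score of two boolean masks of identical shape."""
    inter = np.logical_and(pred, truth).sum()
    total = pred.sum() + truth.sum()
    return 2.0 * inter / total if total > 0 else 1.0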
Ehrlich, Josh; Jamzad, Amoon; Asselin, Mark; Rodgers, Jessica Robin; Kaufmann, Martin; Haidegger, Tamas; Rudan, John; Mousavi, Parvin; Fichtinger, Gabor; Ungi, Tamas
Sensor-Based Automated Detection of Electrosurgical Cautery States Journal Article
In: Sensors, vol. 22, iss. 15, pp. 5808, 2022.
@article{fichtinger2022i,
title = {Sensor-Based Automated Detection of Electrosurgical Cautery States},
author = {Josh Ehrlich and Amoon Jamzad and Mark Asselin and Jessica Robin Rodgers and Martin Kaufmann and Tamas Haidegger and John Rudan and Parvin Mousavi and Gabor Fichtinger and Tamas Ungi},
url = {https://www.mdpi.com/1424-8220/22/15/5808},
year = {2022},
date = {2022-01-01},
journal = {Sensors},
volume = {22},
issue = {15},
pages = {5808},
publisher = {MDPI},
abstract = {In computer-assisted surgery, it is typically required to detect when the tool comes into contact with the patient. In activated electrosurgery, this is known as the energy event. By continuously tracking the electrosurgical tools’ location using a navigation system, energy events can help determine locations of sensor-classified tissues. Our objective was to detect the energy event and determine the settings of electrosurgical cautery—robustly and automatically based on sensor data. This study aims to demonstrate the feasibility of using the cautery state to detect surgical incisions, without disrupting the surgical workflow. We detected current changes in the wires of the cautery device and grounding pad using non-invasive current sensors and an oscilloscope. An open-source software was implemented to apply machine learning on sensor data to detect energy events and cautery settings. Our methods classified each cautery state at an average accuracy of 95.56% across different tissue types and energy level parameters altered by surgeons during an operation. Our results demonstrate the feasibility of automatically identifying energy events during surgical incisions, which could be an important safety feature in robotic and computer-integrated surgery. This study provides a key step towards locating tissue classifications during breast cancer operations and reducing the rate of positive margins.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
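The classification step described in this abstract, mapping windowed current-sensor features to cautery states, has the shape of a standard supervised learning problem. The sketch below only illustrates that shape with placeholder data and an off-the-shelf classifier; the features, labels, and model choice are assumptions, not the authors' open-source pipeline:

import numpy as np
from sklearn.ensemble import RandomForestClassifier
from sklearn.model_selection import cross_val_score

rng = np.random.default_rng(0)
X = rng.random((500, 8))      # placeholder per-window features from the current traces
y = rng.integers(0, 3, 500)   # placeholder states, e.g. off / cut / coagulation
clf = RandomForestClassifier(n_estimators=100, random_state=0)
print("mean CV accuracy:", cross_val_score(clf, X, y, cv=5).mean())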
Diao, Babacar; Bagayogo, Ndèye Aissatou; Carreras, Nayra Pumar; Halle, Michael; Ruiz-Alzola, Juan; Ungi, Tamas; Fichtinger, Gabor; Kikinis, Ron
The use of 3D digital anatomy model improves the communication with patients presenting with prostate disease: The first experience in Senegal Journal Article
In: PLOS ONE, vol. 17, iss. 12, pp. e0277397, 2022.
@article{fichtinger2022k,
title = {The use of 3D digital anatomy model improves the communication with patients presenting with prostate disease: The first experience in Senegal},
author = {Babacar Diao and Ndèye Aissatou Bagayogo and Nayra Pumar Carreras and Michael Halle and Juan Ruiz-Alzola and Tamas Ungi and Gabor Fichtinger and Ron Kikinis},
url = {https://journals.plos.org/plosone/article?id=10.1371/journal.pone.0277397},
year = {2022},
date = {2022-01-01},
journal = {PLOS ONE},
volume = {17},
issue = {12},
pages = {e0277397},
publisher = {Public Library of Science},
abstract = {Objectives
We hypothesized that the use of an interactive 3D digital anatomy model can improve the quality of communication with patients about prostate disease.
Methods
A 3D digital anatomy model of the prostate was created from an MRI scan, according to McNeal’s zonal anatomy classification. During urological consultation, the physician presented the digital model on a computer and used it to explain the disease and available management options. The experience of patients and physicians was recorded in questionnaires.
Results
The main findings were as follows: 308 patients and 47 physicians participated in the study. In the patient group, 96.8% reported an improved level of understanding of prostate disease and 90.6% reported an improved ability to ask questions during consultation. Among the physicians, 91.5% reported improved communication skills and 100% reported an improved ability to obtain patient consent for subsequent treatment. At the same time, 76.6% of physicians noted that using the computer model lengthened the consultation.
Conclusion
This exploratory study found that the use of a 3D digital anatomy model in urology consultations was received overwhelmingly favorably by both patients and physicians, and it was perceived to improve the quality of communication between patient and physician. A randomized study is needed to confirm the preliminary findings and further quantify the improvements in the quality of patient-physician communication.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
We hypothesized that the use of an interactive 3D digital anatomy model can improve the quality of communication with patients about prostate disease.
Methods
A 3D digital anatomy model of the prostate was created from an MRI scan, according to McNeal’s zonal anatomy classification. During urological consultation, the physician presented the digital model on a computer and used it to explain the disease and available management options. The experience of patients and physicians was recorded in questionnaires.
Results
The main findings were as follows: 308 patients and 47 physicians participated in the study. In the patient group, 96.8% reported an improved level of understanding of prostate disease and 90.6% reported an improved ability to ask questions during consultation. Among the physicians, 91.5% reported improved communication skills and 100% reported an improved ability to obtain patient consent for subsequent treatment. At the same time, 76.6% of physicians noted that using the computer model lengthened the consultation.
Conclusion
This exploratory study found that the use of a 3D digital anatomy model in urology consultations was received overwhelmingly favorably by both patients and physicians, and it was perceived to improve the quality of communication between patient and physician. A randomized study is needed to confirm the preliminary findings and further quantify the improvements in the quality of patient-physician communication.
Kitner, Nicole; Rodgers, Jessica R; Ungi, Tamas; Olding, Timothy; Joshi, Chandra; Mousavi, Parvin; Fichtinger, Gabor; Korzeniowski, Martin
49: Automated Catheter Tracking in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy Using Deep Learning and Feature Extraction Journal Article
In: Radiotherapy and Oncology, vol. 174, pp. S23-S24, 2022.
@article{fichtinger2022o,
title = {49: Automated Catheter Tracking in 3D Ultrasound Images from High-Dose-Rate Prostate Brachytherapy Using Deep Learning and Feature Extraction},
author = {Nicole Kitner and Jessica R Rodgers and Tamas Ungi and Timothy Olding and Chandra Joshi and Parvin Mousavi and Gabor Fichtinger and Martin Korzeniowski},
url = {https://scholar.google.com/scholar?cluster=2766988044564319338&hl=en&oi=scholarr},
year = {2022},
date = {2022-01-01},
journal = {Radiotherapy and Oncology},
volume = {174},
pages = {S23-S24},
publisher = {Elsevier},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Kitner, Nicole; Rodgers, Jessica R; Ungi, Tamas; Korzeniowski, Martin; Olding, Tim; Joshi, Chandra P; Mousavi, Parvin; Fichtinger, Gabor
Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline Journal Article
In: vol. 49, iss. 8, pp. 5662-5662, 2022.
@article{fichtinger2022p,
title = {Automatic catheter modelling in 3D transrectal ultrasound images from high-dose-rate prostate brachytherapy using a deep learning and feature extraction pipeline},
author = {Nicole Kitner and Jessica R Rodgers and Tamas Ungi and Martin Korzeniowski and Tim Olding and Chandra P Joshi and Parvin Mousavi and Gabor Fichtinger},
url = {https://scholar.google.com/scholar?cluster=1706675649324850852&hl=en&oi=scholarr},
year = {2022},
date = {2022-01-01},
volume = {49},
issue = {8},
pages = {5662-5662},
publisher = {WILEY},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Austin, Catherine; Hisey, Rebecca; O'Driscoll, Olivia; Camire, Daenis; Erb, Jason; Howes, Daniel; Ungi, Tamas; Fichtinger, Gabor
Recognizing multiple needle insertion attempts for performance assessment in central venous catheterization training Journal Article
In: vol. 12034, pp. 518-524, 2022.
@article{fichtinger2022r,
title = {Recognizing multiple needle insertion attempts for performance assessment in central venous catheterization training},
author = {Catherine Austin and Rebecca Hisey and Olivia O'Driscoll and Daenis Camire and Jason Erb and Daniel Howes and Tamas Ungi and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/12034/1203428/Recognizing-multiple-needle-insertion-attempts-for-performance-assessment-in-central/10.1117/12.2613190.short},
year = {2022},
date = {2022-01-01},
volume = {12034},
pages = {518-524},
publisher = {SPIE},
abstract = {Purpose
Computer-assisted skill assessment has traditionally been focused on general metrics related to tool motion and usage time. While these metrics are important for an overall evaluation of skill, they do not address critical errors made during the procedure. This study examines the effectiveness of utilizing object detection to quantify the critical error of making multiple needle insertion attempts in central venous catheterization.
Methods
6860 images were annotated with ground truth bounding boxes around the syringe attached to the needle. The images were registered using the location of the phantom, and the bounding boxes from the training set were used to identify the regions where the needle was most likely inserting the phantom. A Faster region-based convolutional neural network was trained to identify the syringe and produce the bounding box location for images in the test set. A needle insertion …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Computer-assisted skill assessment has traditionally been focused on general metrics related to tool motion and usage time. While these metrics are important for an overall evaluation of skill, they do not address critical errors made during the procedure. This study examines the effectiveness of utilizing object detection to quantify the critical error of making multiple needle insertion attempts in central venous catheterization.
Methods
6860 images were annotated with ground truth bounding boxes around the syringe attached to the needle. The images were registered using the location of the phantom, and the bounding boxes from the training set were used to identify the regions where the needle was most likely inserting the phantom. A Faster region-based convolutional neural network was trained to identify the syringe and produce the bounding box location for images in the test set. A needle insertion …
Connolly, Laura; Deguet, Anton; Leonard, Simon; Tokuda, Junichi; Ungi, Tamas; Krieger, Axel; Kazanzides, Peter; Mousavi, Parvin; Fichtinger, Gabor; Taylor, Russell H
Bridging 3D Slicer and ROS2 for image-guided robotic interventions Journal Article
In: Sensors, vol. 22, iss. 14, pp. 5336, 2022.
@article{fichtinger2022e,
title = {Bridging 3D Slicer and ROS2 for image-guided robotic interventions},
author = {Laura Connolly and Anton Deguet and Simon Leonard and Junichi Tokuda and Tamas Ungi and Axel Krieger and Peter Kazanzides and Parvin Mousavi and Gabor Fichtinger and Russell H Taylor},
url = {https://www.mdpi.com/1424-8220/22/14/5336},
year = {2022},
date = {2022-01-01},
journal = {Sensors},
volume = {22},
issue = {14},
pages = {5336},
publisher = {MDPI},
abstract = {Developing image-guided robotic systems requires access to flexible, open-source software. For image guidance, the open-source medical imaging platform 3D Slicer is one of the most adopted tools that can be used for research and prototyping. Similarly, for robotics, the open-source middleware suite robot operating system (ROS) is the standard development framework. In the past, there have been several “ad hoc” attempts made to bridge both tools; however, they are all reliant on middleware and custom interfaces. Additionally, none of these attempts have been successful in bridging access to the full suite of tools provided by ROS or 3D Slicer. Therefore, in this paper, we present the SlicerROS2 module, which was designed for the direct use of ROS2 packages and libraries within 3D Slicer. The module was developed to enable real-time visualization of robots, accommodate different robot configurations, and facilitate data transfer in both directions (between ROS and Slicer). We demonstrate the system on multiple robots with different configurations, evaluate the system performance and discuss an image-guided robotic intervention that can be prototyped with this module. This module can serve as a starting point for clinical system development that reduces the need for custom interfaces and time-intensive platform setup.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
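For readers unfamiliar with the ROS side of such a bridge, the sketch below shows a minimal ROS2 node broadcasting a tool transform that an image-guidance application could consume. This is generic rclpy/tf2 usage, not the SlicerROS2 module's API; all frame names are assumptions.

import rclpy
from rclpy.node import Node
from geometry_msgs.msg import TransformStamped
from tf2_ros import TransformBroadcaster

class ToolPosePublisher(Node):
    """Broadcasts a (here static) base -> tool_tip transform at ~50 Hz."""
    def __init__(self):
        super().__init__('tool_pose_publisher')
        self.broadcaster = TransformBroadcaster(self)
        self.create_timer(0.02, self.publish_pose)

    def publish_pose(self):
        t = TransformStamped()
        t.header.stamp = self.get_clock().now().to_msg()
        t.header.frame_id = 'base'          # assumed fixed frame
        t.child_frame_id = 'tool_tip'       # assumed tracked tool frame
        t.transform.rotation.w = 1.0        # identity orientation placeholder
        self.broadcaster.sendTransform(t)

def main():
    rclpy.init()
    rclpy.spin(ToolPosePublisher())

if __name__ == '__main__':
    main()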
Poole, Meredith; Ungi, Tamas; Fichtinger, Gabor; Zevin, Boris
Training in soft tissue resection using real-time visual computer navigation feedback from the Surgery Tutor: A randomized controlled trial Journal Article
In: Surgery, 2021, ISSN: 0039-6060.
@article{Poole2021,
title = {Training in soft tissue resection using real-time visual computer navigation feedback from the Surgery Tutor: A randomized controlled trial},
author = {Meredith Poole and Tamas Ungi and Gabor Fichtinger and Boris Zevin},
url = {https://www.sciencedirect.com/science/article/pii/S0039606021011855},
doi = {10.1016/j.surg.2021.11.037},
issn = {0039-6060},
year = {2021},
date = {2021-12-01},
urldate = {2021-12-01},
journal = {Surgery},
abstract = {Background: In competency-based medical education, surgery trainees are often required to learn procedural skills in a simulated setting before proceeding to the clinical environment. The Surgery Tutor computer navigation platform allows for real-time proctor-less assessment of open soft tissue resection skills; however, the use of this platform as an aid in acquisition of procedural skills is yet to be explored. Methods: In this prospective randomized controlled trial, 20 final-year medical students were randomized to receive either training with real-time computer navigation feedback (Intervention, n = 10) or simulation training without navigation feedback (Control, n = 10) during resection of simulated non-palpable soft tissue tumors. Real-time computer navigation feedback allowed participants to visualize the position of their scalpel relative to the tumor. Computer navigation feedback was removed for postintervention assessment. Primary outcome was positive margin rate. Secondary outcomes were procedure time, mass of tissue excised, number of scalpel motions, and distance traveled by the scalpel. Results: Training with real-time computer navigation resulted in a significantly lower positive margin rate as compared to training without navigation feedback (0% vs 40%, P = .025). All other performance metrics were not significantly different between the 2 groups. Participants in the Intervention group displayed significant improvement in positive margin rate from baseline to final assessment (80% vs 0%, P < .01), whereas participants in the Control group did not. Conclusion: Real-time visual computer navigation feedback from the Surgery Tutor resulted in superior acquisition of procedural skills as compared to training without navigation feedback.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Connolly, Laura; Deguet, Anton; Sunderland, Kyle R.; Lasso, Andras; Ungi, Tamas; Rudan, John; Taylor, Russell H.; Mousavi, Parvin; Fichtinger, Gabor
An open-source platform for cooperative semi-autonomous robotic surgery Conference
IEEE International Conference on Autonomous Systems, IEEE, Montreal, Quebec, 2021.
@conference{Connolly2021,
title = {An open-source platform for cooperative semi-autonomous robotic surgery},
author = {Laura Connolly and Anton Deguet and Kyle R. Sunderland and Andras Lasso and Tamas Ungi and John Rudan and Russell H. Taylor and Parvin Mousavi and Gabor Fichtinger},
doi = {10.1109/ICAS49788.2021.9551149},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
booktitle = {IEEE International Conference on Autonomous Systems},
publisher = {IEEE},
address = {Montreal, Quebec},
organization = {IEEE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Barr, Colton; Hisey, Rebecca; Ungi, Tamas; Fichtinger, Gabor
Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization Conference
43rd Conference of the IEEE Engineering Medicine and Biology Society, 2021.
@conference{CBarr2021b,
title = {Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization},
author = {Colton Barr and Rebecca Hisey and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CBarr2021a.pdf},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
booktitle = {43rd Conference of the IEEE Engineering Medicine and Biology Society},
abstract = {Central Line Tutor is a system that facilitates real-time feedback during training for central venous catheterization. One limitation of Central Line Tutor is its reliance on expensive, cumbersome electromagnetic tracking to facilitate various training aids, including ultrasound task identification and segmentation of neck vasculature. The purpose of this study is to validate deep learning methods for vessel segmentation and ultrasound pose classification in order to mitigate the system’s reliance on electromagnetic tracking. A large dataset of segmented and classified ultrasound images was generated from participant data captured using Central Line Tutor. A U-Net architecture was used to perform vessel segmentation, while a shallow Convolutional Neural Network (CNN) architecture was designed to classify the pose of the ultrasound probe. A second classifier architecture was also tested that used the U-Net output as the CNN input. The mean testing set Intersect over Union score for U-Net cross-validation was 0.746 ± 0.052. The mean test set classification accuracy for the CNN was 92.0% ± 3.0%, while the U-Net + CNN achieved 92.7% ± 2.1%. This study highlights the potential for deep learning on ultrasound images to replace the current electromagnetic tracking-based methods for vessel segmentation and ultrasound pose classification, and represents an important step towards removing the electromagnetic tracker altogether. Removing the need for an external tracking system would significantly reduce the cost of Central Line Tutor and make it far more accessible to the medical trainees that would benefit from it most.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
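The "U-Net output as the CNN input" variant mentioned in the abstract amounts to a shallow classifier over a one-channel segmentation map. A PyTorch sketch; the layer sizes, input resolution, and class count are assumptions, not the study's architecture:

import torch
import torch.nn as nn

class PoseCNN(nn.Module):
    """Shallow CNN classifying probe pose from a one-channel segmentation map."""
    def __init__(self, n_classes=4):
        super().__init__()
        self.features = nn.Sequential(
            nn.Conv2d(1, 8, 3, stride=2, padding=1), nn.ReLU(),
            nn.Conv2d(8, 16, 3, stride=2, padding=1), nn.ReLU(),
            nn.AdaptiveAvgPool2d(1),
        )
        self.head = nn.Linear(16, n_classes)

    def forward(self, seg_map):                 # seg_map: (B, 1, H, W) U-Net output
        return self.head(self.features(seg_map).flatten(1))

logits = PoseCNN()(torch.rand(2, 1, 128, 128))  # placeholder batch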
Hisey, Rebecca; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
System for central venous catheterization training using computer vision-based workflow feedback Journal Article
In: IEEE Transactions on Biomedical Engineering, 2021.
@article{Hisey2021b,
title = {System for central venous catheterization training using computer vision-based workflow feedback},
author = {Rebecca Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
journal = {IEEE Transactions on Biomedical Engineering},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Fichtinger, Gabor; Mousavi, Parvin; Ungi, Tamas; Fenster, Aaron; Abolmaesumi, Purang; Kronreif, Gernot; Ruiz-Alzola, Juan; Ndoye, Alain; Diao, Babacar; Kikinis, Ron
Design of an Ultrasound-Navigated Prostate Cancer Biopsy System for Nationwide Implementation in Senegal Journal Article
In: Journal of Imaging, vol. 7, no. 8, pp. 154, 2021, ISSN: 2313-433X.
@article{Fichtinger2021,
title = {Design of an Ultrasound-Navigated Prostate Cancer Biopsy System for Nationwide Implementation in Senegal},
author = {Gabor Fichtinger and Parvin Mousavi and Tamas Ungi and Aaron Fenster and Purang Abolmaesumi and Gernot Kronreif and Juan Ruiz-Alzola and Alain Ndoye and Babacar Diao and Ron Kikinis},
url = {https://www.mdpi.com/2313-433X/7/8/154},
doi = {10.3390/jimaging7080154},
issn = {2313-433X},
year = {2021},
date = {2021-08-01},
urldate = {2021-08-01},
journal = {Journal of Imaging},
volume = {7},
number = {8},
pages = {154},
abstract = {This paper presents the design of NaviPBx, an ultrasound-navigated prostate cancer biopsy system. NaviPBx is designed to support an affordable and sustainable national healthcare program in Senegal. It uses spatiotemporal navigation and multiparametric transrectal ultrasound to guide biopsies. NaviPBx integrates concepts and methods that have been independently validated previously in clinical feasibility studies and deploys them together in a practical prostate cancer biopsy system. NaviPBx is based entirely on free open-source software and will be shared as a free open-source program with no restriction on its use. NaviPBx is set to be deployed and sustained nationwide through the Senegalese Military Health Service. This paper reports on the results of the design process of NaviPBx. Our approach concentrates on “frugal technology”, intended to be affordable for low–middle income (LMIC) countries. Our project promises the wide-scale application of prostate biopsy and will foster time-efficient development and programmatic implementation of ultrasound-guided diagnostic and therapeutic interventions in Senegal and beyond.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wu, Catherine O.; Diao, Babacar; Ungi, Tamas; Sedghi, Alireza; Kikinis, Ron; Mousavi, Parvin; Fichtinger, Gabor
Development of an open-source system for prostate biopsy training in Senegal Conference
SPIE Medical Imaging 2021: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 11598, 2021.
@conference{CWu2021a,
title = {Development of an open-source system for prostate biopsy training in Senegal},
author = {Catherine O. Wu and Babacar Diao and Tamas Ungi and Alireza Sedghi and Ron Kikinis and Parvin Mousavi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2021a-poster_0.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2021a_0.pdf},
year = {2021},
date = {2021-02-01},
urldate = {2021-02-01},
booktitle = {SPIE Medical Imaging 2021: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {11598},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hisey, Rebecca; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
Central Line Tutor: using computer vision workflow recognition in a central venous catheterization training system Conference
Imaging Network of Ontario Symposium, 2021.
@conference{Hisey2021a,
title = {Central Line Tutor: using computer vision workflow recognition in a central venous catheterization training system},
author = {Rebecca Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/RHisey_ImNO2021.pdf},
year = {2021},
date = {2021-02-01},
urldate = {2021-02-01},
booktitle = {Imaging Network of Ontario Symposium},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Barr, Colton; Hisey, Rebecca; Ungi, Tamas; Fichtinger, Gabor
Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization Conference
Imaging Network of Ontario Symposium, 2021.
@conference{CBarr2021a,
title = {Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization},
author = {Colton Barr and Rebecca Hisey and Tamas Ungi and Gabor Fichtinger},
year = {2021},
date = {2021-02-01},
urldate = {2021-02-01},
booktitle = {Imaging Network of Ontario Symposium},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}