Alqaoud, Motaz; Plemmons, John; Feliberti, Eric; Dong, Siqin; Kaipa, Krishnanand; Fichtinger, Gabor; Xiao, Yiming; Audette, Michel A
nnUNet-based multi-modality breast MRI segmentation and tissue-delineating phantom for robotic tumor surgery planning Journal Article
In: pp. 3495-3501, 2022.
@article{fichtinger2022f,
title = {nnUNet-based multi-modality breast MRI segmentation and tissue-delineating phantom for robotic tumor surgery planning},
author = {Motaz Alqaoud and John Plemmons and Eric Feliberti and Siqin Dong and Krishnanand Kaipa and Gabor Fichtinger and Yiming Xiao and Michel A Audette},
url = {https://ieeexplore.ieee.org/abstract/document/9871109/},
year = {2022},
date = {2022-01-01},
pages = {3495--3501},
publisher = {IEEE},
internal-note = {NOTE(review): required journal field is missing for this @article; IEEE publisher plus page range suggests conference proceedings -- confirm venue and entry type},
abstract = {Segmentation of the thoracic region and breast tissues is crucial for analyzing and diagnosing the presence of breast masses. This paper introduces a medical image segmentation architecture that aggregates two neural networks based on the state-of-the-art nnU-Net. Additionally, this study proposes a polyvinyl alcohol cryogel (PVA-C) breast phantom, based on its automated segmentation approach, to enable planning and navigation experiments for robotic breast surgery. The dataset consists of multimodality breast MRI of T2W and STIR images obtained from 10 patients. A statistical analysis of segmentation tasks emphasizes the Dice Similarity Coefficient (DSC), segmentation accuracy, sensitivity, and specificity. We first use a single class labeling to segment the breast region and then exploit it as an input for three-class labeling to segment fatty, fibroglandular (FGT), and tumorous tissues. The first network has …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Connolly, Laura; Deguet, Anton; Leonard, Simon; Tokuda, Junichi; Ungi, Tamas; Krieger, Axel; Kazanzides, Peter; Mousavi, Parvin; Fichtinger, Gabor; Taylor, Russell H
Bridging 3D Slicer and ROS2 for image-guided robotic interventions Journal Article
In: Sensors, vol. 22, iss. 14, pp. 5336, 2022.
@article{fichtinger2022e,
title = {Bridging 3D Slicer and ROS2 for image-guided robotic interventions},
author = {Laura Connolly and Anton Deguet and Simon Leonard and Junichi Tokuda and Tamas Ungi and Axel Krieger and Peter Kazanzides and Parvin Mousavi and Gabor Fichtinger and Russell H Taylor},
url = {https://www.mdpi.com/1424-8220/22/14/5336},
year = {2022},
date = {2022-01-01},
journal = {Sensors},
volume = {22},
number = {14},
pages = {5336},
publisher = {MDPI},
abstract = {Developing image-guided robotic systems requires access to flexible, open-source software. For image guidance, the open-source medical imaging platform 3D Slicer is one of the most adopted tools that can be used for research and prototyping. Similarly, for robotics, the open-source middleware suite robot operating system (ROS) is the standard development framework. In the past, there have been several “ad hoc” attempts made to bridge both tools; however, they are all reliant on middleware and custom interfaces. Additionally, none of these attempts have been successful in bridging access to the full suite of tools provided by ROS or 3D Slicer. Therefore, in this paper, we present the SlicerROS2 module, which was designed for the direct use of ROS2 packages and libraries within 3D Slicer. The module was developed to enable real-time visualization of robots, accommodate different robot configurations, and facilitate data transfer in both directions (between ROS and Slicer). We demonstrate the system on multiple robots with different configurations, evaluate the system performance and discuss an image-guided robotic intervention that can be prototyped with this module. This module can serve as a starting point for clinical system development that reduces the need for custom interfaces and time-intensive platform setup.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Santilli, Alice Maria Leondina; Ren, Kevin; Oleschuk, Richard; Kaufmann, Martin; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
Application of intraoperative mass spectrometry and data analytics for oncological margin detection, a review Journal Article
In: vol. 69, iss. 7, pp. 2220-2232, 2022.
@article{fichtinger2022d,
title = {Application of intraoperative mass spectrometry and data analytics for oncological margin detection, a review},
author = {Alice Maria Leondina Santilli and Kevin Ren and Richard Oleschuk and Martin Kaufmann and John Rudan and Gabor Fichtinger and Parvin Mousavi},
url = {https://ieeexplore.ieee.org/abstract/document/9669135/},
year = {2022},
date = {2022-01-01},
volume = {69},
number = {7},
pages = {2220--2232},
publisher = {IEEE},
internal-note = {NOTE(review): required journal field is missing for this @article -- presumably an IEEE Transactions title; complete from the IEEE Xplore record},
abstract = {Objective
A common phase of early-stage oncological treatment is the surgical resection of cancerous tissue. The presence of cancer cells on the resection margin, referred to as positive margin, is correlated with the recurrence of cancer and may require re-operation, negatively impacting many facets of patient outcomes. There exists a significant gap in the surgeon’s ability to intraoperatively delineate between tissues. Mass spectrometry methods have shown considerable promise as intraoperative tissue profiling tools that can assist with the complete resection of cancer. To do so, the vastness of the information collected through these modalities must be digested, relying on robust and efficient extraction of insights through data analysis pipelines.
Methods
We review clinical mass spectrometry literature and prioritize intraoperatively applied modalities. We also survey the data analysis methods employed in these …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
A common phase of early-stage oncological treatment is the surgical resection of cancerous tissue. The presence of cancer cells on the resection margin, referred to as positive margin, is correlated with the recurrence of cancer and may require re-operation, negatively impacting many facets of patient outcomes. There exists a significant gap in the surgeon’s ability to intraoperatively delineate between tissues. Mass spectrometry methods have shown considerable promise as intraoperative tissue profiling tools that can assist with the complete resection of cancer. To do so, the vastness of the information collected through these modalities must be digested, relying on robust and efficient extraction of insights through data analysis pipelines.
Methods
We review clinical mass spectrometry literature and prioritize intraoperatively applied modalities. We also survey the data analysis methods employed in these …
Lasso, Andras; Herz, Christian; Nam, Hannah; Cianciulli, Alana; Pieper, Steve; Drouin, Simon; Pinter, Csaba; St-Onge, Samuelle; Vigil, Chad; Ching, Stephen; Sunderland, Kyle; Fichtinger, Gabor; Kikinis, Ron; Jolley, Matthew A
SlicerHeart: An open-source computing platform for cardiac image analysis and modeling Journal Article
In: vol. 9, pp. 886549, 2022.
@article{fichtinger2022c,
title = {SlicerHeart: An open-source computing platform for cardiac image analysis and modeling},
author = {Andras Lasso and Christian Herz and Hannah Nam and Alana Cianciulli and Steve Pieper and Simon Drouin and Csaba Pinter and Samuelle St-Onge and Chad Vigil and Stephen Ching and Kyle Sunderland and Gabor Fichtinger and Ron Kikinis and Matthew A Jolley},
url = {https://www.frontiersin.org/articles/10.3389/fcvm.2022.886549/full},
doi = {10.3389/fcvm.2022.886549},
year = {2022},
date = {2022-01-01},
volume = {9},
pages = {886549},
publisher = {Frontiers},
internal-note = {NOTE(review): required journal field is missing; the DOI prefix fcvm suggests Frontiers in Cardiovascular Medicine -- confirm before adding},
abstract = {Cardiovascular disease is a significant cause of morbidity and mortality in the developed world. 3D imaging of the heart’s structure is critical to the understanding and treatment of cardiovascular disease. However, open-source tools for image analysis of cardiac images, particularly 3D echocardiographic (3DE) data, are limited. We describe the rationale, development, implementation, and application of SlicerHeart, a cardiac-focused toolkit for image analysis built upon 3D Slicer, an open-source image computing platform. We designed and implemented multiple Python scripted modules within 3D Slicer to import, register, and view 3DE data, including new code to volume render and crop 3DE. In addition, we developed dedicated workflows for the modeling and quantitative analysis of multi-modality image-derived heart models, including heart valves. Finally, we created and integrated new functionality to facilitate the planning of cardiac interventions and surgery. We demonstrate application of SlicerHeart to a diverse range of cardiovascular modeling and simulation including volume rendering of 3DE images, mitral valve modeling, transcatheter device modeling, and planning of complex surgical intervention such as cardiac baffle creation. SlicerHeart is an evolving open-source image processing platform based on 3D Slicer initiated to support the investigation and treatment of congenital heart disease. The technology in SlicerHeart provides a robust foundation for 3D image-based investigation in cardiovascular medicine.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Fichtinger, Gabor; Troccaz, Jocelyne; Haidegger, Tamas
Image-guided interventional robotics: Lost in translation? Journal Article
In: Proceedings of the IEEE, vol. 110, iss. 7, pp. 932-950, 2022.
@article{fichtinger2022b,
title = {Image-guided interventional robotics: Lost in translation?},
author = {Gabor Fichtinger and Jocelyne Troccaz and Tamas Haidegger},
url = {https://ieeexplore.ieee.org/abstract/document/9777771/},
year = {2022},
date = {2022-01-01},
journal = {Proceedings of the IEEE},
volume = {110},
number = {7},
pages = {932--950},
publisher = {IEEE},
abstract = {Interventional robotic systems have been deployed with all existing imaging modalities in an expansive portfolio of therapies and surgeries. Over the years, literature reviews have painted a comprehensive portrait of the translation of the underlying technology from research to practice. While many of these robots performed promisingly in preclinical settings, only a handful of them managed to evolve further, break through the commercialization boundary, and even fewer reached a wide-scale adoption. Despite the undeniable success of service robotics in general and particularly in some sophisticated medical applications, image-guided robotics’ impact remained modest compared to other surgical areas, especially laparoscopic minimally invasive surgery. This article aims to embrace the state of the art on the one hand, provide a comprehensive narrative of the situation described, support future system developers …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Maier-Hein, Lena; Eisenmann, Matthias; Sarikaya, Duygu; März, Keno; Collins, Toby; Malpani, Anand; Fallert, Johannes; Feussner, Hubertus; Giannarou, Stamatia; Mascagni, Pietro; Nakawala, Hirenkumar; Park, Adrian; Pugh, Carla; Stoyanov, Danail; Vedula, Swaroop S; Cleary, Kevin; Fichtinger, Gabor; Forestier, Germain; Gibaud, Bernard; Grantcharov, Teodor; Hashizume, Makoto; Heckmann-Nötzel, Doreen; Kenngott, Hannes G; Kikinis, Ron; Mündermann, Lars; Navab, Nassir; Onogur, Sinan; Ross, Tobias; Sznitman, Raphael; Taylor, Russell H; Tizabi, Minu D; Wagner, Martin; Hager, Gregory D; Neumuth, Thomas; Padoy, Nicolas; Collins, Justin; Gockel, Ines; Goedeke, Jan; Hashimoto, Daniel A; Joyeux, Luc; Lam, Kyle; Leff, Daniel R; Madani, Amin; Marcus, Hani J; Meireles, Ozanan; Seitel, Alexander; Teber, Dogu; Ueckert, Frank; Müller-Stich, Beat P; Jannin, Pierre; Speidel, Stefanie
Surgical data science–from concepts toward clinical translation Journal Article
In: vol. 76, pp. 102306, 2022.
@article{fichtinger2022,
title = {Surgical data science–from concepts toward clinical translation},
author = {Lena Maier-Hein and Matthias Eisenmann and Duygu Sarikaya and Keno März and Toby Collins and Anand Malpani and Johannes Fallert and Hubertus Feussner and Stamatia Giannarou and Pietro Mascagni and Hirenkumar Nakawala and Adrian Park and Carla Pugh and Danail Stoyanov and Swaroop S Vedula and Kevin Cleary and Gabor Fichtinger and Germain Forestier and Bernard Gibaud and Teodor Grantcharov and Makoto Hashizume and Doreen Heckmann-Nötzel and Hannes G Kenngott and Ron Kikinis and Lars Mündermann and Nassir Navab and Sinan Onogur and Tobias Ross and Raphael Sznitman and Russell H Taylor and Minu D Tizabi and Martin Wagner and Gregory D Hager and Thomas Neumuth and Nicolas Padoy and Justin Collins and Ines Gockel and Jan Goedeke and Daniel A Hashimoto and Luc Joyeux and Kyle Lam and Daniel R Leff and Amin Madani and Hani J Marcus and Ozanan Meireles and Alexander Seitel and Dogu Teber and Frank Ueckert and Beat P Müller-Stich and Pierre Jannin and Stefanie Speidel},
url = {https://www.sciencedirect.com/science/article/pii/S1361841521003510},
year = {2022},
date = {2022-01-01},
volume = {76},
pages = {102306},
publisher = {Elsevier},
internal-note = {NOTE(review): required journal field is missing for this @article; Elsevier volume/article-number present -- complete the journal title from the publisher record},
abstract = {Recent developments in data science in general and machine learning in particular have transformed the way experts envision the future of surgery. Surgical Data Science (SDS) is a new research field that aims to improve the quality of interventional healthcare through the capture, organization, analysis and modeling of data. While an increasing number of data-driven approaches and clinical applications have been studied in the fields of radiological and clinical data science, translational success stories are still lacking in surgery. In this publication, we shed light on the underlying reasons and provide a roadmap for future advances in the field. Based on an international workshop involving leading researchers in the field of SDS, we review current practice, key achievements and initiatives as well as available standards and tools for a number of topics relevant to the field, namely (1) infrastructure for data …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Connolly, Laura; Jamzad, Amoon; Nikniazi, Arash; Poushimin, Rana; Lasso, Andras; Sunderland, Kyle R.; Ungi, Tamas; Nunzi, Jean Michel; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
An open-source testbed for developing image-guided robotic tumor-bed inspection Conference
Imaging Network of Ontario (ImNO) Symposium, 2022.
@conference{connolly2022b,
  title     = {An open-source testbed for developing image-guided robotic tumor-bed inspection},
  author    = {Laura Connolly and Amoon Jamzad and Arash Nikniazi and Rana Poushimin and Andras Lasso and Kyle R. Sunderland and Tamas Ungi and Jean Michel Nunzi and John Rudan and Gabor Fichtinger and Parvin Mousavi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/01/Connolly2022b.pdf},
  year      = {2022},
  date      = {2022-01-01},
  urldate   = {2022-01-01},
  booktitle = {Imaging Network of Ontario (ImNO) Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Kaufmann, Martin; Vaysse, Pierre-Maxence; Savage, Adele; Amgheib, Ala; Marton, András; Manoli, Eftychios; Fichtinger, Gabor; Pringle, Steven D.; Rudan, John; Heeren, Ron M. A.; Takáts, Zoltán; Balog, Júlia; Siegel, Tiffany Porta
Harmonization of Rapid Evaporative Ionization Mass Spectrometry Workflows across Four Sites and Testing Using Reference Material and Local Food-Grade Meats Journal Article
In: Metabolites, 2022.
@article{Kaufmann2022b,
title = {Harmonization of Rapid Evaporative Ionization Mass Spectrometry Workflows across Four Sites and Testing Using Reference Material and Local Food-Grade Meats},
author = {Martin Kaufmann and Pierre-Maxence Vaysse and Adele Savage and Ala Amgheib and András Marton and Eftychios Manoli and Gabor Fichtinger and Steven D. Pringle and John Rudan and Ron M. A. Heeren and Zoltán Takáts and Júlia Balog and Tiffany Porta Siegel},
year = {2022},
date = {2022-01-01},
journal = {Metabolites},
internal-note = {NOTE(review): no volume, issue, pages, DOI, or URL recorded -- complete from the Metabolites publisher record},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Lee, H. Y.
Evaluating Faster R-CNN for cataract surgery tool detection using microscopy video Conference
Imaging Network of Ontario Symposium, 2022.
@conference{Lee2022a,
title = {Evaluating Faster R-CNN for cataract surgery tool detection using microscopy video},
author = {H. Y. Lee},
editor = {R. Hisey},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
booktitle = {Imaging Network of Ontario Symposium},
abstract = {<p>Introduction: Traditional methods of cataract surgery skill assessment rely on human expert supervision. This exposes the trainee to interobserver variability and inconsistent feedback. Alternative measures such as sensorbased instrument motion analysis promise objective assessment [1]. However, sensor-based systems are logistically complicated and expensive to obtain. Previous studies have demonstrated a strong correlation between sensor-based metrics and two-dimensional motion metrics obtained from object detection [2]. Reliable object detection is the foundation for computing such performance metrics. Therefore, the objective of this study is to evaluate the performance of an object detection network, namely Faster Region-Based Convolutional Neural Network (FRCNN), in recognition of cataract surgery tools in microscopy video. Methods: Microscope video was recorded for 25 trials of cataract surgery on an artificial eye. The trials were performed by a cohort consisting of one senior-surgeon and four junior-surgeons and manually annotated for bounding box locations of the cataract surgery tools (Figure 1) The surgical tools used included: forceps, diamond keratomes, viscoelastic cannulas, and cystotome needles. A FRCNN [3] was trained on a total of 130,614 frames for object detection. We used five-fold cross validation, using a leave-one-userout method. In this manner, all videos from one surgeon were reserved for testing and the frames from the remaining 20 videos were divided among training and validation. Network performance was evaluated via mean average precision (mAP), which is defined as the area under the precision/recall curve. Samples were considered correctly identified when the intersection over union (IoU) between the ground truth and predicted bounding boxes was greater than 0.5. Results: The overall mAP of the network was 0.63. Toolspecific mAPs ranged between 0.49 and 0.96 (Table 1). 
The high accuracy in detection of the cystotome needle is likely due to the distinct size and shape of the tool tip. The diamond keratome had the lowest mAP of any of the tools recognized, however this may be attributed to variations in the appearance of the tool tip (Figure 2). Conclusions: The FRCNN was able to recognize the surgical tools used in cataract surgery with reasonably high accuracy. Now that we know the network can sufficiently recognize the surgical tools, our next goal is to use this network to compute motion-based performance metrics. Future work seeks to validate these performance metrics against those obtained from sensor-based tracking and against expert evaluations. This serves as a first step towards providing consistent and accessible feedback for future trainees learning cataract surgery. </p>},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Poole, Meredith; Ungi, Tamas; Fichtinger, Gabor; Zevin, Boris
Training in soft tissue resection using real-time visual computer navigation feedback from the Surgery Tutor: A randomized controlled trial Journal Article
In: Surgery, 2021, ISSN: 0039-6060.
@article{Poole2021,
title = {Training in soft tissue resection using real-time visual computer navigation feedback from the Surgery Tutor: A randomized controlled trial},
author = {Meredith Poole and Tamas Ungi and Gabor Fichtinger and Boris Zevin},
url = {https://www.sciencedirect.com/science/article/pii/S0039606021011855},
doi = {10.1016/j.surg.2021.11.037},
issn = {0039-6060},
year = {2021},
date = {2021-12-01},
urldate = {2021-12-01},
journal = {Surgery},
abstract = {<p>Background In competency-based medical education, surgery trainees are often required to learn procedural skills in a simulated setting before proceeding to the clinical environment. The Surgery Tutor computer navigation platform allows for real-time proctor-less assessment of open soft tissue resection skills; however, the use of this platform as an aid in acquisition of procedural skills is yet to be explored. Methods In this prospective randomized controlled trial, 20 final year medical students were randomized to receive either training with real-time computer navigation feedback (Intervention, n = 10) or simulation training without navigation feedback (Control, n = 10) during resection of simulated non-palpable soft tissue tumors. Real-time computer navigation feedback allowed participants to visualize the position of their scalpel relative to the tumor. Computer navigation feedback was removed for postintervention assessment. Primary outcome was positive margin rate. Secondary outcomes were procedure time, mass of tissue excised, number of scalpel motions, and distance traveled by the scalpel. Results Training with real-time computer navigation resulted in a significantly lower positive margin rate as compared to training without navigation feedback (0% vs 40%, P = .025). All other performance metrics were not significantly different between the 2 groups. Participants in the intervention group displayed significant improvement in positive margin rate from baseline to final assessment (80% vs 0%, P < .01), whereas participants in the Control group did not. Conclusion Real-time visual computer navigation feedback from the Surgery Tutor resulted in superior acquisition of procedural skills as compared to training without navigation feedback.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Barr, Colton; Hisey, R.; Ungi, Tamas; Fichtinger, Gabor
Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization Conference
43rd Conference of the IEEE Engineering in Medicine and Biology Society, 2021.
@conference{CBarr2021b,
title = {Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization},
author = {Colton Barr and R. Hisey and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CBarr2021a.pdf},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
booktitle = {43rd Conference of the IEEE Engineering in Medicine and Biology Society},
abstract = {<p>Central Line Tutor is a system that facilitates real-time feedback during training for central venous catheterization. One limitation of Central Line Tutor is its reliance on expensive, cumbersome electromagnetic tracking to facilitate various training aids, including ultrasound task identification and segmentation of neck vasculature. The purpose of this study is to validate deep learning methods for vessel segmentation and ultrasound pose classification in order to mitigate the system’s reliance on electromagnetic tracking. A large dataset of segmented and classified ultrasound images was generated from participant data captured using Central Line Tutor. A U-Net architecture was used to perform vessel segmentation, while a shallow Convolutional Neural Network (CNN) architecture was designed to classify the pose of the ultrasound probe. A second classifier architecture was also tested that used the U-Net output as the CNN input. The mean testing set Intersect over Union score for U-Net cross-validation was 0.746 ± 0.052. The mean test set classification accuracy for the CNN was 92.0% ± 3.0, while the U-Net + CNN achieved 92.7% ± 2.1%. This study highlights the potential for deep learning on ultrasound images to replace the current electromagnetic tracking-based methods for vessel segmentation and ultrasound pose classification, and represents an important step towards removing the electromagnetic tracker altogether. Removing the need for an external tracking system would significantly reduce the cost of Central Line Tutor and make it far more accessible to the medical trainees that would benefit from it most.</p>},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
System for central venous catheterization training using computer vision-based workflow feedback Journal Article
In: IEEE Transactions on Biomedical Engineering, 2021.
@article{Hisey2021b,
title = {System for central venous catheterization training using computer vision-based workflow feedback},
author = {R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
journal = {IEEE Transactions on Biomedical Engineering},
internal-note = {NOTE(review): no DOI, URL, volume, issue, or pages recorded -- complete from the IEEE Xplore record},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Connolly, Laura; Deguet, Anton; Sunderland, Kyle R.; Lasso, Andras; Ungi, Tamas; Rudan, John; Taylor, Russell H.; Mousavi, Parvin; Fichtinger, Gabor
An open-source platform for cooperative semi-autonomous robotic surgery Conference
IEEE International Conference on Autonomous Systems, IEEE, Montreal, Quebec, 2021.
@conference{Connolly2021,
title = {An open-source platform for cooperative semi-autonomous robotic surgery},
author = {Laura Connolly and Anton Deguet and Kyle R. Sunderland and Andras Lasso and Tamas Ungi and John Rudan and Russell H. Taylor and Parvin Mousavi and Gabor Fichtinger},
doi = {10.1109/ICAS49788.2021.9551149},
year = {2021},
date = {2021-10-01},
urldate = {2021-10-01},
booktitle = {IEEE International Conference on Autonomous Systems},
publisher = {IEEE},
address = {Montreal, Quebec},
organization = {IEEE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Connolly, Laura; Jamzad, Amoon; Kaufmann, Martin; Farquharson, Catriona E.; Ren, Kevin; Rudan, John; Fichtinger, Gabor; Mousavi, Parvin
Combined Mass Spectrometry and Histopathology Imaging for Perioperative Tissue Assessment in Cancer Surgery Journal Article
In: Journal of Imaging, vol. 7, no. 203, 2021.
@article{Connolly2021c,
title = {Combined Mass Spectrometry and Histopathology Imaging for Perioperative Tissue Assessment in Cancer Surgery},
author = {Laura Connolly and Amoon Jamzad and Martin Kaufmann and Catriona E. Farquharson and Kevin Ren and John Rudan and Gabor Fichtinger and Parvin Mousavi},
doi = {10.3390/jimaging7100203},
year = {2021},
date = {2021-10-01},
journal = {Journal of Imaging},
volume = {7},
number = {203},
abstract = {<p><span style="color:rgb(34, 34, 34); font-family:arial; font-size:12px">Mass spectrometry is an effective imaging tool for evaluating biological tissue to detect cancer. With the assistance of deep learning, this technology can be used as a perioperative tissue assessment tool that will facilitate informed surgical decisions. To achieve such a system requires the development of a database of mass spectrometry signals and their corresponding pathology labels. Assigning correct labels, in turn, necessitates precise spatial registration of histopathology and mass spectrometry data. This is a challenging task due to the domain differences and noisy nature of images. In this study, we create a registration framework for mass spectrometry and pathology images as a contribution to the development of perioperative tissue assessment. In doing so, we explore two opportunities in deep learning for medical image registration, namely, unsupervised, multi-modal deformable image registration and evaluation of the registration. We test this system on prostate needle biopsy cores that were imaged with desorption electrospray ionization mass spectrometry (DESI) and show that we can successfully register DESI and histology images to achieve accurate alignment and, consequently, labelling for future training. This automation is expected to improve the efficiency and development of a deep learning architecture that will benefit the use of mass spectrometry imaging for cancer diagnosis.</span></p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Farahmand, Mohammad; Nabi, Majid
Channel Quality Prediction for TSCH Blacklisting in Highly Dynamic Networks: A Self-Supervised Deep Learning Approach Journal Article
In: IEEE Sensors J., vol. 21, no. 18, pp. 21059–21068, 2021, ISSN: 1558-1748.
@article{Farahmand2021,
title = {Channel Quality Prediction for TSCH Blacklisting in Highly Dynamic Networks: A Self-Supervised Deep Learning Approach},
author = {Mohammad Farahmand and Majid Nabi},
doi = {10.1109/jsen.2021.3093424},
issn = {1558-1748},
year = {2021},
date = {2021-09-15},
urldate = {2021-09-15},
journal = {IEEE Sensors Journal},
volume = {21},
number = {18},
pages = {21059--21068},
publisher = {Institute of Electrical and Electronics Engineers (IEEE)},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Fichtinger, Gabor; Mousavi, Parvin; Ungi, Tamas; Fenster, Aaron; Abolmaesumi, Purang; Kronreif, Gernot; Ruiz-Alzola, Juan; Ndoye, Alain; Diao, Babacar; Kikinis, Ron
Design of an Ultrasound-Navigated Prostate Cancer Biopsy System for Nationwide Implementation in Senegal Journal Article
In: Journal of Imaging, vol. 7, no. 8, pp. 154, 2021, ISSN: 2313-433X.
@article{Fichtinger2021,
  title     = {Design of an Ultrasound-Navigated Prostate Cancer Biopsy System for Nationwide Implementation in Senegal},
  author    = {Gabor Fichtinger and Parvin Mousavi and Tamas Ungi and Aaron Fenster and Purang Abolmaesumi and Gernot Kronreif and Juan Ruiz-Alzola and Alain Ndoye and Babacar Diao and Ron Kikinis},
  url       = {https://www.mdpi.com/2313-433X/7/8/154},
  doi       = {10.3390/jimaging7080154},
  issn      = {2313-433X},
  year      = {2021},
  date      = {2021-08-01},
  urldate   = {2021-08-01},
  journal   = {Journal of Imaging},
  volume    = {7},
  number    = {8},
  pages     = {154},
  abstract  = {<p>This paper presents the design of NaviPBx, an ultrasound-navigated prostate cancer biopsy system. NaviPBx is designed to support an affordable and sustainable national healthcare program in Senegal. It uses spatiotemporal navigation and multiparametric transrectal ultrasound to guide biopsies. NaviPBx integrates concepts and methods that have been independently validated previously in clinical feasibility studies and deploys them together in a practical prostate cancer biopsy system. NaviPBx is based entirely on free open-source software and will be shared as a free open-source program with no restriction on its use. NaviPBx is set to be deployed and sustained nationwide through the Senegalese Military Health Service. This paper reports on the results of the design process of NaviPBx. Our approach concentrates on “frugal technology”, intended to be affordable for low–middle income (LMIC) countries. Our project promises the wide-scale application of prostate biopsy and will foster time-efficient development and programmatic implementation of ultrasound-guided diagnostic and therapeutic interventions in Senegal and beyond.</p>},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Fauerbach, Paola V. Nasute; Tyryshkin, Kathrin; Rodrigo, Silvia Perez; Rudan, John; Fichtinger, Gabor; Reedijk, Michael; Varma, Sonal; Berman, David M.
Lack of definitive presurgical pathological diagnosis is associated with inadequate surgical margins in breast-conserving surgery Journal Article
In: European Journal of Surgical Oncology, 2021, ISSN: 0748-7983.
@article{NasuteFauerbach2021,
title = {Lack of definitive presurgical pathological diagnosis is associated with inadequate surgical margins in breast-conserving surgery},
author = {Paola V. Nasute Fauerbach and Kathrin Tyryshkin and Silvia Perez Rodrigo and John Rudan and Gabor Fichtinger and Michael Reedijk and Sonal Varma and David M. Berman},
url = {https://www.sciencedirect.com/science/article/pii/S0748798321005424},
doi = {10.1016/j.ejso.2021.05.047},
issn = {0748-7983},
year = {2021},
date = {2021-06-01},
urldate = {2021-06-01},
journal = {European Journal of Surgical Oncology},
abstract = {<p>Purpose To determine the impact of definitive presurgical diagnosis on surgical margins in breast-conserving surgery (BCS) for primary carcinomas; clinicopathological features were also analyzed. Methods This retrospective study included women who underwent BCS for primary carcinomas in 2016 and 2017. Definitive presurgical diagnosis was defined as having a presurgical core needle biopsy (CNB) and not being upstaged between biopsy and surgery. Biopsy data and imaging findings including breast density were retrieved. Inadequate surgical margins (IM) were defined per latest ASCO and ASTRO guidelines. Univariable and multivariable analyses were performed. Results 360 women (median age, 66) met inclusion criteria with 1 having 2 cancers. 82.5% (298/361) were invasive cancers while 17.5% (63/361) were ductal carcinoma in situ (DCIS). Most biopsies were US-guided (284/346, 82.0%), followed by mammographic (60/346, 17.3%), and MRI-guided (2/346, 0.6%). US and mammographic CNB yielded median samples of 2 and 4, respectively, with a 14G needle. 15 patients (4.2%) lacked presurgical CNB. The IM rate was 30.0%. In multivariable analysis, large invasive cancers (>20 mm), dense breasts, and DCIS were associated with IM (p = 0.029, p = 0.010, and p = 0.013, respectively). Most importantly, lack of definitive presurgical diagnosis was a risk factor for IM (OR, 2.35; 95% CI: 1.23–4.51, p = 0.010). In contrast, neither patient age (<50) nor aggressive features (e.g., LVI) were associated with IM. Conclusion Lack of a definitive presurgical diagnosis was associated with a two-fold increase of IM in BCS; other risk factors were dense breasts, large invasive cancers, and DCIS.</p>},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Barr, Colton; Hisey, R.; Ungi, Tamas; Fichtinger, Gabor
Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization Conference
Imaging Network of Ontario Symposium, 2021.
@conference{CBarr2021a,
  title     = {Ultrasound Probe Pose Classification for Task Recognition in Central Venous Catheterization},
  author    = {Colton Barr and R. Hisey and Tamas Ungi and Gabor Fichtinger},
  year      = {2021},
  date      = {2021-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Wu, Catherine O.; Diao, Babacar; Ungi, Tamas; Sedghi, Alireza; Kikinis, Ron; Mousavi, Parvin; Fichtinger, Gabor
Development of an open-source system for prostate biopsy training in Senegal Conference
SPIE Medical Imaging 2021: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 11598, 2021.
@conference{CWu2021a,
  title     = {Development of an open-source system for prostate biopsy training in Senegal},
  author    = {Catherine O. Wu and Babacar Diao and Tamas Ungi and Alireza Sedghi and Ron Kikinis and Parvin Mousavi and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2021a-poster_0.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/CWu2021a_0.pdf},
  year      = {2021},
  date      = {2021-02-01},
  urldate   = {2021-02-01},
  booktitle = {SPIE Medical Imaging 2021: Image-Guided Procedures, Robotic Interventions, and Modeling},
  volume    = {11598},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}
Hisey, R.; Camire, Daenis; Erb, Jason; Howes, Daniel; Fichtinger, Gabor; Ungi, Tamas
Central Line Tutor: using computer vision workflow recognition in a central venous catheterization training system Conference
Imaging Network of Ontario Symposium, 2021.
@conference{Hisey2021a,
  title     = {Central Line Tutor: using computer vision workflow recognition in a central venous catheterization training system},
  author    = {R. Hisey and Daenis Camire and Jason Erb and Daniel Howes and Gabor Fichtinger and Tamas Ungi},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/RHisey_ImNO2021.pdf},
  year      = {2021},
  date      = {2021-02-01},
  urldate   = {2021-02-01},
  booktitle = {Imaging Network of Ontario Symposium},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference}
}