Fichtinger, G; Deguet, A; Masamune, K; Balogh, E; Fischer, G; Mathieu, H; Taylor, RH; Fayad, L; deOliveira, M; Zinreich, SJ
Needle insertion in CT Scanner with Image Overlay-Phantom and Cadaver Studies Journal Article
In: 0000.
@article{fichtinger0000_30,
  title     = {Needle insertion in {CT} Scanner with Image Overlay - Phantom and Cadaver Studies},
  author    = {G Fichtinger and A Deguet and K Masamune and E Balogh and G Fischer and H Mathieu and RH Taylor and L Fayad and M deOliveira and SJ Zinreich},
  url       = {https://www.researchgate.net/profile/Emese-Balogh-4/publication/250727227_Needle_insertion_in_CT_Scanner_with_Image_Overlay_-_Phantom_and_Cadaver_Studies/links/0deec529c3a9b9c285000000/Needle-insertion-in-CT-Scanner-with-Image-Overlay-Phantom-and-Cadaver-Studies.pdf},
  abstract  = {We present an image overlay system that assists in surgical needle insertions inside conventional CT scanners. The device is mounted on the gantry of the CT scanner and consists of a flat LCD display and a semi-transparent mirror. In the preoperative calibration process the overlay system and the imaging plane of the scanner are registered. As a result, looking at the patient through the mirror, the CT image appears to be floating inside the patient with correct size and position providing the physician with two-dimensional ``X-ray vision'' to guide needle placement procedures. The physician inserts the needle following the optimal path identified in the CT image that is rendered on the LCD and thereby reflected in the mirror. The system promises to increase needle placement accuracy and also to reduce X-ray dose, patient discomfort, and procedure time by eliminating faulty insertion attempts. We report phantom studies and cadaver experiments in several clinical applications.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Krieger, Axel; Song, S; Cho, Nathan Bongjoon; Guion, Peter; Iordachita, Iulian; Fichtinger, Gabor; Whitcomb, Louis L
Development and Preliminary Evaluation of a MRI-guided Transrectal Prostate Intervention Journal Article
In: 0000.
@article{fichtinger0000_31,
  title     = {Development and Preliminary Evaluation of a {MRI}-guided Transrectal Prostate Intervention},
  author    = {Axel Krieger and S Song and Nathan Bongjoon Cho and Peter Guion and Iulian Iordachita and Gabor Fichtinger and Louis L Whitcomb},
  url       = {https://www.academia.edu/download/42878814/Development_and_Preliminary_Evaluation_o20160220-17499-1vaqu44.pdf},
  abstract  = {Numerous studies have shown that transrectal ultrasound (TRUS)-guided prostate biopsy fails to detect cancer in significant numbers, since contemporary ultrasound cannot resolve target lesions [1]. Improved biopsy targeting with magnetic resonance imaging (MRI) could potentially overcome the shortcomings of ultrasound for the diagnosis and local therapy for prostate cancer. To utilize such advantages, a number of MRI-compatible prostate intervention systems were introduced. However, previously reported systems require the patient to be removed from the MRI scanner during the interventional procedure. A fully actuated robot, however, enables both imaging and interventional procedures to be performed entirely inside the scanner without removing the patient. Moreover, a fully actuated robot could simplify and speed up the procedure; allow for real-time needle insertion visualization; enable detection of prostate deformation, misalignment, and deflection of the needle; and allow for on-the-spot corrections to needle placements.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Rivaz, Hassan; Foroughi, Pezhman; Boctor, Emad; Zellars, Richard; Fichtinger, Gabor; Hager, Gregory
High Resolution Ultrasound Elastography: a Dynamic Programming Approach Journal Article
In: MICCAI 2007 Workshop Proceedings, pp. 113, 0000.
@article{fichtinger0000_32,
  title     = {High Resolution Ultrasound Elastography: a Dynamic Programming Approach},
  author    = {Hassan Rivaz and Pezhman Foroughi and Emad Boctor and Richard Zellars and Gabor Fichtinger and Gregory Hager},
  journal   = {MICCAI 2007 Workshop Proceedings},
  pages     = {113},
  url       = {https://www.researchgate.net/profile/Poul-Nielsen-2/publication/224027899_Computational_biomechanics_of_the_breast_the_importance_of_the_reference_state/links/02bfe51326862be08a000000/Computational-biomechanics-of-the-breast-the-importance-of-the-reference-state.pdf#page=118},
  abstract  = {This paper presents a 2D strain estimation technique that minimizes a cost function using dynamic programming (DP). The cost function incorporates similarity of echo amplitudes and displacement continuity. The method is capable of creating high quality elastograms at the same resolution as the original RF data. Since tissue deformations are smooth, the incorporation of the smoothness into the cost function results in reduced decorrelation noise. Freehand palpation elastography shows that the method is more robust to signal decorrelation (caused by scatterer motion in high axial compression and non-axial motions of the probe) compared to the correlation techniques. In-vitro experiments depict that the method is able to detect small hard lesions. The method is also suitable for real time elastography.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Rivaz, Hassan; Boctor, Emad; Fichtinger, Gabor
Ultrasound Speckle Detection Journal Article
In: 0000.
@article{fichtinger0000_33,
  title     = {Ultrasound Speckle Detection},
  author    = {Hassan Rivaz and Emad Boctor and Gabor Fichtinger},
  url       = {https://citeseerx.ist.psu.edu/document?repid=rep1&type=pdf&doi=444ad2671e8c5046cc3ba8e198c532abe14177c8},
  abstract  = {Speckle detection is essential in many areas of quantitative ultrasound. In this work, speckle is characterized with R= SNR and S= skewness of the amplitude of the ultrasound signal data A. Different powers of A can be used to calculate R and S. Prager et al.[1] proposed a method for finding the optimum power value, which then was further scrutinized [2]. We propose using two different powers of A in R and S, and perform a large number of computer simulations to find these optimal values.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Xu, Sheng; Fichtinger, Gabor
Volumetric Reconstruction from Projected Images Journal Article
In: 0000.
@article{fichtinger0000_34,
  title     = {Volumetric Reconstruction from Projected Images},
  author    = {Sheng Xu and Gabor Fichtinger},
  url       = {https://scholar.google.com/scholar?cluster=1237346242502800244&hl=en&oi=scholarr},
  abstract  = {[METHODS]
After the silhouettes are drawn in each 2D image, the contours are digitized, filled, and pixelized. When the system goes operational for real patients, the silhouettes will be drawn by the physicians. The resulting binary image contains pixels of value 0 that correspond to the background and pixels of value 1 that correspond to the inside of the silhouette. Fluoroscopic images are typically used at their original resolution, but one may consider re-sampling when speed becomes a critical issue in intra-operative treatment planning. For simplicity, we reconstruct one object at a time.},
  internal-note = {abstract is a Methods-section excerpt scraped from the PDF, not the paper's real abstract -- verify against the original publication},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
After the silhouettes are drawn in each 2D image, the contours are digitized, filled, and pixelized. When the system goes operational or real patients, the silhouettes will be drawn by the physicians. The resulting binary image contains pixels of value 0 that correspond to the background and pixels of value 1 that correspond to the inside of the silhouette. Fluoroscopic images are typically used at their original resolution, but one may consider re-sampling when speed becomes a critical issue in intra-operative treatment planning. For simplicity, we reconstruct one object at a time.
Tokuda, Junichi; Fischer, Gregory S; Csoma, Csaba; Gobbi, David G; Blevins, Jack; Fichtinger, Gabor; Burdette, Clif; Tempany, Clare M; Hata, Nobuhiko
Software/Hardware Integration for MRI-guided Robotic Prostate Intervention using Open IGT Link Journal Article
In: 0000.
@article{fichtinger0000_35,
  title     = {Software/Hardware Integration for MRI-guided Robotic Prostate Intervention using Open IGT Link},
  author    = {Junichi Tokuda and Gregory S Fischer and Csaba Csoma and David G Gobbi and Jack Blevins and Gabor Fichtinger and Clif Burdette and Clare M Tempany and Nobuhiko Hata},
  url       = {https://www.na-mic.org/w/img_auth.php/9/9c/Miccaiws-tokuda.pdf},
  abstract  = {We propose a software and hardware integration strategy for MRI-guided robotic prostate intervention using an open network communication protocol. In the MRI-guided robotic intervention, the robot, navigation software and MRI scanner communicate one to another via Ethernet to exchange various data eg robot position, commands and MR images. Since the components were developed in the different sites including academic and industrial in our research partnership, establishing interoperativity among them with standardized communication protocol was essential in the collaborative development. As a key technology for the collaboration, we developed a new open network protocol called Open IGT Link. The specification of the protocol is simple enough for the developers to implement the interface to any kinds of software including embedded system, but extensible to transfer any types of data for image guided application in the standardized way. We report our system integration for MRI-guided robotic prostate intervention using Open IGT Link.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Barr, Colton
SlicerChat: Building a Local Chatbot for 3D Slicer Unpublished Forthcoming
Forthcoming.
@unpublished{barr2024slicerchat,
  title     = {SlicerChat: Building a Local Chatbot for 3D Slicer},
  author    = {Colton Barr},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/06/SlicerChat_2024.pdf},
  note      = {Forthcoming},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {unpublished}
}
Yang, Jianming; Hisey, Rebecca; Bierbrier, Joshua; Fichtinger, Gabor; Law, Christine; Holden, Matthew
Frame Selection Methods to Streamline Surgical Video Annotation for Tool Detection Tasks Proceedings Forthcoming
Forthcoming.
@proceedings{yang_frameselection_proc,
title = {Frame Selection Methods to Streamline Surgical Video Annotation for Tool Detection Tasks},
author = {Jianming Yang and Rebecca Hisey and Joshua Bierbrier and Gabor Fichtinger and Christine Law and Matthew Holden},
abstract = {Given the growing volume of surgical data and the
increasing demand for annotation, there is a pressing need to
streamline the annotation process for surgical videos. Previously,
annotation tools for object detection tasks have greatly evolved,
reducing time expense and enhancing ease. There are also many
initial frame selection approaches for Artificial Intelligence
(AI) assisted annotation tasks to further reduce human effort.
However, these methods have rarely been implemented and
reported in the context of surgical datasets, especially in cataract
surgery datasets. The identification of initial frames to annotate
before the use of any tools or algorithms determines annotation
efficiency. Therefore, in this paper, we chose to prioritize the
development of a method for selecting initial frames to facilitate
the subsequent automated annotation process. We propose a
customized initial frames selection method based on feature
clustering and compare it to commonly used temporal selection
methods. In each method, initial frames from cataract surgery
videos are selected to train a surgical tool detection model.
The model assists in the automated annotation process by
predicting bounding boxes for the surgery video objects in the
remaining frames. Evaluations of these methods are based on
how many edits users need to perform when annotating the
initial frames and how many edits users are expected to perform
to correct all predictions. Additionally, the total annotation cost
for each method is compared. Results indicate that on average,
the proposed cluster-based approach requires the fewest total
edits and exhibits the lowest total annotation cost compared
to conventional methods. These findings highlight a promising
direction for developing a complete application, featuring
streamlined AI-assisted annotation processes for surgical tool
detection tasks.},
internal-note = {stray leading em dash removed from the PDF-extracted abstract; this record duplicates the @conference entry for the same paper -- consider keeping only one},
keywords = {},
pubstate = {forthcoming},
tppubtype = {proceedings}
}
increasing demand for annotation, there is a pressing need to
streamline the annotation process for surgical videos. Previously,
annotation tools for object detection tasks have greatly evolved,
reducing time expense and enhancing ease. There are also many
initial frame selection approaches for Artificial Intelligence
(AI) assisted annotation tasks to further reduce human effort.
However, these methods have rarely been implemented and
reported in the context of surgical datasets, especially in cataract
surgery datasets. The identification of initial frames to annotate
before the use of any tools or algorithms determines annotation
efficiency. Therefore, in this paper, we chose to prioritize the
development of a method for selecting initial frames to facilitate
the subsequent automated annotation process. We propose a
customized initial frames selection method based on feature
clustering and compare it to commonly used temporal selection
methods. In each method, initial frames from cataract surgery
videos are selected to train a surgical tool detection model.
The model assists in the automated annotation process by
predicting bounding boxes for the surgery video objects in the
remaining frames. Evaluations of these methods are based on
how many edits users need to perform when annotating the
initial frames and how many edits users are expected to perform
to correct all predictions. Additionally, the total annotation cost
for each method is compared. Results indicate that on average,
the proposed cluster-based approach requires the fewest total
edits and exhibits the lowest total annotation cost compared
to conventional methods. These findings highlight a promising
direction for developing a complete application, featuring
streamlined AI-assisted annotation processes for surgical tool
detection tasks.
Yang, Jianming; Hisey, Rebecca; Bierbrier, Joshua; Fichtinger, Gabor; Law, Christine; Holden, Matthew
Frame Selection Methods to Streamline Surgical Video Annotation for Tool Detection Tasks Conference Forthcoming
IEEE, Forthcoming.
@conference{yang_frameselection_conf,
title = {Frame Selection Methods to Streamline Surgical Video Annotation for Tool Detection Tasks},
author = {Jianming Yang and Rebecca Hisey and Joshua Bierbrier and Gabor Fichtinger and Christine Law and Matthew Holden},
publisher = {IEEE},
abstract = {Given the growing volume of surgical data and the
increasing demand for annotation, there is a pressing need to
streamline the annotation process for surgical videos. Previously,
annotation tools for object detection tasks have greatly evolved,
reducing time expense and enhancing ease. There are also many
initial frame selection approaches for Artificial Intelligence
(AI) assisted annotation tasks to further reduce human effort.
However, these methods have rarely been implemented and
reported in the context of surgical datasets, especially in cataract
surgery datasets. The identification of initial frames to annotate
before the use of any tools or algorithms determines annotation
efficiency. Therefore, in this paper, we chose to prioritize the
development of a method for selecting initial frames to facilitate
the subsequent automated annotation process. We propose a
customized initial frames selection method based on feature
clustering and compare it to commonly used temporal selection
methods. In each method, initial frames from cataract surgery
videos are selected to train a surgical tool detection model.
The model assists in the automated annotation process by
predicting bounding boxes for the surgery video objects in the
remaining frames. Evaluations of these methods are based on
how many edits users need to perform when annotating the
initial frames and how many edits users are expected to perform
to correct all predictions. Additionally, the total annotation cost
for each method is compared. Results indicate that on average,
the proposed cluster-based approach requires the fewest total
edits and exhibits the lowest total annotation cost compared
to conventional methods. These findings highlight a promising
direction for developing a complete application, featuring
streamlined AI-assisted annotation processes for surgical tool
detection tasks.},
internal-note = {this record duplicates the @proceedings entry for the same paper -- consider keeping only one},
keywords = {},
pubstate = {forthcoming},
tppubtype = {conference}
}
increasing demand for annotation, there is a pressing need to
streamline the annotation process for surgical videos. Previously,
annotation tools for object detection tasks have greatly evolved,
reducing time expense and enhancing ease. There are also many
initial frame selection approaches for Artificial Intelligence
(AI) assisted annotation tasks to further reduce human effort.
However, these methods have rarely been implemented and
reported in the context of surgical datasets, especially in cataract
surgery datasets. The identification of initial frames to annotate
before the use of any tools or algorithms determines annotation
efficiency. Therefore, in this paper, we chose to prioritize the
development of a method for selecting initial frames to facilitate
the subsequent automated annotation process. We propose a
customized initial frames selection method based on feature
clustering and compare it to commonly used temporal selection
methods. In each method, initial frames from cataract surgery
videos are selected to train a surgical tool detection model.
The model assists in the automated annotation process by
predicting bounding boxes for the surgery video objects in the
remaining frames. Evaluations of these methods are based on
how many edits users need to perform when annotating the
initial frames and how many edits users are expected to perform
to correct all predictions. Additionally, the total annotation cost
for each method is compared. Results indicate that on average,
the proposed cluster-based approach requires the fewest total
edits and exhibits the lowest total annotation cost compared
to conventional methods. These findings highlight a promising
direction for developing a complete application, featuring
streamlined AI-assisted annotation processes for surgical tool
detection tasks.
Taylor, Russell H; Menciassi, Arianna; Fichtinger, Gabor; Dario, Paolo
Medical Robotics Journal Article
In: Systems, Research, vol. 52, iss. 6, pp. 1208, 0000.
@article{fichtinger0000c,
  title     = {Medical Robotics},
  author    = {Russell H Taylor and Arianna Menciassi and Gabor Fichtinger and Paolo Dario},
  url       = {https://scholar.google.ca/citations?view_op=view_citation&hl=en&user=_KxkI6UAAAAJ&cstart=300&pagesize=100&citation_for_view=_KxkI6UAAAAJ:N4u4nq0IxgcC},
  journal   = {Systems, Research},
  volume    = {52},
  issue     = {6},
  pages     = {1208},
  internal-note = {auto-export artifact cleaned: issue was "6Registration"; the journal field ("Systems, Research") also looks garbled -- verify against the original publication},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Sorantin, Erich; Balogh, Emese; Bartroli, Anna Vilanova; Palagyi, Kalman; Nyul, Laszlo G; Loncaric, Sven; Subasic, Marko; Kovacevic, Domagoj; Dudasne-Nagy, Marianna; Mate, Eors; Kari, Bela; Fichtinger, Gabor; DeWeese, Theodore L; Patriciu, Alexandru; Tanacs, Attila; Mazilu, Dumitru; Anderson, James H; Masamune, Ken; Taylor, Russel H; Stoianovici, Dan; Kuba, Attila; Balogh, Emese; Nyul, Laszlo G; Falcao, Alexandre X; Udupa, Jayaram K; Palagyi, Kalman; Sorantin, Erich; Halmai, Csongor; Erdohelyi, Balazs; Palagyi, Kalman; Nyul, Laszlo G; Olle, Krisztian; Geiger, Bernhard; Lindbichler, Franz; Friedrich, Gerhard; Kiesler, Karl
Virtual Dissection of the Colon Journal Article
In: 3D Image Processing–Techniques and Clinical Applications, vol. 21, pp. 263-273, 0000.
@article{fichtinger0000d,
  title     = {Virtual Dissection of the Colon},
  author    = {Erich Sorantin and Emese Balogh and Anna Vilanova Bartroli and Kalman Palagyi and Laszlo G Nyul and Sven Loncaric and Marko Subasic and Domagoj Kovacevic and Marianna Dudasne-Nagy and Eors Mate and Bela Kari and Gabor Fichtinger and Theodore L DeWeese and Alexandru Patriciu and Attila Tanacs and Dumitru Mazilu and James H Anderson and Ken Masamune and Russel H Taylor and Dan Stoianovici and Attila Kuba and Emese Balogh and Laszlo G Nyul and Alexandre X Falcao and Jayaram K Udupa and Kalman Palagyi and Erich Sorantin and Csongor Halmai and Balazs Erdohelyi and Kalman Palagyi and Laszlo G Nyul and Krisztian Olle and Bernhard Geiger and Franz Lindbichler and Gerhard Friedrich and Karl Kiesler},
  url       = {https://scholar.google.com/scholar?cluster=955860295717033248&hl=en&oi=scholarr},
  journal   = {3D Image Processing–Techniques and Clinical Applications},
  volume    = {21},
  pages     = {263-273},
  publisher = {Springer Verlag},
  abstract  = {In this paper, we present a new mathematical method that synthesizes normal data sets for quantification of regional myocardium perfusion. In clinical practice, regional myocardial perfusion is often measured with a gamma camera and quantified via circumferential profile analysis. Normal reference profile data is used to increase the accuracy of the clinical interpretations. Our goal is to create reference data from an existing set of archived studies. An iterative mathematical method, based on two statistical hypotheses, was used to generate the study set instead of collecting normal examinations from a healthy population. Clinical validation is based on interpretations by six independent observers. Results of evaluation with synthesized normal data and its validation are presented.},
  internal-note = {auto-export artifact: the abstract describes a myocardial perfusion paper, not "Virtual Dissection of the Colon", and the author list appears to merge the authors of several chapters -- verify against the original publication before citing},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Fichtinger, Gabor; Jain, Ameet; Mustufa, Tabish; Wyrobek, Keenan; Chirikijian, Greg; Zhou, Yu; Burdette, Everette C
Image registration of multiple medical imaging modalities using a multiple degree-of-freedom-encoded fiducial device Journal Article
In: 0000.
@article{fichtinger0000b,
  title     = {Image registration of multiple medical imaging modalities using a multiple degree-of-freedom-encoded fiducial device},
  author    = {Gabor Fichtinger and Ameet Jain and Tabish Mustufa and Keenan Wyrobek and Greg Chirikijian and Yu Zhou and Everette C Burdette},
  url       = {https://scholar.google.ca/citations?view_op=view_citation&hl=en&user=_KxkI6UAAAAJ&cstart=20&pagesize=80&citation_for_view=_KxkI6UAAAAJ:4fKUyHm3Qg0C},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Ungi, Tamas; Moult, Eric; Schwab, Joseph H.; Fichtinger, Gabor
Ultrasound Snapshots in Percutaneous Pedicle Screw Placement Navigation: A Feasibility Study Journal Article
In: Clinical Orthopaedics and Related Research, 0000.
@article{Ungi2013e,
  title     = {Ultrasound Snapshots in Percutaneous Pedicle Screw Placement Navigation: A Feasibility Study},
  author    = {Tamas Ungi and Eric Moult and Joseph H. Schwab and Gabor Fichtinger},
  url       = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ungi2013e_WebVersion.pdf},
  journal   = {Clinical Orthopaedics and Related Research},
  year      = {2013},
  internal-note = {year inferred from the citation key and PDF filename (Ungi2013e) -- confirm volume/issue/pages against the journal record},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}