Pompeu-Robinson, Alexandra M.; Gray, James; Marble, Joshua; Peikari, Hamed; Hall, Jena; U-Thainual, Paweena; Aboofazeli, Mohammad; Lasso, Andras; Fichtinger, Gabor
Validation platform for ultrasound-based monitoring of thermal ablation Conference
SPIE - Medical Imaging, San Diego, 2010.
@conference{Pompeu-Robinson2010,
title = {Validation platform for ultrasound-based monitoring of thermal ablation},
author = {Alexandra M. Pompeu-Robinson and James Gray and Joshua Marble and Hamed Peikari and Jena Hall and Paweena U-Thainual and Mohammad Aboofazeli and Andras Lasso and Gabor Fichtinger},
doi = {10.1117/12.844053},
year = {2010},
date = {2010-01-01},
booktitle = {SPIE - Medical Imaging},
address = {San Diego},
abstract = {PURPOSE: A ground-truth validation platform was developed to provide spatial correlation between ultrasound (US), temperature measurements and histopathology images to validate US based thermal ablation monitoring methods. METHOD: The test-bed apparatus consists of a container box with integrated fiducial lines. Tissue samples are suspended within the box using agar gel as the fixation medium. Following US imaging, the gel block is sliced and pathology images are acquired. Interactive software segments the fiducials as well as structures of interest in the pathology and US images. The software reconstructs the regions in 3D space and performs analysis and comparison of the features identified from both imaging modalities. RESULTS: The apparatus and software were constructed to meet technical requirements. Tissue samples were contoured, reconstructed and registered in the common coordinate system of fiducials. There was agreement between the sample shapes, but systematic shift of several millimeters was found between histopathology and US. This indicates that during pathology slicing shear forces tend to dislocate the fiducial lines. Softer fiducial lines and harder gel material can eliminate this problem. CONCLUSION: Viability of concept was presented. Despite our straightforward approach, further experimental work is required to optimize all materials and customize software.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Xu, Helen; Lasso, Andras; Vikal, Siddharth; Guion, Peter; Krieger, Axel; Kaushal, Aradhana; Whitcomb, Louis L; Fichtinger, Gabor
Clinical accuracy of robot-assisted prostate biopsy in closed MRI scanner Journal Article
In: The Hamlyn Symposium on Medical Robotics, London, pp. 7-8, 2010.
@article{fichtinger2010u,
title = {Clinical accuracy of robot-assisted prostate biopsy in closed MRI scanner},
author = {Helen Xu and Andras Lasso and Siddharth Vikal and Peter Guion and Axel Krieger and Aradhana Kaushal and Louis L Whitcomb and Gabor Fichtinger},
url = {http://perk.cs.queensu.ca/sites/perk.cs.queensu.ca/files/Xu2010b.pdf},
year = {2010},
date = {2010-01-01},
journal = {The Hamlyn Symposium on Medical Robotics},
address = {London},
pages = {7--8},
abstract = {Prostate cancer, affecting one in every six men, remains the number one cancer-related death in men [1]. In the pursuit of more accurate biopsy, Krieger and Susil [2, 3] developed robotic assistance under MR image guidance. To date, their system has been used in 200+ biopsies at the US National Cancer Institute. A limited validation study was presented earlier [4]. Here we report a more comprehensive retrospective evaluation of the Krieger-Susil biopsy system. We analyze a larger set of patient data in an improved validation workflow and produce a formal statistical analysis and draw strong conclusions.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Fallavollita, Pascal; Burdette, Clif; Song, Danny; Abolmaesumi, Purang; Fichtinger, Gabor
C-arm pose estimation in prostate brachytherapy by registration to ultrasound Journal Article
In: pp. 311-318, 2010.
@article{fichtinger2010v,
title = {C-arm pose estimation in prostate brachytherapy by registration to ultrasound},
author = {Pascal Fallavollita and Clif Burdette and Danny Song and Purang Abolmaesumi and Gabor Fichtinger},
url = {https://link.springer.com/chapter/10.1007/978-3-642-15711-0_39},
doi = {10.1007/978-3-642-15711-0_39},
year = {2010},
date = {2010-01-01},
pages = {311--318},
publisher = {Springer Berlin Heidelberg},
abstract = {In prostate brachytherapy, transrectal ultrasound (TRUS) is used to visualize the anatomy, while implanted seeds can be seen in C-arm fluoroscopy. Intra-operative dosimetry optimization requires reconstruction of the implanted seeds from multiple C-arm fluoroscopy images, which in turn requires estimation of the C-arm poses. We estimate the pose of the C-arm by two-stage registration between the 2D fluoroscopy images to a 3D TRUS volume. As single-view 2D/3D registration tends to yield depth error, we first estimate the depth from multiple 2D fluoro images and input this to a single-view 2D/3D registration. A commercial phantom was implanted with seeds and imaged with TRUS and CT. Ground-truth registration was established between the two by radiographic fiducials. Synthetic ground-truth fluoro images were created from the CT volume and registered to the 3D TRUS. The average rotation and …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Tadayyon, Hadi; Vikal, Siddharth; Gill, Sean; Lasso, Andras; Fichtinger, Gabor
MRI-guided prostate motion tracking by means of multislice-to-volume registration Journal Article
In: vol. 7625, pp. 904-911, 2010.
@article{fichtinger2010w,
title = {MRI-guided prostate motion tracking by means of multislice-to-volume registration},
author = {Hadi Tadayyon and Siddharth Vikal and Sean Gill and Andras Lasso and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/7625/76252V/MRI-GUIDED-prostate-motion-tracking-by-means-of-multislice-to/10.1117/12.844454.short},
doi = {10.1117/12.844454},
year = {2010},
date = {2010-01-01},
volume = {7625},
pages = {904--911},
publisher = {SPIE},
abstract = {We developed an algorithm for tracking prostate motion during MRI-guided prostatic needle placement, with the primary application in prostate biopsy. Our algorithm has been tested on simulated patient and phantom data. The algorithm features a robust automatic restart and a 12-core biopsy error validation scheme. Simulation tests were performed on four patient MRI pre-operative volumes. Three orthogonal slices were extracted from the pre-operative volume to simulate the intra-operative volume and a volume of interest was defined to isolate the prostate. Phantom tests used six datasets, each representing the phantom at a known perturbed position. These volumes were registered to their corresponding reference volume (the phantom at its home position). Convergence tests on the phantom data showed that the algorithm demonstrated accurate results at 100% confidence level for initial misalignments of less …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Song, S; Cho, Nathan Bongjoon; Iordachita, Iulian; Fischer, Gregory Scott; Tokuda, Junichi; Hata, Nobuhiko; Fichtinger, Gabor; Tempany, Clare
Development of a Pneumatic Robot for MRI-guided Transperineal Prostate Intervention Journal Article
In: 18th Scientific Meeting and Exhibition of the International Society of Magnetic Resonance in Medicine-ISMRM, 2010.
@article{fichtinger2010x,
title = {Development of a Pneumatic Robot for MRI-guided Transperineal Prostate Intervention},
author = {Sang-Eun Song and Nathan Bongjoon Cho and Iulian Iordachita and Gregory Scott Fischer and Junichi Tokuda and Nobuhiko Hata and Gabor Fichtinger and Clare Tempany},
url = {https://www.academia.edu/download/41205898/546f9ae10cf2d67fc0311977.pdf20160115-19908-re80hc.pdf},
year = {2010},
date = {2010-01-01},
journal = {18th Scientific Meeting and Exhibition of the International Society of Magnetic Resonance in Medicine-ISMRM},
abstract = {Transrectal ultrasound (TRUS) guidance is the most commonly used navigation method for the biopsy and brachytherapy. However, TRUS-guided biopsy has a poor cancer detection rate. In order for greater detection, magnetic resonance imaging (MRI) has been sought for the prostate intervention. Due to strong magnetic field that requires MRI-compatibility of surgical devices and physical limitation of in-bore access and workspace, closed-bore high-field MRI has not been widely adopted for prostate interventions. Recently, a number of MRI-compatible pneumatically actuated robotic systems for in-bore operation have been developed [1]. This study introduces a new pneumatic robot development by understanding advantages and disadvantages of such robots, and investigating new strategic and engineering approaches for enhanced controllability and optimized workflow towards clinical implementation.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Gobbi, DG; Mousavi, P; Campigotto, A; Dickinson, AWL; Abolmaesumi, P
Visual programming of VTK pipelines in Simulink Journal Article
In: The VTK Journal, 2010.
@article{fichtinger2010y,
title = {Visual programming of VTK pipelines in Simulink},
author = {DG Gobbi and P Mousavi and A Campigotto and AWL Dickinson and P Abolmaesumi},
url = {https://scholar.google.com/scholar?cluster=8929457771243975168&hl=en&oi=scholarr},
year = {2010},
date = {2010-01-01},
journal = {The VTK Journal},
abstract = {We have created a wrapper package named SimVTK that allows VTK, and third-party classes derived from VTK classes, to be seamlessly integrated with MATLAB R G’s Simulink R G interface. Our package generates a loadable Simulink module for each VTK class, which is then represented as a “block” on the Simulink canvas, and can be connected with other blocks to form a pipeline. Each block can be doubleclicked to bring up a dialog box that allows introspection and control of the VTK class. After a VTK pipeline has been built, it can be run interactively from within Simulink. The outputs of the pipeline can be displayed in an interactive render window, written to a disk file, or exported to a MATLAB variable. Within Simulink, the VTK pipeline can also be connected to the ITK pipeline through the use of our SimITK package. We generate the VTK-Simulink wrapper automatically though the use of CMake build scripts …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Xu, H; Lasso, A; Vikal, S; Guion, P; Krieger, A; Kaushal, A; Whitcomb, L; Fichtinger, G
SU‐GG‐I‐119: MRI‐Guided Transrectal Robotic Prostate Biopsy Validation Journal Article
In: Medical Physics, vol. 37, iss. 6Part4, pp. 3128-3129, 2010.
@article{fichtinger2010z,
title = {SU-GG-I-119: MRI-Guided Transrectal Robotic Prostate Biopsy Validation},
author = {H Xu and A Lasso and S Vikal and P Guion and A Krieger and A Kaushal and L Whitcomb and G Fichtinger},
url = {https://aapm.onlinelibrary.wiley.com/doi/abs/10.1118/1.3468153},
doi = {10.1118/1.3468153},
year = {2010},
date = {2010-01-01},
journal = {Medical Physics},
volume = {37},
issue = {6Part4},
pages = {3128--3129},
publisher = {American Association of Physicists in Medicine},
abstract = {PURPOSE
We report a quantitative evaluation of the clinical accuracy of an MRI‐guided robotic transrectal prostate biopsy system that has been in use for over five years at the U.S. National Cancer Institute. Through this retrospective study we also expect to gain insight to the nature and interplay of error factors in MRI‐guided transrectal robotic prostate biopsy.
METHODS
Pre‐ and post‐insertion MR images show that while the whole group of relevant organs moves deformably, the prostate, rectum, and pubic bone shows little deformation according to Karnik et al. (SPIE Medical Imaging, vol. 7625, 2010.) Hence our validation framework entails two‐stage rigid volume registration using mutual information between the pre‐and post insertion images, using the Insight Toolkit. Global registration over the whole scene capture coupled gross prostate motion. Next, to account for residual decoupled prostate motion we re …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
We report a quantitative evaluation of the clinical accuracy of an MRI‐guided robotic transrectal prostate biopsy system that has been in use for over five years at the U.S. National Cancer Institute. Through this retrospective study we also expect to gain insight to the nature and interplay of error factors in MRI‐guided transrectal robotic prostate biopsy.
METHODS
Pre‐ and post‐insertion MR images show that while the whole group of relevant organs moves deformably, the prostate, rectum, and pubic bone shows little deformation according to Karnik et al. (SPIE Medical Imaging, vol. 7625, 2010.) Hence our validation framework entails two‐stage rigid volume registration using mutual information between the pre‐and post insertion images, using the Insight Toolkit. Global registration over the whole scene capture coupled gross prostate motion. Next, to account for residual decoupled prostate motion we re …
Fichtinger, Gabor
MO‐B‐203‐01: Robotic Brachytherapy‐Overview of Robotic Brachytherapy Approaches and Synergistic Applications Journal Article
In: vol. 37, iss. 6Part15, pp. 3336-3336, 2010.
@article{fichtinger2010_27,
title = {MO-B-203-01: Robotic Brachytherapy-Overview of Robotic Brachytherapy Approaches and Synergistic Applications},
author = {Gabor Fichtinger},
url = {https://aapm.onlinelibrary.wiley.com/doi/abs/10.1118/1.3469042},
doi = {10.1118/1.3469042},
year = {2010},
date = {2010-01-01},
urldate = {2010-01-01},
journal = {Medical Physics},
volume = {37},
issue = {6Part15},
pages = {3336},
publisher = {American Association of Physicists in Medicine},
abstract = {Image‐guided robotics continues to make advances in assisting brachytherapy procedures and synergistic clinical applications. Each anatomical site, access route image guidance modality has its own unique challenges for both the physician and technology developer. Proper understanding of these is key importance for further advances in the field.
The lecture will provide a brief technological history and review the state art. We will examine relevant robotic systems, including available robotic systems for brachytherapy, from the perspectives of clinical features, image guidance modalities, and mechatronic techniques. We will identify their innovative and positive features, as well as limitations, challenges, and liabilities.
Based on progresses, successes and sometimes even failures, we will draw conclusions and offer recommendations for future consideration.
Learning Objectives:
1. Short historical background …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
The lecture will provide a brief technological history and review the state art. We will examine relevant robotic systems, including available robotic systems for brachytherapy, from the perspectives of clinical features, image guidance modalities, and mechatronic techniques. We will identify their innovative and positive features, as well as limitations, challenges, and liabilities.
Based on progresses, successes and sometimes even failures, we will draw conclusions and offer recommendations for future consideration.
Learning Objectives:
1. Short historical background …
Cowan, Noah J; Goldberg, Ken; Chirikjian, Gregory S; Fichtinger, Gabor; Alterovitz, Ron; Reed, Kyle B; Kallem, Vinutha; Park, Wooram; Misra, Sarthak; Okamura, Allison M
Robotic needle steering: Design, modeling, planning, and image guidance Journal Article
In: pp. 557-582, 2010.
@article{fichtinger2010,
title = {Robotic needle steering: Design, modeling, planning, and image guidance},
author = {Noah J Cowan and Ken Goldberg and Gregory S Chirikjian and Gabor Fichtinger and Ron Alterovitz and Kyle B Reed and Vinutha Kallem and Wooram Park and Sarthak Misra and Allison M Okamura},
url = {https://link.springer.com/chapter/10.1007/978-1-4419-1126-1_23},
doi = {10.1007/978-1-4419-1126-1_23},
year = {2010},
date = {2010-01-01},
pages = {557--582},
publisher = {Springer US},
abstract = {This chapter describes how advances in needle design, modeling, planning, and image guidance make it possible to steer flexible needles from outside the body to reach specified anatomical targets not accessible using traditional needle insertion methods. Steering can be achieved using a variety of mechanisms, including tip-based steering, lateral manipulation, and applying forces to the tissue as the needle is inserted. Models of these steering mechanisms can predict needle trajectory based on steering commands, motivating new preoperative path planning algorithms. These planning algorithms can be integrated with emerging needle imaging technology to achieve intraoperative closed-loop guidance and control of steerable needles.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Song, Sang-Eun; Cho, Nathan B; Fischer, Gregory; Hata, Nobuhiko; Tempany, Clare; Fichtinger, Gabor; Iordachita, Iulian
Development of a pneumatic robot for MRI-guided transperineal prostate biopsy and brachytherapy: New approaches Journal Article
In: pp. 2580-2585, 2010.
@article{fichtinger2010b,
title = {Development of a pneumatic robot for MRI-guided transperineal prostate biopsy and brachytherapy: New approaches},
author = {Sang-Eun Song and Nathan B Cho and Gregory Fischer and Nobuhiko Hata and Clare Tempany and Gabor Fichtinger and Iulian Iordachita},
url = {https://ieeexplore.ieee.org/abstract/document/5509710/},
year = {2010},
date = {2010-01-01},
pages = {2580--2585},
publisher = {IEEE},
abstract = {Magnetic Resonance Imaging (MRI) guided prostate biopsy and brachytherapy has been introduced in order to enhance the cancer detection and treatment. For the accurate needle positioning, a number of robotic assistants have been developed. However, problems exist due to the strong magnetic field and limited workspace. Pneumatically actuated robots have shown the minimum distraction in the environment but the confined workspace limits optimal robot design and thus controllability is often poor. To overcome the problem, a simple external damping mechanism using timing belts was sought and a 1-DOF mechanism test result indicated sufficient positioning accuracy. Based on the damping mechanism and modular system design approach, a new workspace-optimized 4-DOF parallel robot was developed for the MRI-guided prostate biopsy and brachytherapy. A preliminary evaluation of the robot was …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Ahmidi, Narges; Hager, Gregory D; Ishii, Lisa; Fichtinger, Gabor; Gallia, Gary L; Ishii, Masaru
Surgical task and skill classification from eye tracking and tool motion in minimally invasive surgery Journal Article
In: pp. 295-302, 2010.
@article{fichtinger2010c,
title = {Surgical task and skill classification from eye tracking and tool motion in minimally invasive surgery},
author = {Narges Ahmidi and Gregory D Hager and Lisa Ishii and Gabor Fichtinger and Gary L Gallia and Masaru Ishii},
url = {https://link.springer.com/chapter/10.1007/978-3-642-15711-0_37},
doi = {10.1007/978-3-642-15711-0_37},
year = {2010},
date = {2010-01-01},
pages = {295--302},
publisher = {Springer Berlin Heidelberg},
abstract = {In the context of minimally invasive surgery, clinical risks are highly associated with surgeons’ skill in manipulating surgical tools and their knowledge of the closed anatomy. A quantitative surgical skill assessment can reduce faulty procedures and prevent some surgical risks. In this paper focusing on sinus surgery, we present two methods to identify both skill level and task type by recording motion data of surgical tools as well as the surgeon’s eye gaze location on the screen. We generate a total of 14 discrete Hidden Markov Models for seven surgical tasks at both expert and novice levels using a repeated k-fold evaluation method. The dataset contains 95 expert and 139 novice trials of surgery over a cadaver. The results reveal two insights: eye-gaze data contains skill related structures; and adding this info to the surgical tool motion data improves skill assessment by 13.2% and 5.3% for expert and …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Tokuda, Junichi; Fischer, Gregory S; DiMaio, Simon P; Gobbi, David G; Csoma, Csaba; Mewes, Philip W; Fichtinger, Gabor; Tempany, Clare M; Hata, Nobuhiko
Integrated navigation and control software system for MRI-guided robotic prostate interventions Journal Article
In: Computerized Medical Imaging and Graphics, vol. 34, iss. 1, pp. 3-8, 2010.
@article{fichtinger2010d,
title = {Integrated navigation and control software system for MRI-guided robotic prostate interventions},
author = {Junichi Tokuda and Gregory S Fischer and Simon P DiMaio and David G Gobbi and Csaba Csoma and Philip W Mewes and Gabor Fichtinger and Clare M Tempany and Nobuhiko Hata},
url = {https://www.sciencedirect.com/science/article/pii/S0895611109000901},
year = {2010},
date = {2010-01-01},
journal = {Computerized Medical Imaging and Graphics},
volume = {34},
issue = {1},
pages = {3--8},
publisher = {Pergamon},
abstract = {A software system to provide intuitive navigation for MRI-guided robotic transperineal prostate therapy is presented. In the system, the robot control unit, the MRI scanner, and the open-source navigation software are connected together via Ethernet to exchange commands, coordinates, and images using an open network communication protocol, OpenIGTLink. The system has six states called “workphases” that provide the necessary synchronization of all components during each stage of the clinical workflow, and the user interface guides the operator linearly through these workphases. On top of this framework, the software provides the following features for needle guidance: interactive target planning; 3D image visualization with current needle position; treatment monitoring through real-time MR images of needle trajectories in the prostate. These features are supported by calibration of robot and image …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Gao, Yi; Sandhu, Romeil; Fichtinger, Gabor; Tannenbaum, Allen Robert
A coupled global registration and segmentation framework with application to magnetic resonance prostate imagery Journal Article
In: IEEE transactions on medical imaging, vol. 29, iss. 10, pp. 1781-1794, 2010.
@article{fichtinger2010e,
title = {A coupled global registration and segmentation framework with application to magnetic resonance prostate imagery},
author = {Yi Gao and Romeil Sandhu and Gabor Fichtinger and Allen Robert Tannenbaum},
url = {https://ieeexplore.ieee.org/abstract/document/5482197/},
year = {2010},
date = {2010-01-01},
journal = {IEEE Transactions on Medical Imaging},
volume = {29},
issue = {10},
pages = {1781--1794},
publisher = {IEEE},
abstract = {Extracting the prostate from magnetic resonance (MR) imagery is a challenging and important task for medical image analysis and surgical planning. We present in this work a unified shape-based framework to extract the prostate from MR prostate imagery. In many cases, shape-based segmentation is a two-part problem. First, one must properly align a set of training shapes such that any variation in shape is not due to pose. Then segmentation can be performed under the constraint of the learnt shape. However, the general registration task of prostate shapes becomes increasingly difficult due to the large variations in pose and shape in the training sets, and is not readily handled through existing techniques. Thus, the contributions of this paper are twofold. We first explicitly address the registration problem by representing the shapes of a training set as point clouds. In doing so, we are able to exploit the more …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Krieger, Axel; Iordachita, Iulian; Song, Sang-Eun; Cho, Nathan B; Guion, Peter; Fichtinger, Gabor; Whitcomb, Louis L
Development and preliminary evaluation of an actuated MRI-compatible robotic device for MRI-guided prostate intervention Journal Article
In: pp. 1066-1073, 2010.
@article{fichtinger2010f,
title = {Development and preliminary evaluation of an actuated MRI-compatible robotic device for MRI-guided prostate intervention},
author = {Axel Krieger and Iulian Iordachita and Sang-Eun Song and Nathan B Cho and Peter Guion and Gabor Fichtinger and Louis L Whitcomb},
url = {https://ieeexplore.ieee.org/abstract/document/5509727/},
year = {2010},
date = {2010-01-01},
pages = {1066--1073},
publisher = {IEEE},
abstract = {This paper reports the design, development, and magnetic resonance imaging (MRI) compatibility evaluation of an actuated transrectal prostate robot for MRI-guided intervention. The robot employs an actuated needle guide with the goal of reducing interventional procedure times and increasing needle placement accuracy. The design of the robot, employing piezo-ceramic-motor actuated needle guide positioning and manual needle insertion, is reported. Results of a MRI compatibility study show no reduction of MRI image signal-to-noise-ratio (SNR) with the motors disabled and a 40% to 60% reduction in SNR with the motors enabled. The addition of radio-frequency (RF) shielding is shown to significantly reduce image SNR degradation due to the presence of the robotic device.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Khallaghi, Siavash; Mousavi, Parvin; Gong, Ren Hui; Gill, Sean; Boisvert, Jonathan; Fichtinger, Gabor; Pichora, David; Borschneck, Dan; Abolmaesumi, Purang
Registration of a statistical shape model of the lumbar spine to 3D ultrasound images Journal Article
In: pp. 68-75, 2010.
@article{fichtinger2010g,
title = {Registration of a statistical shape model of the lumbar spine to 3D ultrasound images},
author = {Siavash Khallaghi and Parvin Mousavi and Ren Hui Gong and Sean Gill and Jonathan Boisvert and Gabor Fichtinger and David Pichora and Dan Borschneck and Purang Abolmaesumi},
url = {https://link.springer.com/chapter/10.1007/978-3-642-15745-5_9},
doi = {10.1007/978-3-642-15745-5_9},
year = {2010},
date = {2010-01-01},
pages = {68--75},
publisher = {Springer Berlin Heidelberg},
abstract = {Motivation: Spinal needle injections are technically demanding procedures. The use of ultrasound image guidance without prior CT and MR imagery promises to improve the efficacy and safety of these procedures in an affordable manner. Methodology: We propose to create a statistical shape model of the lumbar spine and warp this atlas to patient-specific ultrasound images during the needle placement procedure. From CT image volumes of 35 patients, statistical shape model of the L3 vertebra is built, including mean shape and main modes of variation. This shape model is registered to the ultrasound data by simultaneously optimizing the parameters of the model and its relative pose. Ground-truth data was established by printing 3D anatomical models of 3 patients using a rapid prototyping. CT and ultrasound data of these models were registered using fiducial markers. Results: Pairwise …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Xu, Helen; Lasso, Andras; Vikal, Siddharth; Guion, Peter; Krieger, Axel; Kaushal, Aradhana; Whitcomb, Louis L; Fichtinger, Gabor
MRI-guided robotic prostate biopsy: a clinical accuracy validation Journal Article
In: pp. 383-391, 2010.
@article{fichtinger2010h,
title = {MRI-guided robotic prostate biopsy: a clinical accuracy validation},
author = {Helen Xu and Andras Lasso and Siddharth Vikal and Peter Guion and Axel Krieger and Aradhana Kaushal and Louis L Whitcomb and Gabor Fichtinger},
url = {https://link.springer.com/chapter/10.1007/978-3-642-15711-0_48},
doi = {10.1007/978-3-642-15711-0_48},
year = {2010},
date = {2010-01-01},
pages = {383--391},
publisher = {Springer Berlin Heidelberg},
abstract = {Prostate cancer is a major health threat for men. For over five years, the U.S. National Cancer Institute has performed prostate biopsies with a magnetic resonance imaging (MRI)-guided robotic system. Purpose: A retrospective evaluation methodology and analysis of the clinical accuracy of this system is reported. Methods: Using the pre and post-needle insertion image volumes, a registration algorithm that contains a two-step rigid registration followed by a deformable refinement was developed to capture prostate dislocation during the procedure. The method was validated by using three-dimensional contour overlays of the segmented prostates and the registrations were accurate up to 2 mm. Results: It was found that tissue deformation was less of a factor than organ displacement. Out of the 82 biopsies from 21 patients, the mean target displacement, needle placement error, and clinical biopsy error was …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Vikal, Siddharth; U-Thainual, Paweena; Carrino, John A; Iordachita, Iulian; Fischer, Gregory S; Fichtinger, Gabor
Perk Station—Percutaneous surgery training and performance measurement platform Journal Article
In: Computerized Medical Imaging and Graphics, vol. 34, iss. 1, pp. 19-32, 2010.
@article{fichtinger2010i,
title = {Perk Station—Percutaneous surgery training and performance measurement platform},
author = {Siddharth Vikal and Paweena U-Thainual and John A Carrino and Iulian Iordachita and Gregory S Fischer and Gabor Fichtinger},
url = {https://www.sciencedirect.com/science/article/pii/S0895611109000603},
year = {2010},
date = {2010-01-01},
journal = {Computerized Medical Imaging and Graphics},
volume = {34},
issue = {1},
pages = {19--32},
publisher = {Pergamon},
abstract = {MOTIVATION
Image-guided percutaneous (through the skin) needle-based surgery has become part of routine clinical practice in performing procedures such as biopsies, injections and therapeutic implants. A novice physician typically performs needle interventions under the supervision of a senior physician; a slow and inherently subjective training process that lacks objective, quantitative assessment of the surgical skill and performance. Shortening the learning curve and increasing procedural consistency are important factors in assuring high-quality medical care.
METHODS
This paper describes a laboratory validation system, called Perk Station, for standardized training and performance measurement under different assistance techniques for needle-based surgical guidance systems. The initial goal of the Perk Station is to assess and compare different techniques: 2D image overlay, biplane laser guide, laser …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Image-guided percutaneous (through the skin) needle-based surgery has become part of routine clinical practice in performing procedures such as biopsies, injections and therapeutic implants. A novice physician typically performs needle interventions under the supervision of a senior physician; a slow and inherently subjective training process that lacks objective, quantitative assessment of the surgical skill and performance. Shortening the learning curve and increasing procedural consistency are important factors in assuring high-quality medical care.
METHODS
This paper describes a laboratory validation system, called Perk Station, for standardized training and performance measurement under different assistance techniques for needle-based surgical guidance systems. The initial goal of the Perk Station is to assess and compare different techniques: 2D image overlay, biplane laser guide, laser …
Lee, Junghoon; Labat, Christian; Jain, Ameet K; Song, Danny Y; Burdette, Everette Clif; Fichtinger, Gabor; Prince, Jerry L
REDMAPS: reduced-dimensionality matching for prostate brachytherapy seed reconstruction Journal Article
In: IEEE transactions on medical imaging, vol. 30, iss. 1, pp. 38-51, 2010.
@article{fichtinger2010j,
title = {REDMAPS: reduced-dimensionality matching for prostate brachytherapy seed reconstruction},
author = {Junghoon Lee and Christian Labat and Ameet K Jain and Danny Y Song and Everette Clif Burdette and Gabor Fichtinger and Jerry L Prince},
url = {https://ieeexplore.ieee.org/abstract/document/5512632/},
year = {2010},
date = {2010-01-01},
journal = {IEEE transactions on medical imaging},
volume = {30},
issue = {1},
pages = {38-51},
publisher = {IEEE},
abstract = {The success of prostate brachytherapy critically depends on delivering adequate dose to the prostate gland. Intraoperative localization of the implanted seeds provides potential for dose evaluation and optimization during therapy. A reduced-dimensionality matching algorithm for prostate brachytherapy seed reconstruction (REDMAPS) that uses multiple X-ray fluoroscopy images obtained from different poses is proposed. The seed reconstruction problem is formulated as a combinatorial optimization problem, and REDMAPS finds a solution in a clinically acceptable amount of time using dimensionality reduction to create a smaller space of possible solutions. Dimensionality reduction is possible since the optimal solution has approximately zero cost when the poses of the acquired images are known to be within a small error. REDMAPS is also formulated to address the “hidden seed problem” in which seeds overlap …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Fallavollita, P; Aghaloo, Z Karim; Burdette, EC; Song, DY; Abolmaesumi, P; Fichtinger, G
Registration between ultrasound and fluoroscopy or CT in prostate brachytherapy Journal Article
In: Medical physics, vol. 37, iss. 6Part1, pp. 2749-2760, 2010.
@article{fichtinger2010k,
  title = {Registration Between Ultrasound and Fluoroscopy or {CT} in Prostate Brachytherapy},
  author = {Fallavollita, Pascal and Aghaloo, Z. Karim and Burdette, E. Clif and Song, Danny Y. and Abolmaesumi, Purang and Fichtinger, Gabor},
  url = {https://aapm.onlinelibrary.wiley.com/doi/abs/10.1118/1.3416937},
  doi = {10.1118/1.3416937},
  year = {2010},
  date = {2010-01-01},
  journal = {Medical Physics},
  volume = {37},
  number = {6Part1},
  pages = {2749--2760},
  publisher = {American Association of Physicists in Medicine},
  abstract = {Purpose
In prostate brachytherapy, transrectal ultrasound (TRUS) is used to visualize the anatomy, while implanted seeds can be visualized by fluoroscopy. Intraoperative dosimetry optimization is possible using a combination of TRUS and fluoroscopy, but requires localization of the fluoroscopy-derived seed cloud, relative to the anatomy as seen on TRUS. The authors propose to develop a method of registration of TRUS images and the implants reconstructed from fluoroscopy.
Methods
A phantom was implanted with 48 seeds then imaged with TRUS and CT. Seeds were reconstructed from CT yielding a cloud of seeds. Fiducial-based ground-truth registration was established between the TRUS and CT. TRUS images are filtered, compounded, and registered to the reconstructed implants by using an intensity-based metric. The authors evaluated a volume-to-volume and point-to-volume registration scheme ...},
  keywords = {},
  pubstate = {published},
  tppubtype = {article}
}
In prostate brachytherapy, transrectal ultrasound (TRUS) is used to visualize the anatomy, while implanted seeds can be visualized by fluoroscopy. Intraoperative dosimetry optimization is possible using a combination of TRUS and fluoroscopy, but requires localization of the fluoroscopy‐derived seed cloud, relative to the anatomy as seen on TRUS. The authors propose to develop a method of registration of TRUS images and the implants reconstructed from fluoroscopy.
Methods
A phantom was implanted with 48 seeds then imaged with TRUS and CT. Seeds were reconstructed from CT yielding a cloud of seeds. Fiducial‐based ground‐truth registration was established between the TRUS and CT. TRUS images are filtered, compounded, and registered to the reconstructed implants by using an intensity‐based metric. The authors evaluated a volume‐to‐volume and point‐to‐volume registration scheme …
Chen, Elvis CS; Mousavi, Parvin; Gill, Sean; Fichtinger, Gabor; Abolmaesumi, Purang
Ultrasound guided spine needle insertion Journal Article
In: vol. 7625, pp. 1028-1035, 2010.
@inproceedings{fichtinger2010l,
  title = {Ultrasound Guided Spine Needle Insertion},
  author = {Chen, Elvis C. S. and Mousavi, Parvin and Gill, Sean and Fichtinger, Gabor and Abolmaesumi, Purang},
  url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/7625/762538/Ultrasound-guided-spine-needle-insertion/10.1117/12.843716.short},
  doi = {10.1117/12.843716},
  year = {2010},
  date = {2010-01-01},
  booktitle = {Medical Imaging 2010: Visualization, Image-Guided Procedures, and Modeling},
  series = {Proceedings of {SPIE}},
  volume = {7625},
  pages = {1028--1035},
  publisher = {SPIE},
  abstract = {An ultrasound (US) guided, CT augmented, spine needle insertion navigational system is introduced. The system consists of an electromagnetic (EM) sensor, an US machine, and a preoperative CT volume of the patient anatomy. Three-dimensional (3D) US volume is reconstructed intraoperatively from a set of two-dimensional (2D) freehand US slices, and is coregistered with the preoperative CT. This allows the preoperative CT volume to be used in the intraoperative clinical coordinate. The spatial relationship between the patient anatomy, surgical tools, and the US transducer are tracked using the EM sensor, and are displayed with respect to the CT volume. The pose of the US transducer is used to interpolate the CT volume, providing the physician with a 2D ``x-ray vision'' to guide the needle insertion. Many of the system software components are GPU-accelerated, allowing real-time performance of the guidance ...},
  keywords = {},
  pubstate = {published},
  tppubtype = {inproceedings}
}