Zachary Baum
Zachary (Zac) joined the Perk Lab as a second-year Biomedical Computing undergraduate student and completed both his undergraduate degree (2017) and his MSc (2019) in the lab.
His research focuses on Computer-Assisted Surgery and Guided Interventions, and he specializes in the use of augmented reality (AR) and holographic visualizations for surgical training and planning.
Ungi, Tamas; Greer, Hastings; Sunderland, Kyle R.; Wu, Victoria; Baum, Zachary M C; Schlenger, Christopher; Oetgen, Matthew; Cleary, Kevin; Aylward, Stephen; Fichtinger, Gabor
Automatic spine ultrasound segmentation for scoliosis visualization and measurement Journal Article
In: IEEE Transactions on Biomedical Engineering, vol. 67, no. 11, pp. 3234-3241, 2020.
@article{Ungi2020,
title = {Automatic spine ultrasound segmentation for scoliosis visualization and measurement},
author = {Tamas Ungi and Hastings Greer and Kyle R. Sunderland and Victoria Wu and Zachary M C Baum and Christopher Schlenger and Matthew Oetgen and Kevin Cleary and Stephen Aylward and Gabor Fichtinger},
url = {https://ieeexplore.ieee.org/document/9034149
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ungi2020.pdf},
doi = {10.1109/TBME.2020.2980540},
year = {2020},
date = {2020-03-01},
urldate = {2020-03-01},
journal = {IEEE Transactions on Biomedical Engineering},
volume = {67},
number = {11},
pages = {3234-3241},
abstract = {\emph{Objective:} Integrate tracked ultrasound and AI methods to provide a safer and more accessible alternative to X-ray for scoliosis measurement. We propose automatic ultrasound segmentation for 3-dimensional spine visualization and scoliosis measurement to address difficulties in using ultrasound for spine imaging. \emph{Methods:} We trained a convolutional neural network for spine segmentation on ultrasound scans using data from eight healthy adult volunteers. We tested the trained network on eight pediatric patients. We evaluated image segmentation and 3-dimensional volume reconstruction for scoliosis measurement. \emph{Results:} As expected, fuzzy segmentation metrics decreased when trained networks were translated from healthy volunteers to patients. Recall decreased from 0.72 to 0.64 (8.2% decrease), and precision from 0.31 to 0.27 (3.7% decrease). However, after finding optimal thresholds for prediction maps, binary segmentation metrics performed better on patient data. Recall decreased from 0.98 to 0.97 (1.6% decrease), and precision from 0.10 to 0.06 (4.5% decrease). Segmentation prediction maps were reconstructed to 3-dimensional volumes and scoliosis was measured in all patients. Measurement in these reconstructions took less than 1 minute and had a maximum error of 2.2° compared to X-ray. \emph{Conclusion:} Automatic spine segmentation makes scoliosis measurement both efficient and accurate in tracked ultrasound scans. \emph{Significance:} Automatic segmentation may overcome the limitations of tracked ultrasound that have so far prevented its use as an alternative to X-ray in scoliosis measurement.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
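As a quick illustration of the thresholding step described in the abstract above, the following minimal Python sketch binarizes a fuzzy prediction map at a chosen threshold and computes recall and precision against a ground-truth segmentation. The array sizes, threshold value, and synthetic data are illustrative assumptions, not values from the paper.

import numpy as np

def binarize_and_score(prediction_map, ground_truth, threshold=0.5):
    """Binarize a fuzzy prediction map and return (recall, precision)."""
    pred = prediction_map >= threshold
    truth = ground_truth.astype(bool)
    tp = np.logical_and(pred, truth).sum()   # true positives
    fp = np.logical_and(pred, ~truth).sum()  # false positives
    fn = np.logical_and(~pred, truth).sum()  # false negatives
    recall = tp / (tp + fn) if tp + fn else 0.0
    precision = tp / (tp + fp) if tp + fp else 0.0
    return recall, precision

# Random data standing in for one ultrasound slice and its labels.
rng = np.random.default_rng(0)
pred_map = rng.random((256, 256))      # fuzzy network output in [0, 1]
truth = rng.random((256, 256)) > 0.9   # sparse bone-surface labels
print(binarize_and_score(pred_map, truth, threshold=0.7))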
Baum, Zachary M C
Augmented reality training platform for placement of neurosurgical burr holes Masters Thesis
Queen’s University, Kingston, ON, 2019.
@mastersthesis{Baum2019c,
title = {Augmented reality training platform for placement of neurosurgical burr holes},
author = {Zachary M C Baum},
url = {http://hdl.handle.net/1974/26123
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2019c.pdf},
year = {2019},
date = {2019-04-01},
urldate = {2019-04-01},
type = {Masters Thesis},
address = {Kingston, ON},
school = {Queen’s University},
abstract = {Augmented reality has been used in neurosurgery to aid in the visualization of lesions, though it has not been widely adopted for simulation-based neurosurgical training. This work aims to determine whether augmented reality can improve identification of drill location and drill angle for neurosurgical procedures, and to define objective metrics for assessing trainee performance.

An augmented reality visualization system was developed using the Microsoft HoloLens. Trainee performance metrics were defined and validated intra-operatively in fifteen neurosurgical cases by attending neurosurgeons and trainees. Trainee performance in localization of drill location and angle tasks was assessed in a simulated training environment with augmented reality visualization and compared with two other visualization methods.

The proposed metrics allowed us to significantly differentiate levels of competence between attending neurosurgeons and trainees in identification of drill location with (p = 0.011) and without (p = 0.001) the HoloLens, and drill angle with (p = 0.032) the HoloLens. Augmented reality visualization significantly improved trainee performance in localization of drill location (p < 0.001 and p = 0.008) and angle (p < 0.001 and p < 0.001) in comparison to the two other visualization methods. Trainees rated augmented reality visualization equally or more helpful compared to the two other visualization methods.

Trainee performance assessment with augmented reality visualization and the proposed performance metrics stands to add practical value to neurosurgical training curricula. This work represents a necessary step in curriculum development in neurosurgical training for the task of drill location and angle localization in a variety of neurosurgical procedures.},
keywords = {},
pubstate = {published},
tppubtype = {mastersthesis}
}
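The drill localization tasks above reduce to two standard geometric error metrics: the distance between a trainee-selected and a planned entry point, and the angle between the corresponding drill axes. A minimal Python sketch of that reading follows; the exact formulation used in the thesis is not reproduced here, and the example vectors are assumptions.

import numpy as np

def position_error_mm(selected, planned):
    """Euclidean distance between selected and planned entry points (mm)."""
    return float(np.linalg.norm(np.asarray(selected) - np.asarray(planned)))

def angle_error_deg(selected_axis, planned_axis):
    """Angle in degrees between the selected and planned drill axes."""
    u = np.asarray(selected_axis, dtype=float)
    v = np.asarray(planned_axis, dtype=float)
    u /= np.linalg.norm(u)
    v /= np.linalg.norm(v)
    return float(np.degrees(np.arccos(np.clip(np.dot(u, v), -1.0, 1.0))))

print(position_error_mm([10.0, 5.0, 2.0], [12.0, 5.0, 1.0]))  # ~2.24 mm
print(angle_error_deg([0.0, 0.0, 1.0], [0.1, 0.0, 1.0]))      # ~5.7 degrees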
Baum, Zachary M C; Church, Ben; Lasso, Andras; Ungi, Tamas; Schlenger, Christopher; Borschneck, Daniel P.; Mousavi, Parvin; Fichtinger, Gabor
Step-wise identification of ultrasound-visible anatomical landmarks for 3D visualization of scoliotic spine Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Baum2019a,
title = {Step-wise identification of ultrasound-visible anatomical landmarks for 3D visualization of scoliotic spine},
author = {Zachary M C Baum and Ben Church and Andras Lasso and Tamas Ungi and Christopher Schlenger and Daniel P. Borschneck and Parvin Mousavi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2019a_0.pdf},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Wiercigroch, Julia; Baum, Zachary M C; Ungi, Tamas; Fritz, Jan; Fichtinger, Gabor
Validation of a low-cost adjustable, handheld needle guide for spine interventions Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Wiercigroch2019,
title = {Validation of a low-cost adjustable, handheld needle guide for spine interventions},
author = {Julia Wiercigroch and Zachary M C Baum and Tamas Ungi and Jan Fritz and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wiercigroch2019a.pdf},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Lia, H.; Baum, Zachary M C; Vaughan, Thomas; Ungi, Tamas; McGregor, T.; Fichtinger, Gabor
Usability and accuracy of an electromagnetically tracked partial nephrectomy navigation system Conference
17th Annual Imaging Network of Ontario Symposium (ImNO), 2019.
@conference{Lia2019a,
title = {Usability and accuracy of an electromagnetically tracked partial nephrectomy navigation system},
author = {H. Lia and Zachary M C Baum and Thomas Vaughan and Tamas Ungi and T. McGregor and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Lia2019a.pdf},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {17th Annual Imaging Network of Ontario Symposium (ImNO)},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Lasso, Andras; Ryan, Sarah; Ungi, Tamas; Rae, Emily; Zevin, Boris; Levy, Ron; Fichtinger, Gabor
Augmented reality training platform for neurosurgical burr hole localization Journal Article
In: Journal of Medical Robotics Research, vol. 4, no. 3-4, pp. 1942001-1 - 1942001-13, 2019.
@article{Baum2020,
title = {Augmented reality training platform for neurosurgical burr hole localization},
author = {Zachary M C Baum and Andras Lasso and Sarah Ryan and Tamas Ungi and Emily Rae and Boris Zevin and Ron Levy and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2020a.pdf},
doi = {10.1142/S2424905X19420017},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
journal = {Journal of Medical Robotics Research},
volume = {4},
number = {3-4},
pages = {1942001-1 - 1942001-13},
abstract = {Augmented reality (AR) is used in neurosurgery to visualize lesions and plan procedures pre-operatively and intra-operatively, though its use has not been widely adopted in simulation-based neurosurgical training for the same tasks. This work defines metrics to determine performance in drill position and angle identification for neurosurgical training. The metrics were validated intra-operatively and in a simulated training environment, demonstrating that trainees identify drill position and angle faster and more accurately with AR compared to standard techniques. Training using AR and the proposed metrics stands to add value to neurosurgical curricula development.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Perrin, Sydney; Baum, Zachary M C; Asselin, Mark; Underwood, Grace; Choueib, Saleh; Lia, H.; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
Reproducibility of freehand calibrations for ultrasound-guided needle navigation Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Perrin2019a,
title = {Reproducibility of freehand calibrations for ultrasound-guided needle navigation},
author = {Sydney Perrin and Zachary M C Baum and Mark Asselin and Grace Underwood and Saleh Choueib and H. Lia and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Perrin2019a.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Ryan, Sarah; Rae, Emily; Lasso, Andras; Ungi, Tamas; Levy, Ron; Fichtinger, Gabor
Assessment of intraoperative neurosurgical planning with the Microsoft HoloLens Conference
17th Annual Imaging Network Ontario Symposium (ImNO), Imaging Network Ontario (ImNO), London, Ontario, 2019.
@conference{Baum2019b,
title = {Assessment of intraoperative neurosurgical planning with the Microsoft HoloLens},
author = {Zachary M C Baum and Sarah Ryan and Emily Rae and Andras Lasso and Tamas Ungi and Ron Levy and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2019b.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {17th Annual Imaging Network Ontario Symposium (ImNO)},
publisher = {Imaging Network Ontario (ImNO)},
address = {London, Ontario},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Pinter, Csaba; Travers, Bryan; Baum, Zachary M C; Kamali, Shahrokh; Ungi, Tamas; Lasso, Andras; Church, Ben; Fichtinger, Gabor
Real-time transverse process detection in ultrasound Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, Houston, Texas, 2018.
@conference{Pinter2018,
title = {Real-time transverse process detection in ultrasound},
author = {Csaba Pinter and Bryan Travers and Zachary M C Baum and Shahrokh Kamali and Tamas Ungi and Andras Lasso and Ben Church and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2018.pdf},
year = {2018},
date = {2018-03-01},
urldate = {2018-03-01},
booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
address = {Houston, Texas},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Church, Ben; Schlenger, Christopher; Fichtinger, Gabor
Visual aid for identifying vertebral landmarks in ultrasound Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10576, Houston, Texas, 2018.
@conference{Baum2018a,
title = {Visual aid for identifying vertebral landmarks in ultrasound},
author = {Zachary M C Baum and Tamas Ungi and Andras Lasso and Ben Church and Christopher Schlenger and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2018a.pdf},
doi = {10.1117/12.2292820},
year = {2018},
date = {2018-03-01},
urldate = {2018-03-01},
booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10576},
pages = {105760Z},
address = {Houston, Texas},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Pinter, Csaba; Travers, Bryan; Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Church, Ben; Fichtinger, Gabor
Real-time transverse process delineation in tracked ultrasound for scoliosis measurement Conference
Imaging Network Ontario Symposium (ImNO 2018), Toronto, Canada, 2018.
@conference{Pinter2018a,
title = {Real-time transverse process delineation in tracked ultrasound for scoliosis measurement},
author = {Csaba Pinter and Bryan Travers and Zachary M C Baum and Tamas Ungi and Andras Lasso and Ben Church and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2018a.pdf},
year = {2018},
date = {2018-03-01},
urldate = {2018-03-01},
booktitle = {Imaging Network Ontario Symposium (ImNO 2018)},
address = {Toronto, Canada},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hisey, Rebecca; Ungi, Tamas; Holden, M.; Baum, Zachary M C; Keri, Zsuzsanna; McCallum, Caitlin; Howes, Daniel; Fichtinger, Gabor
Assessment of the use of webcam based workflow detection for providing real-time feedback in central venous catheterization training Conference
Imaging Network Ontario (IMNO), 2018.
@conference{Hisey2018b,
title = {Assessment of the use of webcam based workflow detection for providing real-time feedback in central venous catheterization training},
author = {Rebecca Hisey and Tamas Ungi and M. Holden and Zachary M C Baum and Zsuzsanna Keri and Caitlin McCallum and Daniel Howes and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Rebecca_ImNO2018_07.pdf},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {Imaging Network Ontario (IMNO)},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Hisey, Rebecca; Ungi, Tamas; Holden, M.; Baum, Zachary M C; Keri, Zsuzsanna; McCallum, Caitlin; Howes, Daniel; Fichtinger, Gabor
Real-time workflow detection using webcam video for providing real-time feedback in central venous catheterization training (Honorable Mention) Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, 2018.
@conference{Hisey2018,
title = {Real-time workflow detection using webcam video for providing real-time feedback in central venous catheterization training},
author = {Rebecca Hisey and Tamas Ungi and M. Holden and Zachary M C Baum and Zsuzsanna Keri and Caitlin McCallum and Daniel Howes and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/RHisey_SPIE2018_Full_02.pdf},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Church, Ben; Schlenger, Christopher; Fichtinger, Gabor
Ultrasound-based vertebral landmark localization using deformable spine models Conference
16th Annual Imaging Network Ontario Symposium, Imaging Network Ontario (ImNO), Toronto, Canada, 2018.
@conference{Baum2018b,
title = {Ultrasound-based vertebral landmark localization using deformable spine models},
author = {Zachary M C Baum and Tamas Ungi and Andras Lasso and Ben Church and Christopher Schlenger and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2018b.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2018b-Poster.pdf},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {16th Annual Imaging Network Ontario Symposium},
publisher = {Imaging Network Ontario (ImNO)},
address = {Toronto, Canada},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
Usability of a real-time tracked augmented reality display system in musculoskeletal injections Conference
SPIE Medical Imaging 2017: Image-Guided Procedures, Robotic Interventions, and Modeling, 2017.
@conference{Baum2017a,
title = {Usability of a real-time tracked augmented reality display system in musculoskeletal injections},
author = {Zachary M C Baum and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2017a_0.pdf},
doi = {10.1117/12.2255897},
year = {2017},
date = {2017-03-01},
urldate = {2017-03-01},
booktitle = {SPIE Medical Imaging 2017: Image-Guided Procedures, Robotic Interventions, and Modeling},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Ilina, Anna; Lasso, Andras; Jolley, Matthew A.; Wohler, Brittany; Nguyen, Alexander V.; Scanlan, Adam B.; Baum, Zachary M C; McGowan, Francis X.; Fichtinger, Gabor
Patient-specific pediatric silicone heart valve models based on 3D ultrasound Conference
SPIE Medical Imaging 2017, vol. 10135, Orlando, FL, United States, 2017.
@conference{Ilina2017a,
title = {Patient-specific pediatric silicone heart valve models based on 3D ultrasound},
author = {Anna Ilina and Andras Lasso and Matthew A. Jolley and Brittany Wohler and Alexander V. Nguyen and Adam B. Scanlan and Zachary M C Baum and Francis X. McGowan and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ilina2017a.pdf},
year = {2017},
date = {2017-03-01},
urldate = {2017-03-01},
booktitle = {SPIE Medical Imaging 2017},
volume = {10135},
publisher = {SPIE Medical Imaging},
address = {Orlando, FL, United States},
abstract = {PURPOSE: Patient-specific heart and valve models have shown promise as training and planning tools for heart surgery, but physically realistic valve models remain elusive. Available proprietary, simulation-focused heart valve models are generic adult mitral valves and do not allow for patient-specific modeling as may be needed for rare diseases such as congenitally abnormal valves. We propose creating silicone valve models from a 3D-printed plastic mold as a solution that can be adapted to any individual patient and heart valve at a fraction of the cost of direct 3D-printing using soft materials.

METHODS: Leaflets of a pediatric mitral valve, a tricuspid valve in a patient with hypoplastic left heart syndrome, and a complete atrioventricular canal valve were segmented from ultrasound images. Custom software was developed to automatically generate molds for each valve based on the segmentation. These molds were 3D-printed and used to make silicone valve models. The models were designed with cylindrical rims of different sizes surrounding the leaflets, to show the outline of the valve and add rigidity. Pediatric cardiac surgeons practiced suturing on the models and evaluated them for use as surgical planning and training tools.

RESULTS: Five out of six surgeons reported that the valve models would be very useful as training tools for cardiac surgery. In this first iteration of valve models, leaflets were felt to be unrealistically thick or stiff compared to real pediatric leaflets. A thin tube rim was preferred for valve flexibility.

CONCLUSION: The valve models were well received and considered to be valuable and accessible tools for heart valve surgery training. Further improvements will be made based on surgeons' feedback.

Keywords: surgery, training, heart valve models, pediatric, patient-specific, 3D-printing, congenital heart disease, mitral valve, tricuspid valve, complete atrioventricular canal defect, ultrasound},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Underwood, Grace; Ungi, Tamas; Baum, Zachary M C; Lasso, Andras; Kronreif, Gernot; Fichtinger, Gabor
Skull registration for prone patient position using tracked ultrasound Conference
SPIE Medical Imaging 2017, 2017.
@conference{Underwood2017,
title = {Skull registration for prone patient position using tracked ultrasound},
author = {Grace Underwood and Tamas Ungi and Zachary M C Baum and Andras Lasso and Gernot Kronreif and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Underwood2017.pdf},
year = {2017},
date = {2017-01-01},
urldate = {2017-01-01},
booktitle = {SPIE Medical Imaging 2017},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Harish, Vinyas; Bibic, Eden; Lasso, Andras; Holden, M.; Vaughan, Thomas; Baum, Zachary M C; Ungi, Tamas; Fichtinger, Gabor
An application of redundant sensors for intraoperative electromagnetic tracking error monitoring Conference
15th Annual Imaging Network Ontario Symposium, London, ON, Canada, 2017.
@conference{Harish2017b,
title = {An application of redundant sensors for intraoperative electromagnetic tracking error monitoring},
author = {Vinyas Harish and Eden Bibic and Andras Lasso and M. Holden and Thomas Vaughan and Zachary M C Baum and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Harish2017b.pdf},
year = {2017},
date = {2017-01-01},
urldate = {2017-01-01},
booktitle = {15th Annual Imaging Network Ontario Symposium},
address = {London, ON, Canada},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Ilina, Anna; Lasso, Andras; Jolley, Matthew A.; Wohler, Brittany; Nguyen, Alexander V.; Scanlan, Adam B.; Baum, Zachary M C; McGowan, Francis X.; Fichtinger, Gabor
Creating patient-specific anatomical models from highly elastic materials using 3D-printed molds Conference
15th Annual Imaging Network Ontario Symposium (ImNO), London, Canada, 2017.
@conference{Ilina2017b,
title = {Creating patient-specific anatomical models from highly elastic materials using 3D-printed molds},
author = {Anna Ilina and Andras Lasso and Matthew A. Jolley and Brittany Wohler and Alexander V. Nguyen and Adam B. Scanlan and Zachary M C Baum and Francis X. McGowan and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ilina2017b.pdf
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Ilina2017b-poster.pdf},
year = {2017},
date = {2017-01-01},
urldate = {2017-01-01},
booktitle = {15th Annual Imaging Network Ontario Symposium (ImNO)},
address = {London, Canada},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
Evaluation of a mobile, real-time, tracked augmented reality display for surgical navigation Conference
15th Annual Imaging Network Ontario Symposium, Imaging Network Ontario (ImNO), London, Canada, 2017.
@conference{Baum2017b,
title = {Evaluation of a mobile, real-time, tracked augmented reality display for surgical navigation},
author = {Zachary M C Baum and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2017b.pdf},
year = {2017},
date = {2017-01-01},
urldate = {2017-01-01},
booktitle = {15th Annual Imaging Network Ontario Symposium},
publisher = {Imaging Network Ontario (ImNO)},
address = {London, Canada},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Bibic, Eden; Baum, Zachary M C; Harish, Vinyas; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
PLUS Model Catalog: A library of 3D-printable models Conference
15th Annual Imaging Network Ontario Symposium, Imaging Network Ontario (ImNO), London, Canada, 2017.
@conference{Bibic2017a,
title = {PLUS Model Catalog: A library of 3D-printable models},
author = {Eden Bibic and Zachary M C Baum and Vinyas Harish and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Bibic2017a-Poster.pptx},
year = {2017},
date = {2017-01-01},
urldate = {2017-01-01},
booktitle = {15th Annual Imaging Network Ontario Symposium},
publisher = {Imaging Network Ontario (ImNO)},
address = {London, Canada},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
Real-Time, Tracked, Mobile Augmented Reality Display for Surgical Navigation: Usability Study on Simulated Patients Conference
The 9th IGT Workshop, National Center for Image Guided Therapy, Bethesda, USA, 2017.
@conference{Baum2017c,
title = {Real-Time, Tracked, Mobile Augmented Reality Display for Surgical Navigation: Usability Study on Simulated Patients},
author = {Zachary M C Baum and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2017c.pdf},
year = {2017},
date = {2017-01-01},
urldate = {2017-01-01},
booktitle = {The 9th IGT Workshop},
publisher = {National Center for Image Guided Therapy},
address = {Bethesda, USA},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Harish, Vinyas; Bibic, Eden; Lasso, Andras; Holden, M.; Vaughan, Thomas; Baum, Zachary M C; Ungi, Tamas; Fichtinger, Gabor
Monitoring electromagnetic tracking error using redundant sensors Conference
SPIE Medical Imaging 2017, SPIE Society for Optics and Photonics, Orlando, FL, USA, 2017.
@conference{Harish2017a,
title = {Monitoring electromagnetic tracking error using redundant sensors},
author = {Vinyas Harish and Eden Bibic and Andras Lasso and M. Holden and Thomas Vaughan and Zachary M C Baum and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Harish2017a.pdf},
year = {2017},
date = {2017-01-01},
urldate = {2017-01-01},
booktitle = {SPIE Medical Imaging 2017},
publisher = {SPIE Society for Optics and Photonics},
address = {Orlando, FL, USA},
abstract = {PURPOSE: The intraoperative measurement of tracking error is crucial to ensure the reliability of electromagnetically navigated procedures. For intraoperative use, methods need to be quick to set up, easy to interpret, and not interfere with the ongoing procedure. Our goal was to evaluate the feasibility of using redundant electromagnetic sensors to alert users to tracking error in a navigated intervention setup.

METHODS: Electromagnetic sensors were fixed to a rigid frame around a region of interest and on surgical tools. A software module was designed to detect tracking error by comparing real-time measurements of the differences between inter-sensor distances and angles to baseline measurements. Once these measurements were collected, a linear support vector machine-based classifier was used to predict tracking errors from redundant sensor readings.

RESULTS: Measuring the deviation in the reported inter-sensor distance and angle between the needle and cautery served as a valid indicator for electromagnetic tracking error. The highest classification accuracy, 86%, was achieved based on readings from the cautery when the two sensors on the cautery were close together. The specificity of this classifier was 93% and the sensitivity was 82%.

CONCLUSION: Placing redundant electromagnetic sensors in a workspace seems to be feasible for the intraoperative detection of electromagnetic tracking error in controlled environments. Further testing should be performed to optimize the measurement error threshold used for classification in the support vector machine, and improve the sensitivity of our method before application in real procedures.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
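The classification step described in this abstract can be sketched as follows, with scikit-learn's LinearSVC standing in for the paper's linear support vector machine and synthetic deviation features standing in for real sensor readings; the feature layout, class means, and thresholds are assumptions, not the study's data.

import numpy as np
from sklearn.svm import LinearSVC

rng = np.random.default_rng(42)
n = 200
# Each sample: [inter-sensor distance deviation (mm), angle deviation (deg)].
clean = rng.normal(0.0, 0.2, size=(n, 2))      # small deviations: clean field
distorted = rng.normal(1.5, 0.5, size=(n, 2))  # large deviations: distortion
X = np.vstack([clean, distorted])
y = np.concatenate([np.zeros(n), np.ones(n)])  # 0 = clean, 1 = distorted

clf = LinearSVC().fit(X, y)
print(clf.predict([[0.1, 0.05], [2.0, 1.2]]))  # expected: [0. 1.]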
House, Rachael; Lasso, Andras; Harish, Vinyas; Baum, Zachary M C; Fichtinger, Gabor
Evaluation of the Intel RealSense SR300 camera for image-guided interventions and application in vertebral level localization Conference
SPIE Medical Imaging, 2017.
@conference{House2017,
title = {Evaluation of the Intel RealSense SR300 camera for image-guided interventions and application in vertebral level localization},
author = {Rachael House and Andras Lasso and Vinyas Harish and Zachary M C Baum and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/House2017a.pdf},
year = {2017},
date = {2017-01-01},
urldate = {2017-01-01},
booktitle = {SPIE Medical Imaging},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Lasso, Andras; Ungi, Tamas; Fichtinger, Gabor
Real-time self-calibration of a tracked augmented reality display Conference
SPIE Medical Imaging 2016, vol. 9786, San Diego, CA, United States, 2016.
@conference{Baum2016a,
title = {Real-time self-calibration of a tracked augmented reality display},
author = {Zachary M C Baum and Andras Lasso and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2016a.pdf},
year = {2016},
date = {2016-01-01},
urldate = {2016-01-01},
booktitle = {SPIE Medical Imaging 2016},
volume = {9786},
publisher = {SPIE Medical Imaging},
address = {San Diego, CA, United States},
abstract = {PURPOSE: Augmented reality systems have been proposed for image-guided needle interventions but they have not become widely used in clinical practice due to restrictions such as limited portability, low display refresh rates, and tedious calibration procedures. We propose a handheld tablet-based self-calibrating image overlay system.

METHODS: A modular handheld augmented reality viewbox was constructed from a tablet computer and a semi-transparent mirror. A consistent and precise self-calibration method, without the use of any temporary markers, was designed to achieve an accurate calibration of the system. Markers attached to the viewbox and patient are simultaneously tracked using an optical pose tracker to report the position of the patient with respect to a displayed image plane that is visualized in real-time. The software was built using the open-source 3D Slicer application platform's SlicerIGT extension and the PLUS toolkit.

RESULTS: The accuracy of the image overlay with image-guided needle interventions yielded a mean absolute position error of 0.99 mm (95th percentile 1.93 mm) in-plane of the overlay and a mean absolute position error of 0.61 mm (95th percentile 1.19 mm) out-of-plane. This accuracy is clinically acceptable for tool guidance during various procedures, such as musculoskeletal injections.

CONCLUSION: A self-calibration method was developed and evaluated for a tracked augmented reality display. The results show potential for the use of handheld image overlays in clinical studies with image-guided needle interventions.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
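The in-plane and out-of-plane position errors reported above can be separated by projecting the 3D error vector onto the overlay image plane's normal. A minimal Python sketch of that decomposition follows; the plane normal and error vector are assumed example values, not measurements from the study.

import numpy as np

def split_error(error_vec, plane_normal):
    """Return (in-plane, out-of-plane) error magnitudes for a 3D error vector."""
    n = np.asarray(plane_normal, dtype=float)
    n /= np.linalg.norm(n)
    e = np.asarray(error_vec, dtype=float)
    out_of_plane = np.dot(e, n)          # signed component along the normal
    in_plane_vec = e - out_of_plane * n  # residual lies in the image plane
    return float(np.linalg.norm(in_plane_vec)), abs(float(out_of_plane))

err = np.array([0.8, 0.4, 0.5])          # example 3D error vector (mm)
print(split_error(err, plane_normal=[0.0, 0.0, 1.0]))  # (~0.89 mm, 0.5 mm)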
Harish, Vinyas; Baksh, A.; Ungi, Tamas; Lasso, Andras; Baum, Zachary M C; Gauvin, G.; Engel, C. Jay; Rudan, John; Fichtinger, Gabor
Measurement of electromagnetic tracking error in a navigated breast surgery setup Conference
SPIE Medical Imaging 2016, vol. 9786, San Diego, CA, United States, 2016.
@conference{Harish2016a,
title = {Measurement of electromagnetic tracking error in a navigated breast surgery setup},
author = {Vinyas Harish and A. Baksh and Tamas Ungi and Andras Lasso and Zachary M C Baum and G. Gauvin and C. Jay Engel and John Rudan and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Harish2016a.pdf},
year = {2016},
date = {2016-01-01},
urldate = {2016-01-01},
booktitle = {SPIE Medical Imaging 2016},
volume = {9786},
publisher = {SPIE Medical Imaging},
address = {San Diego, CA, United States},
abstract = {The measurement of tracking error is crucial to ensure the safety and feasibility of electromagnetically tracked, image-guided procedures. Measurement should occur in a clinical environment because electromagnetic field distortion depends on positioning relative to the field generator and metal objects. However, we could not find an accessible and open-source system for calibration, error measurement, and visualization. We developed such a system and tested it in a navigated breast surgery setup. A pointer tool was designed for concurrent electromagnetic and optical tracking. Software modules were developed for automatic calibration of the measurement system, real-time error visualization, and analysis. The system was taken to an operating room to test for field distortion in a navigated breast surgery setup. Positional and rotational electromagnetic tracking errors were then calculated using optical tracking as a ground truth. Our system is quick to set up and can be rapidly deployed. The process from calibration to visualization takes only a few minutes. Field distortion was measured in the presence of various surgical equipment. Positional and rotational error in a clean field was approximately 0.90 mm and 0.31°. The presence of a surgical table, an electrosurgical cautery, and an anesthesia machine increased the error by up to a few tenths of a millimetre and a tenth of a degree. In a navigated breast surgery setup, measurement and visualization of tracking error defines a safe working area in the presence of surgical equipment. Our system is available as an extension for the open-source 3D Slicer platform.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
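Computing positional and rotational error against an optical ground truth, as this abstract describes, amounts to comparing paired poses: positional error is the distance between reported positions, and rotational error is the rotation angle of the relative rotation matrix. A minimal Python sketch under those standard definitions follows; the 4x4 homogeneous matrices are illustrative stand-ins, not study data.

import numpy as np

def pose_errors(em_pose, optical_pose):
    """Return (positional error in mm, rotational error in degrees)."""
    pos_err = float(np.linalg.norm(em_pose[:3, 3] - optical_pose[:3, 3]))
    r_rel = em_pose[:3, :3] @ optical_pose[:3, :3].T  # relative rotation
    cos_theta = np.clip((np.trace(r_rel) - 1.0) / 2.0, -1.0, 1.0)
    return pos_err, float(np.degrees(np.arccos(cos_theta)))

em = np.eye(4)
em[:3, 3] = [100.0, 50.0, 20.9]    # EM-reported pose
opt = np.eye(4)
opt[:3, 3] = [100.0, 50.0, 20.0]   # optical ground-truth pose
print(pose_errors(em, opt))        # ~ (0.9 mm, 0.0 deg)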
Harish, Vinyas; Baksh, A.; Ungi, Tamas; Lasso, Andras; Baum, Zachary M C; Gauvin, G.; Engel, C. Jay; Rudan, John; Fichtinger, Gabor
Monitoring electromagnetic tracking error in computer-navigated breast cancer surgery Conference
14th Annual Imaging Network Ontario Symposium (ImNO), Imaging Network Ontario (ImNO), Toronto, Canada, 2016.
@conference{Harish2016c,
title = {Monitoring electromagnetic tracking error in computer-navigated breast cancer surgery},
author = {Vinyas Harish and A. Baksh and Tamas Ungi and Andras Lasso and Zachary M C Baum and G. Gauvin and C. Jay Engel and John Rudan and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Harish2016c.pdf},
year = {2016},
date = {2016-01-01},
urldate = {2016-01-01},
booktitle = {14th Annual Imaging Network Ontario Symposium (ImNO)},
publisher = {Imaging Network Ontario (ImNO)},
address = {Toronto, Canada},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Baum, Zachary M C; Lasso, Andras; Ungi, Tamas; Fichtinger, Gabor
Real-time self-calibration of a handheld augmented reality overlay system Conference
14th Annual Imaging Network Ontario Symposium (ImNO), Imaging Network Ontario (ImNO), Toronto, Canada, 2016.
@conference{Baum2016b,
title = {Real-time self-calibration of a handheld augmented reality overlay system},
author = {Zachary M C Baum and Andras Lasso and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Baum2016b.pdf},
year = {2016},
date = {2016-01-01},
urldate = {2016-01-01},
booktitle = {14th Annual Imaging Network Ontario Symposium (ImNO)},
publisher = {Imaging Network Ontario (ImNO)},
address = {Toronto, Canada},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}