Mark Asselin
Mark is a master's student in biomedical computing with prior experience in electrical engineering. See his website at https://markasselin.github.io/ for more information about him.
Kaufmann, Martin; Jamzad, Amoon; Ungi, Tamas; Rodgers, Jessica R; Koster, Teaghan; Yeung, Chris; Ehrlich, Josh; Santilli, Alice; Asselin, Mark; Janssen, Natasja; McMullen, Julie; Solberg, Kathryn; Cheesman, Joanna; Carlo, Alessia Di; Ren, Kevin Yi Mi; Varma, Sonal; Merchant, Shaila; Engel, Cecil Jay; Walker, G Ross; Gallo, Andrea; Jabs, Doris; Mousavi, Parvin; Fichtinger, Gabor; Rudan, John F
Abstract PO2-23-07: Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery Journal Article
In: Cancer Research, vol. 84, iss. 9_Supplement, pp. PO2-23-07-PO2-23-07, 2024.
@article{fichtinger2024c,
title = {Abstract PO2-23-07: Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery},
author = {Martin Kaufmann and Amoon Jamzad and Tamas Ungi and Jessica R Rodgers and Teaghan Koster and Chris Yeung and Josh Ehrlich and Alice Santilli and Mark Asselin and Natasja Janssen and Julie McMullen and Kathryn Solberg and Joanna Cheesman and Alessia Di Carlo and Kevin Yi Mi Ren and Sonal Varma and Shaila Merchant and Cecil Jay Engel and G Ross Walker and Andrea Gallo and Doris Jabs and Parvin Mousavi and Gabor Fichtinger and John F Rudan},
url = {https://aacrjournals.org/cancerres/article/84/9_Supplement/PO2-23-07/743683},
year = {2024},
date = {2024-01-01},
journal = {Cancer Research},
volume = {84},
issue = {9_Supplement},
pages = {PO2-23-07-PO2-23-07},
publisher = {The American Association for Cancer Research},
abstract = {Positive resection margins occur in approximately 25% of breast cancer (BCa) surgeries, requiring re-operation. Margin status is not routinely available during surgery; thus, technologies that identify residual cancer on the specimen or cavity are needed to provide intraoperative decision support that may reduce positive margin rates. Rapid evaporative ionization mass spectrometry (REIMS) is an emerging technique that chemically profiles the plume generated by tissue cauterization to classify the ablated tissue as either cancerous or non-cancerous, on the basis of detected lipid species. Although REIMS can distinguish cancer and non-cancerous breast tissue by the signals generated, it does not indicate the location of the classified tissue in real-time. Our objective was to combine REIMS with spatio-temporal navigation (navigated REIMS), and to compare performance of navigated REIMS with conventional …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Asselin, Mark; Fichtinger, Gabor
Spatio-temporal localization for mass spectrometry sample analysis Patent
US Patent US11456165B2, 2022.
@article{fichtinger2022n,
title = {Spatio-temporal localization for mass spectrometry sample analysis},
author = {Mark Asselin and Gabor Fichtinger},
url = {https://patents.google.com/patent/US11456165B2/en},
year = {2022},
date = {2022-01-01},
urldate = {2022-01-01},
abstract = {In a method for spatially localizing mass-spectrometry analysis of an analyte derived from an energy event, an electrical device is used to deliver an energy event to a substrate, and the analyte produced is analyzed using mass spectrometry. Electrical signals sent to and received from the electrical device under different modes of operation are sensed and classified according to each different mode of operation. A location of the electrical device is tracked in three dimensions during the energy event, and a processor is used to perform spatial-temporal alignment of the mass-spectrometry, the determined modes of operation of the electrical device, and the tracked location of the electrical device, wherein mass spectrometry data corresponding to the determined modes of the electrical device are identified and localized within the site of the energy event. The substrate may be tissue in a surgical site, and the electrical …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
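The patent abstract describes time-aligning mass-spectrometry scans with the tracked device location and its energized intervals. Below is a minimal sketch of that alignment step in Python with NumPy; the function and variable names are illustrative assumptions, not the patented implementation.

import numpy as np

def localize_spectra(spectrum_times, pose_times, poses, burn_intervals):
    """Attach a 3D position to each mass-spectrometry scan taken during an energy event.

    spectrum_times : (N,) scan acquisition times
    pose_times     : (M,) sorted timestamps of the tracked cautery tip
    poses          : (M, 3) tip positions in tracker coordinates
    burn_intervals : list of (t_start, t_end) when the device was energized
    """
    localized = []
    for i, t in enumerate(spectrum_times):
        # keep only scans acquired while the device was delivering energy
        if not any(t0 <= t <= t1 for t0, t1 in burn_intervals):
            continue
        # interpolate the tracked tip position at the scan's timestamp
        xyz = np.array([np.interp(t, pose_times, poses[:, k]) for k in range(3)])
        localized.append((i, xyz))
    return localized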
Ehrlich, Josh; Jamzad, Amoon; Asselin, Mark; Rodgers, Jessica Robin; Kaufmann, Martin; Haidegger, Tamas; Rudan, John; Mousavi, Parvin; Fichtinger, Gabor; Ungi, Tamas
Sensor-Based Automated Detection of Electrosurgical Cautery States Journal Article
In: Sensors, vol. 22, iss. 15, pp. 5808, 2022.
@article{fichtinger2022i,
title = {Sensor-Based Automated Detection of Electrosurgical Cautery States},
author = {Josh Ehrlich and Amoon Jamzad and Mark Asselin and Jessica Robin Rodgers and Martin Kaufmann and Tamas Haidegger and John Rudan and Parvin Mousavi and Gabor Fichtinger and Tamas Ungi},
url = {https://www.mdpi.com/1424-8220/22/15/5808},
year = {2022},
date = {2022-01-01},
journal = {Sensors},
volume = {22},
issue = {15},
pages = {5808},
publisher = {MDPI},
abstract = {In computer-assisted surgery, it is typically required to detect when the tool comes into contact with the patient. In activated electrosurgery, this is known as the energy event. By continuously tracking the electrosurgical tools’ location using a navigation system, energy events can help determine locations of sensor-classified tissues. Our objective was to detect the energy event and determine the settings of electrosurgical cautery—robustly and automatically based on sensor data. This study aims to demonstrate the feasibility of using the cautery state to detect surgical incisions, without disrupting the surgical workflow. We detected current changes in the wires of the cautery device and grounding pad using non-invasive current sensors and an oscilloscope. An open-source software was implemented to apply machine learning on sensor data to detect energy events and cautery settings. Our methods classified each cautery state at an average accuracy of 95.56% across different tissue types and energy level parameters altered by surgeons during an operation. Our results demonstrate the feasibility of automatically identifying energy events during surgical incisions, which could be an important safety feature in robotic and computer-integrated surgery. This study provides a key step towards locating tissue classifications during breast cancer operations and reducing the rate of positive margins.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
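As a rough illustration of the sensor-based approach in the abstract, the sketch below extracts simple waveform features from windows of current-sensor samples and trains a classifier. The features and the random-forest model are assumptions chosen for illustration, not the authors' exact pipeline.

import numpy as np
from sklearn.ensemble import RandomForestClassifier

def waveform_features(window):
    """Simple summary features for one window of current samples (1-D array)."""
    return [np.sqrt(np.mean(window ** 2)),     # RMS amplitude
            np.max(np.abs(window)),            # peak amplitude
            np.mean(np.abs(np.diff(window)))]  # mean absolute slope

def train_state_detector(windows, labels):
    """windows: (n_windows, n_samples) current data; labels: e.g. 'idle', 'cut', 'coag'."""
    X = np.array([waveform_features(w) for w in windows])
    clf = RandomForestClassifier(n_estimators=100, random_state=0)
    clf.fit(X, labels)
    return clf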
Pinter, Csaba; Lasso, Andras; Choueib, Saleh; Asselin, Mark; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Jolley, Matthew A.; Fichtinger, Gabor
SlicerVR for Medical Intervention Training and Planning in Immersive Virtual Reality Journal Article
In: IEEE Transactions on Medical Robotics and Bionics, vol. 2, no. 2, pp. 108-117, 2020.
@article{Pinter2020,
title = {SlicerVR for Medical Intervention Training and Planning in Immersive Virtual Reality},
author = {Csaba Pinter and Andras Lasso and Saleh Choueib and Mark Asselin and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Matthew A. Jolley and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Pinter2020a_0.pdf},
doi = {10.1109/TMRB.2020.2983199},
year = {2020},
date = {2020-03-01},
urldate = {2020-03-01},
journal = {IEEE Transactions on Medical Robotics and Bionics},
volume = {2},
number = {2},
pages = {108-117},
abstract = {Virtual reality (VR) provides immersive visualization that has proved to be useful in a variety of medical applications. Currently, however, no free open-source software platform exists that would provide comprehensive support for translational clinical researchers in prototyping experimental VR scenarios in training, planning or guiding medical interventions. By integrating VR functions in 3D Slicer, an established medical image analysis and visualization platform, SlicerVR enables virtual reality experience by a single click. It provides functions to navigate and manipulate the virtual scene, as well as various settings to abate the feeling of motion sickness. SlicerVR allows for shared collaborative VR experience both locally and remotely. We present illustrative scenarios created with SlicerVR in a wide spectrum of applications, including echocardiography, neurosurgery, spine surgery, brachytherapy, intervention training and personalized patient education. SlicerVR is freely available under BSD type license as an extension to 3D Slicer and it has been downloaded over 7,800 times at the time of writing this article.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
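For readers who want to try SlicerVR, the extension can reportedly be activated from 3D Slicer's Python console roughly as follows. This is recalled from the SlicerVirtualReality extension documentation; the exact module and method names may differ between versions, so treat them as assumptions to verify.

# Run inside 3D Slicer's Python console, where the `slicer` module is preloaded
# and the SlicerVirtualReality extension is installed (names are assumptions).
vrLogic = slicer.modules.virtualreality.logic()
vrLogic.SetVirtualRealityActive(True)  # switch the 3D view into immersive VR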
Connolly, Laura; Jamzad, Amoon; Kaufmann, Martin; Rubino, Rachel; Sedghi, Alireza; Ungi, Tamas; Asselin, Mark; Yam, Scott; Rudan, John; Nicol, Christopher; Fichtinger, Gabor; Mousavi, Parvin
Classification of tumor signatures from electrosurgical vapors using mass spectrometry and machine learning: a feasibility study Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE, Houston, Texas, United States, 2020.
@conference{Connolly2020a,
title = {Classification of tumor signatures from electrosurgical vapors using mass spectrometry and machine learning: a feasibility study},
author = {Laura Connolly and Amoon Jamzad and Martin Kaufmann and Rachel Rubino and Alireza Sedghi and Tamas Ungi and Mark Asselin and Scott Yam and John Rudan and Christopher Nicol and Gabor Fichtinger and Parvin Mousavi},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2020a.pdf},
doi = {10.1117/12.2549343},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
volume = {11315},
publisher = {SPIE},
address = {Houston, Texas, United States},
organization = {SPIE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Barr, Colton; Lasso, Andras; Asselin, Mark; Pieper, Steve; Robertson, Faith C.; Gormley, William B.; Fichtinger, Gabor
Towards portable image guidance and automatic patient registration using an RGB-D camera and video projector Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE, Houston, Texas, United States, 2020.
@conference{BarrC2020,
title = {Towards portable image guidance and automatic patient registration using an RGB-D camera and video projector},
author = {Colton Barr and Andras Lasso and Mark Asselin and Steve Pieper and Faith C. Robertson and William B. Gormley and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Barr2020.pdf},
doi = {10.1117/12.2549723},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
volume = {11315},
publisher = {SPIE},
address = {Houston, Texas, United States},
organization = {SPIE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Yates, Lauren; Connolly, Laura; Jamzad, Amoon; Asselin, Mark; Rubino, Rachel; Yam, Scott; Ungi, Tamas; Lasso, Andras; Nicol, Christopher; Mousavi, Parvin; Fichtinger, Gabor
Robotic tissue scanning with biophotonic probe Journal Article
In: Proc. SPIE Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, pp. 330-335, 2020.
@article{fichtinger2020o,
title = {Robotic tissue scanning with biophotonic probe},
author = {Lauren Yates and Laura Connolly and Amoon Jamzad and Mark Asselin and Rachel Rubino and Scott Yam and Tamas Ungi and Andras Lasso and Christopher Nicol and Parvin Mousavi and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/11315/1131519/Robotic-tissue-scanning-with-biophotonic-probe/10.1117/12.2549635.short},
year = {2020},
date = {2020-01-01},
volume = {11315},
pages = {330-335},
publisher = {SPIE},
abstract = {PURPOSE
Raman spectroscopy is an optical imaging technique used to characterize tissue via molecular analysis. The use of Raman spectroscopy for real-time intraoperative tissue classification requires fast analysis with minimal human intervention. In order to have accurate predictions and classifications, a large and reliable database of tissue classifications with spectra results is required. We have developed a system that can be used to generate an efficient scanning path for robotic scanning of tissues using Raman spectroscopy.
METHODS
A camera mounted to a robotic controller is used to take an image of a tissue slide. The corners of the tissue slides within the sample image are identified, and the size of the slide is calculated. The image is cropped to fit the size of the slide and the image is manipulated to identify the tissue contour. A grid set to fit around the size of the tissue is calculated and a grid …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
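The abstract outlines an image-processing pipeline: segment the tissue on the slide, then lay a scanning grid over it. A hedged sketch of that idea with OpenCV follows; the thresholding choice (tissue darker than background) and all names are assumptions, not the authors' code.

import cv2

def grid_scan_points(slide_image, step=20):
    """Return pixel coordinates of a scanning grid clipped to the tissue contour."""
    gray = cv2.cvtColor(slide_image, cv2.COLOR_BGR2GRAY)
    # Otsu threshold, assuming tissue appears darker than the slide background
    _, mask = cv2.threshold(gray, 0, 255, cv2.THRESH_BINARY_INV + cv2.THRESH_OTSU)
    contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    tissue = max(contours, key=cv2.contourArea)  # largest blob taken as the tissue
    x, y, w, h = cv2.boundingRect(tissue)
    points = []
    for gy in range(y, y + h, step):
        for gx in range(x, x + w, step):
            # keep only grid points that fall inside the tissue contour
            if cv2.pointPolygonTest(tissue, (float(gx), float(gy)), False) >= 0:
                points.append((gx, gy))
    return points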
Pinter, Csaba; Lasso, Andras; Asselin, Mark; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Fichtinger, Gabor
SlicerVR for image-guided therapy planning in immersive virtual reality Conference
The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK, London, UK, 2019.
@conference{Pinter2019a,
title = {SlicerVR for image-guided therapy planning in immersive virtual reality},
author = {Csaba Pinter and Andras Lasso and Mark Asselin and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2019a_0.pdf},
year = {2019},
date = {2019-06-01},
urldate = {2019-06-01},
booktitle = {The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK},
pages = {91-92},
address = {London, UK},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Holden, M.; Tan, Pearl; Hookey, Lawrence; Fichtinger, Gabor
Analyzing the curvature of the colon in different patient positions Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Laframboise2019a,
title = {Analyzing the curvature of the colon in different patient positions},
author = {Jacob Laframboise and Tamas Ungi and Andras Lasso and Mark Asselin and M. Holden and Pearl Tan and Lawrence Hookey and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Laframboise2019a.pdf},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Connolly, Laura; Ungi, Tamas; Lasso, Andras; Vaughan, Thomas; Asselin, Mark; Mousavi, Parvin; Yam, Scott; Fichtinger, Gabor
Mechanically-Controlled Spectroscopic Imaging for Tissue Classification Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Connolly2019a,
title = {Mechanically-Controlled Spectroscopic Imaging for Tissue Classification},
author = {Laura Connolly and Tamas Ungi and Andras Lasso and Thomas Vaughan and Mark Asselin and Parvin Mousavi and Scott Yam and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2019a_3.pdf},
doi = {10.1117/12.2512481},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Holden, M.; Tan, Pearl; Hookey, Lawrence; Fichtinger, Gabor
Quantifying the effect of patient position on the curvature of colons Conference
17th Annual Imaging Network Ontario Symposium (ImNO), Imaging Network Ontario (ImNO), London, Ontario, 2019.
@conference{Laframboise2019b,
title = {Quantifying the effect of patient position on the curvature of colons},
author = {Jacob Laframboise and Tamas Ungi and Andras Lasso and Mark Asselin and M. Holden and Pearl Tan and Lawrence Hookey and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Laframboise2019b.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {17th Annual Imaging Network Ontario Symposium (ImNO)},
publisher = {Imaging Network Ontario (ImNO)},
address = {London, Ontario},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Connolly, Laura; Ungi, Tamas; Lasso, Andras; Vaughan, Thomas; Asselin, Mark; Mousavi, Parvin; Yam, Scott; Fichtinger, Gabor
Mechanically controlled spectroscopic imaging for tissue classification Journal Article
In: Proc. SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, pp. 632-640, 2019.
@article{fichtinger2019n,
title = {Mechanically controlled spectroscopic imaging for tissue classification},
author = {Laura Connolly and Tamas Ungi and Andras Lasso and Thomas Vaughan and Mark Asselin and Parvin Mousavi and Scott Yam and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/10951/109512E/Mechanically-controlled-spectroscopic-imaging-for-tissue-classification/10.1117/12.2512481.short},
year = {2019},
date = {2019-01-01},
volume = {10951},
pages = {632-640},
publisher = {SPIE},
abstract = {PURPOSE
Raman Spectroscopy is amongst several optical imaging techniques that have the ability to characterize tissue non-invasively. To use these technologies for intraoperative tissue classification, fast and efficient analysis of optical data is required with minimal operator intervention. Additionally, there is a need for a reliable database of optical signatures to account for variable conditions. We developed a software system with an inexpensive, flexible mechanical framework to facilitate automated scanning of tissue and validate spectroscopic scans with histologic ground truths. This system will be used, in the future, to train a machine learning algorithm to distinguish between different tissue types using Raman Spectroscopy.
METHODS
A sample of chicken breast tissue is mounted to a microscope slide following a biopsy of fresh frozen tissue. Landmarks for registration and evaluation are marked on the …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
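The abstract validates spectroscopic scans against histologic ground truth using marked landmarks. A standard way to align two such landmark sets is a rigid least-squares (Kabsch) fit, sketched below as an assumption about the registration step rather than the authors' implementation.

import numpy as np

def rigid_register(src, dst):
    """Least-squares rigid transform (R, t) mapping (N, 3) points src onto dst."""
    src_c, dst_c = src.mean(axis=0), dst.mean(axis=0)
    H = (src - src_c).T @ (dst - dst_c)
    U, _, Vt = np.linalg.svd(H)
    # correction term guards against a reflection in the fitted rotation
    D = np.diag([1.0, 1.0, np.sign(np.linalg.det(Vt.T @ U.T))])
    R = Vt.T @ D @ U.T
    t = dst_c - R @ src_c
    return R, t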
Asselin, Mark; Kaufmann, Martin; Wiercigroch, Julia; Ungi, Tamas; Lasso, Andras; Rudan, John; Fichtinger, Gabor
Navigated real-time molecular analysis in the operating theatre: demonstration of concept Journal Article
In: Proc. SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, pp. 618-624, 2019.
@article{fichtinger2019h,
title = {Navigated real-time molecular analysis in the operating theatre: demonstration of concept},
author = {Mark Asselin and Martin Kaufmann and Julia Wiercigroch and Tamas Ungi and Andras Lasso and John Rudan and Gabor Fichtinger},
url = {https://www.spiedigitallibrary.org/conference-proceedings-of-spie/10951/109512C/Navigated-real-time-molecular-analysis-in-the-operating-theatre/10.1117/12.2512586.short},
year = {2019},
date = {2019-01-01},
volume = {10951},
pages = {618-624},
publisher = {SPIE},
abstract = {PURPOSE
In the operating theatre surgeons are accustomed to using spatially navigated tools in conjunction with standard clinical imaging during a procedure. This gives them a good idea where they are in the patients’ anatomy but doesn’t provide information about the type of tissue they are dissecting. In this paper we demonstrate an integrated system consisting of a spatially navigated surgical electrocautery combined with real-time molecular analysis of the dissected tissue using mass spectrometry.
METHODS
Using the 3D Slicer software package, we have integrated a commercially available neurosurgical navigation system with an intra-operative mass spectrometer (colloquially referred to as the intelligent knife, or iKnife) that analyzes the charged ions in the smoke created during cauterization. We demonstrate this system using a simulated patient comprised of an MRI scan from a brain cancer patient …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Wu, Victoria; Asselin, Mark; Ungi, Tamas; Fichtinger, Gabor
Detection of Spinal Ultrasound Landmarks Using Convolutional Neural Networks Conference
33rd International Congress & Exhibition on Computer Assisted Radiology and Surgery (CARS), vol. 14, Int J CARS, Rennes, France, 2019.
@conference{Wu2019b,
title = {Detection of Spinal Ultrasound Landmarks Using Convolutional Neural Networks},
author = {Victoria Wu and Mark Asselin and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wu2019b.pdf},
doi = {10.1007/s11548-019-01969-3},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {33rd International Congress & Exhibition on Computer Assisted Radiology and Surgery (CARS)},
volume = {14},
publisher = {Int J CARS},
address = {Rennes, France},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Lund, Shaun; Vaughan, Thomas; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Yeo, Caitlin; Engel, C. Jay; Fichtinger, Gabor
Controlling virtual views in navigated breast conserving surgery Conference
SPIE Medical Imaging 2019, 2019.
@conference{Lund2019a,
title = {Controlling virtual views in navigated breast conserving surgery},
author = {Shaun Lund and Thomas Vaughan and Tamas Ungi and Andras Lasso and Mark Asselin and Caitlin Yeo and C. Jay Engel and Gabor Fichtinger},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {SPIE Medical Imaging 2019},
abstract = {PURPOSE: Lumpectomy is the resection of a tumor in the breast while retaining as much healthy tissue as possible. Navigated lumpectomy seeks to improve on the traditional technique by employing computer guidance to achieve the complete excision of the cancer with optimal retention of healthy tissue. Setting up navigation in the OR relies on the manual interactions of a trained technician to align three-dimensional virtual views to the patient’s physical position and maintain their alignment throughout surgery. This work develops automatic alignment tools to improve the operability of navigation software for lumpectomies.
METHODS: Preset view buttons were developed to refine view setup to a single interaction. These buttons were tested by measuring the reduction in setup time and the number of manual interactions avoided through their use. An auto-center feature was created to ensure that three-dimensional models of anatomy and instruments were in the center of view throughout surgery. Recorded data from 32 lumpectomy cases were replayed and the number of auto-center view shifts was counted from the first cautery incision until the completion of the excision of cancerous tissue.
RESULTS: View setup can now be performed in a single interaction compared to an average of 13 interactions (taking 83 seconds) when performed manually. The auto-center feature was activated an average of 33 times in the cases studied (n=32).
CONCLUSION: The auto-center feature enhances the operability of the surgical navigation system, reducing the number of manual interactions required by a technician during the surgery. This feature along with preset camera view options are instrumental in the shift towards a completely surgeon-operable navigated lumpectomy system.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
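As a sketch of what an auto-center feature like the one described might do, the snippet below re-aims a VTK camera at a tracked model's bounding-box center. It is a minimal illustration under assumed names, not the navigation system's actual code.

def auto_center(renderer, actor):
    """Re-aim the camera at the model's center (renderer: vtkRenderer, actor: vtkActor)."""
    b = actor.GetBounds()  # (xmin, xmax, ymin, ymax, zmin, zmax)
    center = ((b[0] + b[1]) / 2.0, (b[2] + b[3]) / 2.0, (b[4] + b[5]) / 2.0)
    camera = renderer.GetActiveCamera()
    camera.SetFocalPoint(*center)        # shift the view onto the model
    renderer.ResetCameraClippingRange()  # keep the model within clipping bounds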
Asselin, Mark; Kaufmann, Martin; Wiercigroch, Julia; Ungi, Tamas; Lasso, Andras; Rudan, John; Fichtinger, Gabor
Navigated real-time molecular analysis in the operating theatre, demonstration of concept Conference
SPIE Medical Imaging 2019, 2019.
@conference{Asselin2019a,
title = {Navigated real-time molecular analysis in the operating theatre, demonstration of concept},
author = {Mark Asselin and Martin Kaufmann and Julia Wiercigroch and Tamas Ungi and Andras Lasso and John Rudan and Gabor Fichtinger},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {SPIE Medical Imaging 2019},
abstract = {PURPOSE: In the operating theatre surgeons are accustomed to using spatially navigated tools in conjunction with standard clinical imaging during a procedure. This gives them a good idea where they are in the patients’ anatomy but doesn’t provide information about the type of tissue they are dissecting. In this paper we demonstrate an integrated system consisting of a spatially navigated surgical electrocautery combined with real-time molecular analysis of the dissected tissue using mass spectrometry.
METHODS: Using the 3D Slicer software package, we have integrated a commercially available neurosurgical navigation system with an intra-operative mass spectrometer (colloquially referred to as the intelligent knife, or iKnife) that analyzes the charged ions in the smoke created during cauterization. We demonstrate this system using a simulated patient comprised of an MRI scan from a brain cancer patient deformably registered to a plastic skull model. On the skull model we placed porcine and bovine tissues to simulate cancerous and healthy tissue, respectively. We built a PCA/LDA model to distinguish between these tissue types. The tissue classifications were displayed in a spatially localized manner in the pre-operative imaging, in both 2D and 3D views.
RESULTS: We have demonstrated the feasibility of performing spatially navigated intra-operative analysis of tissues by mass spectrometry. We show that machine learning can classify our sample tissues, with an average computed confidence of 99.37% for porcine tissue and 99.36% for bovine tissue.
CONCLUSION: In this paper we demonstrate a proof of concept system for navigated intra-operative molecular analysis. This system may enable intra-operative awareness of spatially localized tissue classification during dissection, information that is especially useful in tumor surgeries where margins may not be visible to the unassisted eye.
Keywords: image guided therapy, intra-operative mass spectrometry, iKnife, 3D Slicer, open-source, rapid evaporative ionization mass spectrometry (REIMS)},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
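The abstract above reports a PCA/LDA model that separates porcine from bovine spectra with high confidence. A conventional way to build such a model is a PCA-then-LDA pipeline, sketched below with scikit-learn; the component count and API choices are assumptions, not the authors' configuration.

from sklearn.pipeline import make_pipeline
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis

def train_pca_lda(spectra, labels, n_components=10):
    """spectra: (n_scans, n_mz_bins) intensity matrix; labels: tissue class per scan."""
    model = make_pipeline(PCA(n_components=n_components),
                          LinearDiscriminantAnalysis())
    model.fit(spectra, labels)
    return model

# model.predict_proba(new_spectra) yields per-class confidences comparable to
# the percentages quoted in the results.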
Perrin, Sydney; Baum, Zachary M C; Asselin, Mark; Underwood, Grace; Choueib, Saleh; Lia, H.; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
Reproducibility of freehand calibrations for ultrasound-guided needle navigation Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Perrin2019a,
title = {Reproducibility of freehand calibrations for ultrasound-guided needle navigation},
author = {Sydney Perrin and Zachary M C Baum and Mark Asselin and Grace Underwood and Saleh Choueib and H. Lia and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Perrin2019a.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Asselin, Mark; Lasso, Andras; Ungi, Tamas; Fichtinger, Gabor
Towards webcam-based tracking for interventional navigation Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, Houston, Texas, 2018.
@conference{Asselin2018a,
title = {Towards webcam-based tracking for interventional navigation},
author = {Mark Asselin and Andras Lasso and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Asselin2018a.pdf},
year = {2018},
date = {2018-03-01},
urldate = {2018-03-01},
booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
address = {Houston, Texas},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Lasso, Andras; Nam, Hannah H; Dinh, Patrick V; Pinter, Csaba; Fillion-Robin, Jean-Christophe; Pieper, Steve; Jhaveri, Sankhesh; Vimort, Jean-Baptiste; Martin, Ken; Asselin, Mark; McGowan, Francis X; Kikinis, Ron; Fichtinger, Gabor; Jolley, Matthew A
Interaction with volume-rendered three-dimensional echocardiographic images in virtual reality Journal Article
In: Journal of the American Society of Echocardiography, vol. 31, iss. 10, pp. 1158-1160, 2018.
@article{fichtinger2018f,
title = {Interaction with volume-rendered three-dimensional echocardiographic images in virtual reality},
author = {Andras Lasso and Hannah H Nam and Patrick V Dinh and Csaba Pinter and Jean-Christophe Fillion-Robin and Steve Pieper and Sankhesh Jhaveri and Jean-Baptiste Vimort and Ken Martin and Mark Asselin and Francis X McGowan and Ron Kikinis and Gabor Fichtinger and Matthew A Jolley},
url = {https://www.onlinejase.com/article/S0894-7317(18)30343-2/abstract},
year = {2018},
date = {2018-01-01},
journal = {Journal of the American Society of Echocardiography},
volume = {31},
issue = {10},
pages = {1158-1160},
publisher = {Elsevier},
abstract = {Three-dimensional (3D) imaging is increasingly important in echocardiography. However, viewing of 3D images on a flat, two-dimensional screen is a barrier to comprehension of latent information. There have been previous attempts to visualize the full 3D nature of the data, but they have not been widely adopted. For example, 3D printing offers realistic interaction but is time consuming, has limited means for the observer to move into or through the model, and is not yet practical for routine clinical use. Furthermore, the heart beats, and 3D printed models are static. Stereoscopic viewing on 2D screens (as at a movie theater) is possible but is expensive, may not provide an immersive experience, and does not have integrated 3D input devices (controllers).
Stereoscopic virtual reality (VR) is developing rapidly but is being driven by the video gaming industry, with features not directly applicable to the visualization of …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Asselin, Mark; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
A training tool for ultrasound-guided central line insertion with webcam-based position tracking Journal Article
In: Simulation, Image Processing, and Ultrasound Systems for Assisted Diagnosis and Navigation, pp. 12-20, Springer, 2018.
@article{fichtinger2018n,
title = {A training tool for ultrasound-guided central line insertion with webcam-based position tracking},
author = {Mark Asselin and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {https://link.springer.com/chapter/10.1007/978-3-030-01045-4_2},
year = {2018},
date = {2018-01-01},
pages = {12-20},
publisher = {Springer International Publishing},
abstract = {PURPOSE: This paper describes an open-source ultrasound-guided central line insertion training system. Modern clinical guidelines are increasingly recommending ultrasound guidance for this procedure due to the decrease in morbidity it provides. However, there are no adequate low-cost systems for helping new clinicians train their inter-hand coordination for this demanding procedure. METHODS: This paper details a training platform which can be recreated with any standard ultrasound machine using inexpensive components. We describe the hardware, software, and calibration procedures with the intention that a reader can recreate this system themselves. RESULTS: The reproducibility and accuracy of the ultrasound calibration for this system was examined. We found that across the ultrasound image the calibration error was less than 2 mm. In a small feasibility study, two participants performed 5 …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
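The training tool tracks instruments with a webcam. One common way to do webcam-based pose tracking is printed fiducial markers; the sketch below uses OpenCV's ArUco module purely as an illustrative assumption (the entry does not specify this library, and the ArUco function names changed in OpenCV 4.7+, so this targets earlier opencv-contrib versions).

import cv2

def track_marker(frame, camera_matrix, dist_coeffs, marker_len_mm=30.0):
    """Estimate the pose of the first ArUco marker found in a webcam frame."""
    dictionary = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
    corners, ids, _rejected = cv2.aruco.detectMarkers(frame, dictionary)
    if ids is None:
        return None
    rvecs, tvecs, _obj = cv2.aruco.estimatePoseSingleMarkers(
        corners, marker_len_mm, camera_matrix, dist_coeffs)
    return rvecs[0], tvecs[0]  # rotation (Rodrigues) and translation, camera frame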
Asselin, Mark; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
A Training Tool for Ultrasound-Guided Central Line Insertion with Webcam-Based Position Tracking Conference
Simulation, Image Processing, and Ultrasound Systems for Assisted Diagnosis and Navigation, 2018.
@conference{Asselin2018c,
title = {A Training Tool for Ultrasound-Guided Central Line Insertion with Webcam-Based Position Tracking},
author = {Mark Asselin and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {http://dx.doi.org/10.1007/978-3-030-01045-4_2
https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Asselin2018c.pdf},
doi = {10.1007/978-3-030-01045-4_2},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {Simulation, Image Processing, and Ultrasound Systems for Assisted Diagnosis and Navigation},
abstract = {PURPOSE: This paper describes an open-source ultrasound-guided central line insertion training system. Modern clinical guidelines are increasingly recommending ultrasound guidance for this procedure due to the decrease in morbidity it provides. However, there are no adequate low-cost systems for helping new clinicians train their inter-hand coordination for this demanding procedure. METHODS: This paper details a training platform which can be recreated with any standard ultrasound machine using inexpensive components. We describe the hardware, software, and calibration procedures with the intention that a reader can recreate this system themselves. RESULTS: The reproducibility and accuracy of the ultrasound calibration for this system was examined. We found that across the ultrasound image the calibration error was less than 2 mm. In a small feasibility study, two participants performed 5 needle insertions each with an average of slightly above 2 mm error. CONCLUSION: We conclude that the accuracy of the system is sufficient for clinician training.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}