Mark Asselin
Mark is a master's student in biomedical computing with previous experience in electrical engineering. Please see his website at https://markasselin.github.io/ for more information about him.
Pinter, Csaba; Lasso, Andras; Choueib, Saleh; Asselin, Mark; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Jolley, Matthew A.; Fichtinger, Gabor
SlicerVR for Medical Intervention Training and Planning in Immersive Virtual Reality Journal Article
In: IEEE Transactions on Medical Robotics and Bionics, vol. 2, no. 2, pp. 108-117, 2020.
@article{Pinter2020,
title = {SlicerVR for Medical Intervention Training and Planning in Immersive Virtual Reality},
author = {Csaba Pinter and Andras Lasso and Saleh Choueib and Mark Asselin and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Matthew A. Jolley and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/03/Pinter2020a_0.pdf},
doi = {10.1109/TMRB.2020.2983199},
year = {2020},
date = {2020-03-01},
urldate = {2020-03-01},
journal = {IEEE Transactions on Medical Robotics and Bionics},
volume = {2},
number = {2},
pages = {108-117},
abstract = {Virtual reality (VR) provides immersive visualization that has proved to be useful in a variety of medical applications. Currently, however, no free open-source software platform exists that would provide comprehensive support for translational clinical researchers in prototyping experimental VR scenarios in training, planning or guiding medical interventions. By integrating VR functions in 3D Slicer, an established medical image analysis and visualization platform, SlicerVR enables a virtual reality experience with a single click. It provides functions to navigate and manipulate the virtual scene, as well as various settings to abate the feeling of motion sickness. SlicerVR allows for shared collaborative VR experience both locally and remotely. We present illustrative scenarios created with SlicerVR in a wide spectrum of applications, including echocardiography, neurosurgery, spine surgery, brachytherapy, intervention training and personalized patient education. SlicerVR is freely available under a BSD-type license as an extension to 3D Slicer and it has been downloaded over 7,800 times at the time of writing this article.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Connolly, Laura; Jamzad, Amoon; Kaufmann, Martin; Rubino, Rachel; Sedghi, Alireza; Ungi, Tamas; Asselin, Mark; Yam, Scott; Rudan, John; Nicol, Christopher; Fichtinger, Gabor; Mousavi, Parvin
Classification of tumor signatures from electrosurgical vapors using mass spectrometry and machine learning: a feasibility study Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE, Houston, Texas, United States, 2020.
@conference{Connolly2020a,
title = {Classification of tumor signatures from electrosurgical vapors using mass spectrometry and machine learning: a feasibility study},
author = {Laura Connolly and Amoon Jamzad and Martin Kaufmann and Rachel Rubino and Alireza Sedghi and Tamas Ungi and Mark Asselin and Scott Yam and John Rudan and Christopher Nicol and Gabor Fichtinger and Parvin Mousavi},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2020a.pdf},
doi = {10.1117/12.2549343},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
volume = {11315},
publisher = {SPIE},
address = {Houston, Texas, United States},
organization = {SPIE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Barr, Colton; Lasso, Andras; Asselin, Mark; Pieper, Steve; Robertson, Faith C.; Gormley, William B.; Fichtinger, Gabor
Towards portable image guidance and automatic patient registration using an RGB-D camera and video projector Conference
Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling, vol. 11315, SPIE, Houston, Texas, United States, 2020.
@conference{BarrC2020,
title = {Towards portable image guidance and automatic patient registration using an RGB-D camera and video projector},
author = {Colton Barr and Andras Lasso and Mark Asselin and Steve Pieper and Faith C. Robertson and William B. Gormley and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Barr2020.pdf},
doi = {10.1117/12.2549723},
year = {2020},
date = {2020-01-01},
urldate = {2020-01-01},
booktitle = {Medical Imaging 2020: Image-Guided Procedures, Robotic Interventions and Modeling},
volume = {11315},
publisher = {SPIE},
address = {Houston, Texas, United States},
organization = {SPIE},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Pinter, Csaba; Lasso, Andras; Asselin, Mark; Fillion-Robin, Jean-Christophe; Vimort, Jean-Baptiste; Martin, Ken; Fichtinger, Gabor
SlicerVR for image-guided therapy planning in immersive virtual reality Conference
The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK, London, UK, 2019.
@conference{Pinter2019a,
title = {SlicerVR for image-guided therapy planning in immersive virtual reality},
author = {Csaba Pinter and Andras Lasso and Mark Asselin and Jean-Christophe Fillion-Robin and Jean-Baptiste Vimort and Ken Martin and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Pinter2019a_0.pdf},
year = {2019},
date = {2019-06-01},
urldate = {2019-06-01},
booktitle = {The 12th Hamlyn Symposium on Medical Robotics, 23-26 June 2019, Imperial College, London, UK},
pages = {91-92},
address = {London, UK},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Holden, M.; Tan, Pearl; Hookey, Lawrence; Fichtinger, Gabor
Analyzing the curvature of the colon in different patient positions Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Laframboise2019a,
title = {Analyzing the curvature of the colon in different patient positions},
author = {Jacob Laframboise and Tamas Ungi and Andras Lasso and Mark Asselin and M. Holden and Pearl Tan and Lawrence Hookey and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Laframboise2019a.pdf},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Connolly, Laura; Ungi, Tamas; Lasso, Andras; Vaughan, Thomas; Asselin, Mark; Mousavi, Parvin; Yam, Scott; Fichtinger, Gabor
Mechanically-Controlled Spectroscopic Imaging for Tissue Classification Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Connolly2019a,
title = {Mechanically-Controlled Spectroscopic Imaging for Tissue Classification},
author = {Laura Connolly and Tamas Ungi and Andras Lasso and Thomas Vaughan and Mark Asselin and Parvin Mousavi and Scott Yam and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Connolly2019a_3.pdf},
doi = {10.1117/12.2512481},
year = {2019},
date = {2019-03-01},
urldate = {2019-03-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Perrin, Sydney; Baum, Zachary M C; Asselin, Mark; Underwood, Grace; Choueib, Saleh; Lia, H.; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
Reproducibility of freehand calibrations for ultrasound-guided needle navigation Conference
SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling, vol. 10951, San Diego, California, 2019.
@conference{Perrin2019a,
title = {Reproducibility of freehand calibrations for ultrasound-guided needle navigation},
author = {Sydney Perrin and Zachary M C Baum and Mark Asselin and Grace Underwood and Saleh Choueib and H. Lia and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Perrin2019a.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {SPIE Medical Imaging 2019: Image-Guided Procedures, Robotic Interventions, and Modeling},
volume = {10951},
address = {San Diego, California},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Asselin, Mark; Kaufmann, Martin; Wiercigroch, Julia; Ungi, Tamas; Lasso, Andras; Rudan, John; Fichtinger, Gabor
Navigated real-time molecular analysis in the operating theatre, demonstration of concept Conference
SPIE Medical Imaging 2019, 2019.
@conference{Asselin2019a,
title = {Navigated real-time molecular analysis in the operating theatre, demonstration of concept},
author = {Mark Asselin and Martin Kaufmann and Julia Wiercigroch and Tamas Ungi and Andras Lasso and John Rudan and Gabor Fichtinger},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {SPIE Medical Imaging 2019},
abstract = {PURPOSE: In the operating theatre surgeons are accustomed to using spatially navigated tools in conjunction with
standard clinical imaging during a procedure. This gives them a good idea where they are in the patient’s anatomy but
doesn’t provide information about the type of tissue they are dissecting. In this paper we demonstrate an integrated
system consisting of a spatially navigated surgical electrocautery combined with real-time molecular analysis of the
dissected tissue using mass spectrometry.
METHODS: Using the 3D Slicer software package, we have integrated a commercially available neurosurgical
navigation system with an intra-operative mass spectrometer (colloquially referred to as the intelligent knife, or iKnife)
that analyzes the charged ions in the smoke created during cauterization. We demonstrate this system using a simulated
patient composed of an MRI scan from a brain cancer patient deformably registered to a plastic skull model. On the
skull model we placed porcine and bovine tissues to simulate cancerous and healthy tissue, respectively. We built a
PCA/LDA model to distinguish between these tissue types. The tissue classifications were displayed in a spatially
localized manner in the pre-operative imaging, in both 2D and 3D views.
RESULTS: We have demonstrated the feasibility of performing spatially navigated intra-operative analysis of tissues by
mass spectrometry. We show that machine learning can classify our sample tissues, with an average computed
confidence of 99.37% for porcine tissue and 99.36% for bovine tissue.
CONCLUSION: In this paper we demonstrate a proof of concept system for navigated intra-operative molecular
analysis. This system may enable intra-operative awareness of spatially localized tissue classification during dissection,
information that is especially useful in tumor surgeries where margins may not be visible to the unassisted eye.
Keywords: image guided therapy, intra-operative mass spectrometry, iKnife, 3D Slicer, open-source, rapid evaporative
ionization mass spectrometry (REIMS)},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
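The PCA/LDA classification step named in this abstract can be illustrated with a minimal sketch. Everything below is an assumption for illustration only: the spectra are random placeholders, the component count is arbitrary, and the paper's actual preprocessing and model parameters are not reproduced here.

import numpy as np
from sklearn.decomposition import PCA
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.pipeline import make_pipeline

# Toy stand-in data: 40 smoke spectra, 500 m/z bins each, two tissue classes.
rng = np.random.default_rng(0)
spectra = rng.random((40, 500))
labels = np.array([0] * 20 + [1] * 20)  # 0 = porcine, 1 = bovine (hypothetical)

# PCA reduces the high-dimensional spectra before LDA classifies them,
# mirroring the PCA/LDA pipeline the abstract mentions.
model = make_pipeline(PCA(n_components=10), LinearDiscriminantAnalysis())
model.fit(spectra, labels)

# predict_proba yields a per-spectrum confidence, analogous in spirit to the
# "average computed confidence" figures reported in the abstract.
new_spectrum = rng.random((1, 500))
print(model.predict(new_spectrum), model.predict_proba(new_spectrum))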
Lund, Shaun; Vaughan, Thomas; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Yeo, Caitlin; Engel, C. Jay; Fichtinger, Gabor
Controlling virtual views in navigated breast conserving surgery Conference
SPIE Medical Imaging 2019, 2019.
@conference{Lund2019a,
title = {Controlling virtual views in navigated breast conserving surgery},
author = {Shaun Lund and Thomas Vaughan and Tamas Ungi and Andras Lasso and Mark Asselin and Caitlin Yeo and C. Jay Engel and Gabor Fichtinger},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {SPIE Medical Imaging 2019},
abstract = {PURPOSE: Lumpectomy is the resection of a tumor in the breast while retaining as much healthy tissue as possible.
Navigated lumpectomy seeks to improve on the traditional technique by employing computer guidance to achieve the
complete excision of the cancer with optimal retention of healthy tissue. Setting up navigation in the OR relies on the
manual interactions of a trained technician to align three-dimensional virtual views to the patient’s physical position
and maintain their alignment throughout surgery. This work develops automatic alignment tools to improve the
operability of navigation software for lumpectomies.
METHODS: Preset view buttons were developed to refine view setup to a single interaction. These buttons were
tested by measuring the reduction in setup time and the number of manual interactions avoided through their use. An
auto-center feature was created to ensure that three-dimensional models of anatomy and instruments were in the center
of view throughout surgery. Recorded data from 32 lumpectomy cases were replayed and the number of auto-center
view shifts was counted from the first cautery incision until the completion of the excision of cancerous tissue.
RESULTS: View setup can now be performed in a single interaction compared to an average of 13 interactions
(taking 83 seconds) when performed manually. The auto-center feature was activated an average of 33 times in the
cases studied (n=32).
CONCLUSION: The auto-center feature enhances the operability of the surgical navigation system, reducing the
number of manual interactions required by a technician during the surgery. This feature, along with preset camera view
options, is instrumental in the shift towards a completely surgeon-operable navigated lumpectomy system.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
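The auto-center behaviour this abstract describes can be sketched as a simple distance test between the current camera focal point and the tracked tool tip. This is a hedged illustration, not the paper's implementation: the function name, the margin value, and the replayed positions are all made up for the example.

import numpy as np

def auto_center(camera_focal_point, tool_tip, margin_mm=20.0):
    """Return a new focal point and whether an auto-center view shift occurred."""
    focal = np.asarray(camera_focal_point, dtype=float)
    tip = np.asarray(tool_tip, dtype=float)
    if np.linalg.norm(tip - focal) > margin_mm:
        return tip, True   # shift the view to follow the instrument
    return focal, False    # tool is still near the centre; leave the view alone

# Replaying recorded tool positions and counting shifts, in the spirit of the
# 32-case replay the abstract reports:
positions = [np.array([0, 0, 0]), np.array([5, 0, 0]), np.array([40, 0, 0])]
focal, shifts = np.zeros(3), 0
for p in positions:
    focal, shifted = auto_center(focal, p)
    shifts += int(shifted)
print(shifts)  # number of auto-center view shifts in this replay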
Wu, Victoria; Asselin, Mark; Ungi, Tamas; Fichtinger, Gabor
Detection of Spinal Ultrasound Landmarks Using Convolutional Neural Networks Conference
33rd International Congress & Exhibition on Computer Assisted Radiology and Surgery (CARS), vol. 14, Int J CARS, Rennes, France, 2019.
@conference{Wu2019b,
title = {Detection of Spinal Ultrasound Landmarks Using Convolutional Neural Networks},
author = {Victoria Wu and Mark Asselin and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Wu2019b.pdf},
doi = {10.1007/s11548-019-01969-3},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {33rd International Congress & Exhibition on Computer Assisted Radiology and Surgery (CARS)},
volume = {14},
publisher = {Int J CARS},
address = {Rennes, France},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Laframboise, Jacob; Ungi, Tamas; Lasso, Andras; Asselin, Mark; Holden, M.; Tan, Pearl; Hookey, Lawrence; Fichtinger, Gabor
Quantifying the effect of patient position on the curvature of colons Conference
17th Annual Imaging Network Ontario Symposium (ImNO), Imaging Network Ontario (ImNO), London, Ontario, 2019.
@conference{Laframboise2019b,
title = {Quantifying the effect of patient position on the curvature of colons},
author = {Jacob Laframboise and Tamas Ungi and Andras Lasso and Mark Asselin and M. Holden and Pearl Tan and Lawrence Hookey and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Laframboise2019b.pdf},
year = {2019},
date = {2019-01-01},
urldate = {2019-01-01},
booktitle = {17th Annual Imaging Network Ontario Symposium (ImNO)},
publisher = {Imaging Network Ontario (ImNO)},
address = {London, Ontario},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Asselin, Mark; Lasso, Andras; Ungi, Tamas; Fichtinger, Gabor
Towards webcam-based tracking for interventional navigation Conference
SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling, Houston, Texas, 2018.
@conference{Asselin2018a,
title = {Towards webcam-based tracking for interventional navigation},
author = {Mark Asselin and Andras Lasso and Tamas Ungi and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Asselin2018a.pdf},
year = {2018},
date = {2018-03-01},
urldate = {2018-03-01},
booktitle = {SPIE Medical Imaging 2018: Image-Guided Procedures, Robotic Interventions, and Modeling},
address = {Houston, Texas},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
Asselin, Mark; Ungi, Tamas; Lasso, Andras; Fichtinger, Gabor
A Training Tool for Ultrasound-Guided Central Line Insertion with Webcam-Based Position Tracking Conference
Simulation, Image Processing, and Ultrasound Systems for Assisted Diagnosis and Navigation, 2018.
@conference{Asselin2018c,
title = {A Training Tool for Ultrasound-Guided Central Line Insertion with Webcam-Based Position Tracking},
author = {Mark Asselin and Tamas Ungi and Andras Lasso and Gabor Fichtinger},
url = {https://labs.cs.queensu.ca/perklab/wp-content/uploads/sites/3/2024/02/Asselin2018c.pdf},
doi = {10.1007/978-3-030-01045-4_2},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
booktitle = {Simulation, Image Processing, and Ultrasound Systems for Assisted Diagnosis and Navigation},
abstract = {PURPOSE: This paper describes an open-source ultrasound-guided central line insertion training system. Modern clinical guidelines are increasingly recommending ultrasound guidance for this procedure due to the decrease in morbidity it provides. However, there are no adequate low-cost systems for helping new clinicians train their inter-hand coordination for this demanding procedure. METHODS: This paper details a training platform which can be recreated with any standard ultrasound machine using inexpensive components. We describe the hardware, software, and calibration procedures with the intention that a reader can recreate this system themselves. RESULTS: The reproducibility and accuracy of the ultrasound calibration for this system was examined. We found that across the ultrasound image the calibration error was less than 2 mm. In a small feasibility study, two participants performed 5 needle insertions each with an average of slightly above 2 mm error. CONCLUSION: We conclude that the accuracy of the system is sufficient for clinician training.},
keywords = {},
pubstate = {published},
tppubtype = {conference}
}
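A calibration error like the sub-2 mm figure quoted in this abstract is typically obtained by mapping image-space fiducials through a candidate image-to-probe transform and measuring distances to tracked ground-truth positions. The sketch below is an assumption-laden illustration with placeholder matrices and points; it is not the calibration procedure from the paper.

import numpy as np

def reprojection_errors(calibration, image_points, tracked_points):
    """Distances (mm) between calibrated and tracked fiducial positions.

    calibration: 4x4 homogeneous image-to-probe transform.
    image_points, tracked_points: (N, 3) arrays of corresponding points.
    """
    pts_h = np.hstack([image_points, np.ones((len(image_points), 1))])
    mapped = (calibration @ pts_h.T).T[:, :3]
    return np.linalg.norm(mapped - tracked_points, axis=1)

calib = np.eye(4)  # placeholder; a real transform comes from a calibration tool
img = np.array([[10.0, 20.0, 0.0], [30.0, 5.0, 0.0]])
trk = np.array([[10.5, 19.6, 0.2], [29.8, 5.3, -0.1]])
print(reprojection_errors(calib, img, trk).mean(), "mm mean error")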
Lasso, Andras; Nam, Hannah H.; Dinh, Patrick V.; Pinter, Csaba; Fillion-Robin, Jean-Christophe; Pieper, Steve; Jhaveri, Sankhesh; Vimort, Jean-Baptiste; Martin, Ken; Asselin, Mark; McGowan, Francis X.; Kikinis, Ron; Fichtinger, Gabor; Jolley, Matthew A.
Interaction with Volume-Rendered Three-Dimensional Echocardiographic Images in Virtual Reality Journal Article
In: J Am Soc Echocardiogr, vol. 31, no. 10, pp. 1158-1160, 2018.
@article{Lasso2018,
title = {Interaction with Volume-Rendered Three-Dimensional Echocardiographic Images in Virtual Reality},
author = {Andras Lasso and Hannah H. Nam and Patrick V. Dinh and Csaba Pinter and Jean-Christophe Fillion-Robin and Steve Pieper and Sankhesh Jhaveri and Jean-Baptiste Vimort and Ken Martin and Mark Asselin and Francis X. McGowan and Ron Kikinis and Gabor Fichtinger and Matthew A. Jolley},
year = {2018},
date = {2018-01-01},
urldate = {2018-01-01},
journal = {J Am Soc Echocardiogr},
volume = {31},
number = {10},
pages = {1158-1160},
abstract = {Three-dimensional (3D) imaging is increasingly important in echocardiography. However, viewing of 3D images on a flat, two-dimensional screen is a barrier to comprehension of latent information. There have been previous attempts to visualize the full 3D nature of the data, but they have not been widely adopted. For example, 3D printing offers realistic interaction but is time consuming, has limited means for the observer to move into or through the model, and is not yet practical for routine clinical use. Furthermore, the heart beats, and 3D printed models are static. Stereoscopic viewing on 2D screens (as at a movie theater) is possible but is expensive, may not provide an immersive experience, and does not have integrated 3D input devices (controllers).},
keywords = {},
pubstate = {published},
tppubtype = {article}
}