Biography
My research interests are in computer-aided diagnosis and interventions. These include: machine learning techniques for in silico inference and prediction; analysis of ultrasound images and signals for enhancement of cancer detection; image-aided, computer-assisted diagnosis of disease; ultrasound-guided interventions; knowledge discovery from high-throughput biological data; quantitative modeling and reverse engineering of gene regulatory networks; analysis, segmentation and classification of fluorescence microscopy images; and chromosome and cell imaging.
Affiliations
- Professor, School of Computing
- Member, College of New Scholars, Artists and Scientists, Royal Society of Canada
- c/a Department of Electrical and Computer Engineering
- c/a Human Mobility Research Centre, Kingston General Hospital
Admissions
The Med-I Lab is always looking for the brightest students to join us. We continually have openings for high-school, undergraduate, graduate, and postdoctoral fellowship positions.
Publications
2024
Fooladgar, Fahimeh; To, Minh Nguyen Nhat; Mousavi, Parvin; Abolmaesumi, Purang
Manifold DivideMix: A semi-supervised contrastive learning framework for severe label noise Proceedings Article
In: pp. 4012-4021, 2024.
@inproceedings{fooladgar2024,
  title     = {Manifold {DivideMix}: A semi-supervised contrastive learning framework for severe label noise},
  author    = {Fooladgar, Fahimeh and To, Minh Nguyen Nhat and Mousavi, Parvin and Abolmaesumi, Purang},
  year      = {2024},
  date      = {2024-01-01},
  pages     = {4012--4021},
  abstract  = {Deep neural networks have proven to be highly effective when large amounts of data with clean labels are available. However their performance degrades when training data contains noisy labels leading to poor generalization on the test set. Real-world datasets contain noisy label samples that either have similar visual semantics to other classes (in-distribution) or have no semantic relevance to any class (out-of-distribution) in the dataset. Most state-of-the-art methods leverage ID labeled noisy samples as unlabeled data for semi-supervised learning but OOD labeled noisy samples cannot be used in this way because they do not belong to any class within the dataset. Hence in this paper we propose incorporating the information from all the training data by leveraging the benefits of self-supervised training. Our method aims to extract a meaningful and generalizable embedding space for each sample regardless of its label. Then we employ a simple yet effective K-nearest neighbor method to remove portions of out-of-distribution samples. By discarding these samples we propose an iterative" Manifold DivideMix" algorithm to find clean and noisy samples and train our model in a semi-supervised way. In addition we propose" MixEMatch" a new algorithm for the semi-supervised step that involves mixup augmentation at the input and final hidden representations of the model. This will extract better representations by interpolating both in the input and manifold spaces. Extensive experiments on multiple synthetic-noise image benchmarks and real-world web-crawled datasets demonstrate the effectiveness of our proposed framework. Code is …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Mannina, Sophia; Addas, Shamel; Abolmaesumi, Purang; Mousavi, Parvin; Maghsoodi, Nooshin; Nassar, Sarah; Maslove, David
Digital Nudges in Healthcare Contexts: An Information Systems Perspective Journal Article
In: 2024.
@article{mannina2024b,
  title     = {Digital Nudges in Healthcare Contexts: An Information Systems Perspective},
  author    = {Mannina, Sophia and Addas, Shamel and Abolmaesumi, Purang and Mousavi, Parvin and Maghsoodi, Nooshin and Nassar, Sarah and Maslove, David},
  year      = {2024},
  date      = {2024-01-01},
  abstract  = {Digital transformation has presented healthcare providers with new tools, roles, and challenges related to patient care. Although digital technologies like electronic health records can offer valuable information concerning patients' needs, the growing volume of data that healthcare providers receive through these tools can contribute to information overload and alert fatigue. Nudging is a behavioural economics technique that can be applied to guide healthcare providers toward optimal care decisions while limiting information overload. To better understand the application of this technique, we perform a systematic literature review that explores digital nudges oriented toward healthcare providers from an information systems perspective. This review identifies positive and negative outcomes of digital nudges and presents design principles that can guide development of nudges directed toward healthcare providers. Opportunities are discussed to further assess digital nudges through the information systems lens.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Harmanani, Mohamed; Wilson, Paul FR; Fooladgar, Fahimeh; Jamzad, Amoon; Gilany, Mahdi; To, Minh Nguyen Nhat; Wodlinger, Brian; Abolmaesumi, Purang; Mousavi, Parvin
Benchmarking image transformers for prostate cancer detection from ultrasound data Proceedings Article
In: pp. 245-251, SPIE, 2024.
@inproceedings{harmanani2024b,
  title     = {Benchmarking image transformers for prostate cancer detection from ultrasound data},
  author    = {Harmanani, Mohamed and Wilson, Paul F. R. and Fooladgar, Fahimeh and Jamzad, Amoon and Gilany, Mahdi and To, Minh Nguyen Nhat and Wodlinger, Brian and Abolmaesumi, Purang and Mousavi, Parvin},
  year      = {2024},
  date      = {2024-01-01},
  volume    = {12928},
  pages     = {245--251},
  publisher = {SPIE},
  abstract  = {PURPOSE
Deep learning methods for classifying prostate cancer (PCa) in ultrasound images typically employ convolutional neural networks (CNN) to detect cancer in small regions of interest (ROI) along a needle trace region. However, this approach suffers from weak labelling, since the ground-truth histopathology labels do not describe the properties of individual ROIs. Recently, multi-scale approaches have sought to mitigate this issue by combining the context awareness of transformers with a convolutional feature extractor to detect cancer from multiple ROIs using multiple-instance learning (MIL). In this work, we present a detailed study of several image transformer architectures for both ROI-scale and multi-scale classification, and a comparison of the performance of CNNs and transformers for ultrasound-based prostate cancer classification. We also design a novel multi-objective learning strategy that …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Deep learning methods for classifying prostate cancer (PCa) in ultrasound images typically employ convolutional neural networks (CNN) to detect cancer in small regions of interest (ROI) along a needle trace region. However, this approach suffers from weak labelling, since the ground-truth histopathology labels do not describe the properties of individual ROIs. Recently, multi-scale approaches have sought to mitigate this issue by combining the context awareness of transformers with a convolutional feature extractor to detect cancer from multiple ROIs using multiple-instance learning (MIL). In this work, we present a detailed study of several image transformer architectures for both ROI-scale and multi-scale classification, and a comparison of the performance of CNNs and transformers for ultrasound-based prostate cancer classification. We also design a novel multi-objective learning strategy that …
To, Minh Nguyen Nhat; Fooladgar, Fahimeh; Wilson, Paul; Harmanani, Mohamed; Gilany, Mahdi; Sojoudi, Samira; Jamzad, Amoon; Chang, Silvia; Black, Peter; Mousavi, Parvin; Abolmaesumi, Purang
LensePro: Label noise-tolerant prototype-based network for improving cancer detection in prostate ultrasound with limited annotations Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, pp. 1-8, 2024.
@article{to2024b,
  title     = {{LensePro}: Label noise-tolerant prototype-based network for improving cancer detection in prostate ultrasound with limited annotations},
  author    = {To, Minh Nguyen Nhat and Fooladgar, Fahimeh and Wilson, Paul and Harmanani, Mohamed and Gilany, Mahdi and Sojoudi, Samira and Jamzad, Amoon and Chang, Silvia and Black, Peter and Mousavi, Parvin and Abolmaesumi, Purang},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  pages     = {1--8},
  publisher = {Springer International Publishing},
  abstract  = {Purpose
The standard of care for prostate cancer (PCa) diagnosis is the histopathological analysis of tissue samples obtained via transrectal ultrasound (TRUS) guided biopsy. Models built with deep neural networks (DNNs) hold the potential for direct PCa detection from TRUS, which allows targeted biopsy and subsequently enhances outcomes. Yet, there are ongoing challenges with training robust models, stemming from issues such as noisy labels, out-of-distribution (OOD) data, and limited labeled data.
Methods
This study presents LensePro, a unified method that not only excels in label efficiency but also demonstrates robustness against label noise and OOD data. LensePro comprises two key stages: first, self-supervised learning to extract high-quality feature representations from abundant unlabeled TRUS data and, second, label noise-tolerant prototype-based learning to classify the extracted features …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
The standard of care for prostate cancer (PCa) diagnosis is the histopathological analysis of tissue samples obtained via transrectal ultrasound (TRUS) guided biopsy. Models built with deep neural networks (DNNs) hold the potential for direct PCa detection from TRUS, which allows targeted biopsy and subsequently enhances outcomes. Yet, there are ongoing challenges with training robust models, stemming from issues such as noisy labels, out-of-distribution (OOD) data, and limited labeled data.
Methods
This study presents LensePro, a unified method that not only excels in label efficiency but also demonstrates robustness against label noise and OOD data. LensePro comprises two key stages: first, self-supervised learning to extract high-quality feature representations from abundant unlabeled TRUS data and, second, label noise-tolerant prototype-based learning to classify the extracted features …
Connolly, Laura; Fooladgar, Fahimeh; Jamzad, Amoon; Kaufmann, Martin; Syeda, Ayesha; Ren, Kevin; Abolmaesumi, Purang; Rudan, John F; McKay, Doug; Fichtinger, Gabor; Mousavi, Parvin
ImSpect: Image-driven self-supervised learning for surgical margin evaluation with mass spectrometry Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, pp. 1-8, 2024.
@article{connolly2024b,
  title     = {{ImSpect}: Image-driven self-supervised learning for surgical margin evaluation with mass spectrometry},
  author    = {Connolly, Laura and Fooladgar, Fahimeh and Jamzad, Amoon and Kaufmann, Martin and Syeda, Ayesha and Ren, Kevin and Abolmaesumi, Purang and Rudan, John F. and McKay, Doug and Fichtinger, Gabor and Mousavi, Parvin},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  pages     = {1--8},
  publisher = {Springer International Publishing},
  abstract  = {Purpose
Real-time assessment of surgical margins is critical for favorable outcomes in cancer patients. The iKnife is a mass spectrometry device that has demonstrated potential for margin detection in cancer surgery. Previous studies have shown that using deep learning on iKnife data can facilitate real-time tissue characterization. However, none of the existing literature on the iKnife facilitate the use of publicly available, state-of-the-art pretrained networks or datasets that have been used in computer vision and other domains.
Methods
In a new framework we call ImSpect, we convert 1D iKnife data, captured during basal cell carcinoma (BCC) surgery, into 2D images in order to capitalize on state-of-the-art image classification networks. We also use self-supervision to leverage large amounts of unlabeled, intraoperative data to accommodate the data requirements of these networks.
Results
Through extensive ablation …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Real-time assessment of surgical margins is critical for favorable outcomes in cancer patients. The iKnife is a mass spectrometry device that has demonstrated potential for margin detection in cancer surgery. Previous studies have shown that using deep learning on iKnife data can facilitate real-time tissue characterization. However, none of the existing literature on the iKnife facilitate the use of publicly available, state-of-the-art pretrained networks or datasets that have been used in computer vision and other domains.
Methods
In a new framework we call ImSpect, we convert 1D iKnife data, captured during basal cell carcinoma (BCC) surgery, into 2D images in order to capitalize on state-of-the-art image classification networks. We also use self-supervision to leverage large amounts of unlabeled, intraoperative data to accommodate the data requirements of these networks.
Results
Through extensive ablation …
Yeung, Chris; Ungi, Tamas; Hu, Zoe; Jamzad, Amoon; Kaufmann, Martin; Walker, Ross; Merchant, Shaila; Engel, Cecil Jay; Jabs, Doris; Rudan, John; Mousavi, Parvin; Fichtinger, Gabor
From quantitative metrics to clinical success: assessing the utility of deep learning for tumor segmentation in breast surgery Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, pp. 1-9, 2024.
@article{yeung2024b,
  title     = {From quantitative metrics to clinical success: assessing the utility of deep learning for tumor segmentation in breast surgery},
  author    = {Yeung, Chris and Ungi, Tamas and Hu, Zoe and Jamzad, Amoon and Kaufmann, Martin and Walker, Ross and Merchant, Shaila and Engel, Cecil Jay and Jabs, Doris and Rudan, John and Mousavi, Parvin and Fichtinger, Gabor},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  pages     = {1--9},
  publisher = {Springer International Publishing},
  abstract  = {Purpose
Preventing positive margins is essential for ensuring favorable patient outcomes following breast-conserving surgery (BCS). Deep learning has the potential to enable this by automatically contouring the tumor and guiding resection in real time. However, evaluation of such models with respect to pathology outcomes is necessary for their successful translation into clinical practice.
Methods
Sixteen deep learning models based on established architectures in the literature are trained on 7318 ultrasound images from 33 patients. Models are ranked by an expert based on their contours generated from images in our test set. Generated contours from each model are also analyzed using recorded cautery trajectories of five navigated BCS cases to predict margin status. Predicted margins are compared with pathology reports.
Results
The best-performing model using both quantitative evaluation and our visual …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Preventing positive margins is essential for ensuring favorable patient outcomes following breast-conserving surgery (BCS). Deep learning has the potential to enable this by automatically contouring the tumor and guiding resection in real time. However, evaluation of such models with respect to pathology outcomes is necessary for their successful translation into clinical practice.
Methods
Sixteen deep learning models based on established architectures in the literature are trained on 7318 ultrasound images from 33 patients. Models are ranked by an expert based on their contours generated from images in our test set. Generated contours from each model are also analyzed using recorded cautery trajectories of five navigated BCS cases to predict margin status. Predicted margins are compared with pathology reports.
Results
The best-performing model using both quantitative evaluation and our visual …
Kaufmann, Martin; Jamzad, Amoon; Ungi, Tamas; Rodgers, Jessica R; Koster, Teaghan; Yeung, Chris; Ehrlich, Josh; Santilli, Alice; Asselin, Mark; Janssen, Natasja; McMullen, Julie; Solberg, Kathryn; Cheesman, Joanna; Carlo, Alessia Di; Ren, Kevin Yi Mi; Varma, Sonal; Merchant, Shaila; Engel, Cecil Jay; Walker, G Ross; Gallo, Andrea; Jabs, Doris; Mousavi, Parvin; Fichtinger, Gabor; Rudan, John F
Abstract PO2-23-07: Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery Journal Article
In: Cancer Research, vol. 84, no. 9_Supplement, pp. PO2-23-07-PO2-23-07, 2024.
@article{kaufmann2024c,
  title     = {Abstract {PO2-23-07}: Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery},
  author    = {Kaufmann, Martin and Jamzad, Amoon and Ungi, Tamas and Rodgers, Jessica R. and Koster, Teaghan and Yeung, Chris and Ehrlich, Josh and Santilli, Alice and Asselin, Mark and Janssen, Natasja and McMullen, Julie and Solberg, Kathryn and Cheesman, Joanna and Di Carlo, Alessia and Ren, Kevin Yi Mi and Varma, Sonal and Merchant, Shaila and Engel, Cecil Jay and Walker, G. Ross and Gallo, Andrea and Jabs, Doris and Mousavi, Parvin and Fichtinger, Gabor and Rudan, John F.},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {Cancer Research},
  volume    = {84},
  number    = {9_Supplement},
  pages     = {PO2-23-07--PO2-23-07},
  publisher = {The American Association for Cancer Research},
  abstract  = {Positive resection margins occur in approximately 25% of breast cancer (BCa) surgeries, requiring re-operation. Margin status is not routinely available during surgery; thus, technologies that identify residual cancer on the specimen or cavity are needed to provide intraoperative decision support that may reduce positive margin rates. Rapid evaporative ionization mass spectrometry (REIMS) is an emerging technique that chemically profiles the plume generated by tissue cauterization to classify the ablated tissue as either cancerous or non-cancerous, on the basis of detected lipid species. Although REIMS can distinguish cancer and non-cancerous breast tissue by the signals generated, it does not indicate the location of the classified tissue in real-time. Our objective was to combine REIMS with spatio-temporal navigation (navigated REIMS), and to compare performance of navigated REIMS with conventional …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Kaufmann, Martin; Jamzad, Amoon; Ungi, Tamas; Rodgers, Jessica; Koster, Teaghan; Yeung, Chris; Janssen, Natasja; McMullen, Julie; Solberg, Kathryn; Cheesman, Joanna; Ren, Kevin Ti Mi; Varma, Sonal; Merchant, Shaila; Engel, Cecil Jay; Walker, G Ross; Gallo, Andrea; Jabs, Doris; Mousavi, Parvin; Fichtinger, Gabor; Rudan, John
Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery Journal Article
In: vol. 31, no. 1, pp. S10-S10, 2024.
@article{kaufmann2024b,
  title     = {Three-dimensional navigated mass spectrometry for intraoperative margin assessment during breast cancer surgery},
  author    = {Kaufmann, Martin and Jamzad, Amoon and Ungi, Tamas and Rodgers, Jessica and Koster, Teaghan and Yeung, Chris and Janssen, Natasja and McMullen, Julie and Solberg, Kathryn and Cheesman, Joanna and Ren, Kevin Ti Mi and Varma, Sonal and Merchant, Shaila and Engel, Cecil Jay and Walker, G. Ross and Gallo, Andrea and Jabs, Doris and Mousavi, Parvin and Fichtinger, Gabor and Rudan, John},
  year      = {2024},
  date      = {2024-01-01},
  volume    = {31},
  number    = {1},
  pages     = {S10--S10},
  publisher = {Springer},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Kim, Andrew; Yeung, Chris; Szabo, Robert; Sunderland, Kyle; Hisey, Rebecca; Morton, David; Kikinis, Ron; Diao, Babacar; Mousavi, Parvin; Ungi, Tamas; Fichtinger, Gabor
Percutaneous nephrostomy needle guidance using real-time 3D anatomical visualization with live ultrasound segmentation Proceedings Article
In: pp. 163-168, SPIE, 2024.
@inproceedings{kim2024,
  title     = {Percutaneous nephrostomy needle guidance using real-time {3D} anatomical visualization with live ultrasound segmentation},
  author    = {Kim, Andrew and Yeung, Chris and Szabo, Robert and Sunderland, Kyle and Hisey, Rebecca and Morton, David and Kikinis, Ron and Diao, Babacar and Mousavi, Parvin and Ungi, Tamas and Fichtinger, Gabor},
  year      = {2024},
  date      = {2024-01-01},
  volume    = {12928},
  pages     = {163--168},
  publisher = {SPIE},
  abstract  = {PURPOSE
Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software.
METHODS
Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Percutaneous nephrostomy is a commonly performed procedure to drain urine to provide relief in patients with hydronephrosis. Conventional percutaneous nephrostomy needle guidance methods can be difficult, expensive, or not portable. We propose an open-source real-time 3D anatomical visualization aid for needle guidance with live ultrasound segmentation and 3D volume reconstruction using free, open-source software.
METHODS
Basic hydronephrotic kidney phantoms were created, and recordings of these models were manually segmented and used to train a deep learning model that makes live segmentation predictions to perform live 3D volume reconstruction of the fluid-filled cavity. Participants performed 5 needle insertions with the visualization aid and 5 insertions with ultrasound needle guidance on a kidney phantom in randomized order, and these were recorded. Recordings of the …
Akbarifar, Faranak; Dukelow, Sean P; Jin, Albert; Mousavi, Parvin; Scott, Stephen H
Optimizing Stroke Detection Using Evidential Networks and Uncertainty-Based Refinement Journal Article
In: 2024.
@article{akbarifar2024,
  title     = {Optimizing Stroke Detection Using Evidential Networks and Uncertainty-Based Refinement},
  author    = {Akbarifar, Faranak and Dukelow, Sean P. and Jin, Albert and Mousavi, Parvin and Scott, Stephen H.},
  year      = {2024},
  date      = {2024-01-01},
  abstract  = {Background:
Technologies such as interactive robotics and motion capture systems permit the development of kinematic-based approaches to assess motor impairments in stroke survivors. Here we utilise the Kinarm Exoskeleton robotic system and deep learning techniques to explore differences in motor performance between healthy controls, individuals with stroke and transient ischemic attacks (TIA).
Methods:
Building upon previous research that employed deep learning methods to distinguish between minimally impaired stroke patients and healthy controls using Kinarm data, this study introduces a novel dimension by estimating the confidence or uncertainty of the model's predictions. An evidential network is employed to measure this confidence, which subsequently aids in the refinement of training and testing datasets.
Results:
The application of deep learning techniques in this context proves to be promising …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Technologies such as interactive robotics and motion capture systems permit the development of kinematic-based approaches to assess motor impairments in stroke survivors. Here we utilise the Kinarm Exoskeleton robotic system and deep learning techniques to explore differences in motor performance between healthy controls, individuals with stroke and transient ischemic attacks (TIA).
Methods:
Building upon previous research that employed deep learning methods to distinguish between minimally impaired stroke patients and healthy controls using Kinarm data, this study introduces a novel dimension by estimating the confidence or uncertainty of the model's predictions. An evidential network is employed to measure this confidence, which subsequently aids in the refinement of training and testing datasets.
Results:
The application of deep learning techniques in this context proves to be promising …
Wilson, Paul FR; Harmanani, Mohamed; To, Minh Nguyen Nhat; Gilany, Mahdi; Jamzad, Amoon; Fooladgar, Fahimeh; Wodlinger, Brian; Abolmaesumi, Purang; Mousavi, Parvin
Toward confident prostate cancer detection using ultrasound: a multi-center study Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, pp. 1-9, 2024.
@article{wilson2024,
  title     = {Toward confident prostate cancer detection using ultrasound: a multi-center study},
  author    = {Wilson, Paul F. R. and Harmanani, Mohamed and To, Minh Nguyen Nhat and Gilany, Mahdi and Jamzad, Amoon and Fooladgar, Fahimeh and Wodlinger, Brian and Abolmaesumi, Purang and Mousavi, Parvin},
  year      = {2024},
  date      = {2024-01-01},
  journal   = {International Journal of Computer Assisted Radiology and Surgery},
  pages     = {1--9},
  publisher = {Springer International Publishing},
  abstract  = {Purpose
Deep learning-based analysis of micro-ultrasound images to detect cancerous lesions is a promising tool for improving prostate cancer (PCa) diagnosis. An ideal model should confidently identify cancer while responding with appropriate uncertainty when presented with out-of-distribution inputs that arise during deployment due to imaging artifacts and the biological heterogeneity of patients and prostatic tissue.
Methods
Using micro-ultrasound data from 693 patients across 5 clinical centers who underwent micro-ultrasound guided prostate biopsy, we train and evaluate convolutional neural network models for PCa detection. To improve robustness to out-of-distribution inputs, we employ and comprehensively benchmark several state-of-the-art uncertainty estimation methods.
Results
PCa detection models achieve performance scores up to average AUROC with a 10-fold cross validation setup. Models with …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article}
}
Deep learning-based analysis of micro-ultrasound images to detect cancerous lesions is a promising tool for improving prostate cancer (PCa) diagnosis. An ideal model should confidently identify cancer while responding with appropriate uncertainty when presented with out-of-distribution inputs that arise during deployment due to imaging artifacts and the biological heterogeneity of patients and prostatic tissue.
Methods
Using micro-ultrasound data from 693 patients across 5 clinical centers who underwent micro-ultrasound guided prostate biopsy, we train and evaluate convolutional neural network models for PCa detection. To improve robustness to out-of-distribution inputs, we employ and comprehensively benchmark several state-of-the-art uncertainty estimation methods.
Results
PCa detection models achieve performance scores up to average AUROC with a 10-fold cross validation setup. Models with …
2023
Radcliffe, Olivia; Connolly, Laura; Ungi, Tamas; Yeo, Caitlin; Rudan, John F; Fichtinger, Gabor; Mousavi, Parvin
Navigated surgical resection cavity inspection for breast conserving surgery Proceedings Article
In: pp. 234-241, SPIE, 2023.
@inproceedings{radcliffe2023,
  title     = {Navigated surgical resection cavity inspection for breast conserving surgery},
  author    = {Radcliffe, Olivia and Connolly, Laura and Ungi, Tamas and Yeo, Caitlin and Rudan, John F. and Fichtinger, Gabor and Mousavi, Parvin},
  year      = {2023},
  date      = {2023-01-01},
  volume    = {12466},
  pages     = {234--241},
  publisher = {SPIE},
  abstract  = {Up to 40% of Breast Conserving Surgery (BCS) patients must undergo repeat surgery because cancer is left behind in the resection cavity. The mobility of the breast resection cavity makes it difficult to localize residual cancer and, therefore, cavity shaving is a common technique for cancer removal. Cavity shaving involves removing an additional layer of tissue from the entire resection cavity, often resulting in unnecessary healthy tissue loss. In this study, we demonstrated a navigation system and open-source software module that facilitates visualization of the breast resection cavity for targeted localization of residual cancer.},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Syeda, Ayesha; Fooladgar, Fahimeh; Jamzad, Amoon; Srikanthan, Dilakshan; Kaufmann, Martin; Ren, Kevin; Engel, Jay; Walker, Ross; Merchant, Shaila; McKay, Doug; Varma, Sonal; Fichtinger, Gabor; Rudan, John; Mousavi, Parvin
Self-supervised learning and uncertainty estimation for surgical margin detection Proceedings Article
In: pp. 76-83, SPIE, 2023.
@inproceedings{syeda2023,
  title     = {Self-supervised learning and uncertainty estimation for surgical margin detection},
  author    = {Syeda, Ayesha and Fooladgar, Fahimeh and Jamzad, Amoon and Srikanthan, Dilakshan and Kaufmann, Martin and Ren, Kevin and Engel, Jay and Walker, Ross and Merchant, Shaila and McKay, Doug and Varma, Sonal and Fichtinger, Gabor and Rudan, John and Mousavi, Parvin},
  year      = {2023},
  date      = {2023-01-01},
  volume    = {12466},
  pages     = {76--83},
  publisher = {SPIE},
  abstract  = {Up to 35% of breast-conserving surgeries fail to resect all the tumors completely. Ideally, machine learning methods using the iKnife data, which uses Rapid Evaporative Ionization Mass Spectrometry (REIMS), can be utilized to predict tissue type in real-time during surgery, resulting in better tumor resections. As REIMS data is heterogeneous and weakly labeled, and datasets are often small, model performance and reliability can be adversely affected. Self-supervised training and uncertainty estimation of the prediction can be used to mitigate these challenges by learning the signatures of input data without their label as well as including predictive confidence in output reporting. We first design an autoencoder model using a reconstruction pretext task as a self-supervised pretraining step without considering tissue type. Next, we construct our uncertainty-aware classifier using the encoder part of the model with …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
Yeung, Chris; Ehrlich, Joshua; Jamzad, Amoon; Kaufmann, Martin; Rudan, John; Engel, Cecil Jay; Mousavi, Parvin; Ungi, Tamas; Fichtinger, Gabor
Cautery trajectory analysis for evaluation of resection margins in breast-conserving surgery Proceedings Article
In: pp. 495-501, SPIE, 2023.
@inproceedings{yeung2023,
  title     = {Cautery trajectory analysis for evaluation of resection margins in breast-conserving surgery},
  author    = {Yeung, Chris and Ehrlich, Joshua and Jamzad, Amoon and Kaufmann, Martin and Rudan, John and Engel, Cecil Jay and Mousavi, Parvin and Ungi, Tamas and Fichtinger, Gabor},
  year      = {2023},
  date      = {2023-01-01},
  volume    = {12466},
  pages     = {495--501},
  publisher = {SPIE},
  abstract  = {After breast-conserving surgery, positive margins occur when breast cancer cells are found on the resection margin, leading to a higher chance of recurrence and the need for repeat surgery. The NaviKnife is an electromagnetic tracking-based surgical navigation system that helps to provide visual and spatial feedback to the surgeon. In this study, we conduct a gross evaluation of this navigation system with respect to resection margins. The trajectory of the surgical cautery relative to ultrasound-visible tumor will be visualized, and its distance and location from the tumor will be compared with pathology reports. Six breast-conserving surgery cases that resulted in positive margins were performed using the NaviKnife system. Trackers were placed on the surgical tools and their positions in three-dimensional space were recorded throughout the procedure. The closest distance between the cautery and the tumor …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {inproceedings}
}
March, Lucas; Rodgers, Jessica R; Hisey, Rebecca; Jamzad, Amoon; Santilli, Alice ML; McKay, Doug; Rudan, John F; Kaufmann, Martin; Ren, Kevin Yi Mi; Fichtinger, Gabor; Mousavi, Parvin
Cautery tool state detection using deep learning on intraoperative surgery videos Proceedings Article
In: pp. 89-95, SPIE, 2023.
@inproceedings{march2023,
title = {Cautery tool state detection using deep learning on intraoperative surgery videos},
author = {Lucas March and Jessica R Rodgers and Rebecca Hisey and Amoon Jamzad and Alice ML Santilli and Doug McKay and John F Rudan and Martin Kaufmann and Kevin Yi Mi Ren and Gabor Fichtinger and Parvin Mousavi},
booktitle = {Medical Imaging 2023: Image-Guided Procedures, Robotic Interventions, and Modeling},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {89--95},
publisher = {SPIE},
abstract = {Treatment for Basal Cell Carcinoma (BCC) includes an excisional surgery to remove cancerous tissues, using a cautery tool to make burns along a defined resection margin around the tumor. Margin evaluation occurs post-surgically, requiring repeat surgery if positive margins are detected. Rapid Evaporative Ionization Mass Spectrometry (REIMS) can help distinguish healthy and cancerous tissue but does not provide spatial information about the cautery tool location where the spectra are acquired. We propose using intraoperative surgical video recordings and deep learning to provide surgeons with guidance to locate sites of potential positive margins. Frames from 14 intraoperative videos of BCC surgery were extracted and used to train a sequence of networks. The first network extracts frames showing surgery in-progress, then, an object detection network localizes the cautery tool and resection margin. Finally …},
internal-note = {NOTE(review): booktitle inferred from SPIE proceedings volume 12466 -- confirm against the publisher record},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Groves, Leah A; Keita, Mohamed; Talla, Saidou; Kikinis, Ron; Fichtinger, Gabor; Mousavi, Parvin; Camara, Mamadou
A review of low-cost ultrasound compatible phantoms Journal Article
In: vol. 70, no. 12, pp. 3436-3448, 2023.
@article{groves2023,
title = {A review of low-cost ultrasound compatible phantoms},
author = {Leah A Groves and Mohamed Keita and Saidou Talla and Ron Kikinis and Gabor Fichtinger and Parvin Mousavi and Mamadou Camara},
journal = {IEEE Transactions on Biomedical Engineering},
year = {2023},
date = {2023-01-01},
volume = {70},
number = {12},
pages = {3436--3448},
publisher = {IEEE},
abstract = {Ultrasound-compatible phantoms are used to develop novel US-based systems and train simulated medical interventions. The price difference between lab-made and commercially available ultrasound-compatible phantoms lead to the publication of many papers categorized as low-cost in the literature. The aim of this review was to improve the phantom selection process by summarizing the pertinent literature. We compiled papers on US-compatible spine, prostate, vascular, breast, kidney, and liver phantoms. We reviewed papers for cost and accessibility, providing an overview of the materials, construction time, shelf life, needle insertion limits, and manufacturing and evaluation methods. This information was summarized by anatomy. The clinical application associated with each phantom was also reported for those interested in a particular intervention. Techniques and common practices for building low-cost …},
internal-note = {NOTE(review): journal inferred from IEEE publisher, vol. 70, no. 12 (2023) -- confirm against the publisher record},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Gilany, Mahdi; Wilson, Paul; Perera-Ortega, Andrea; Jamzad, Amoon; To, Minh Nguyen Nhat; Fooladgar, Fahimeh; Wodlinger, Brian; Abolmaesumi, Purang; Mousavi, Parvin
TRUSformer: Improving prostate cancer detection from micro-ultrasound using attention and self-supervision Journal Article
In: International Journal of Computer Assisted Radiology and Surgery, vol. 18, no. 7, pp. 1193-1200, 2023.
@article{gilany2023,
title = {{TRUSformer}: Improving prostate cancer detection from micro-ultrasound using attention and self-supervision},
author = {Mahdi Gilany and Paul Wilson and Andrea Perera-Ortega and Amoon Jamzad and Minh Nguyen Nhat To and Fahimeh Fooladgar and Brian Wodlinger and Purang Abolmaesumi and Parvin Mousavi},
year = {2023},
date = {2023-01-01},
journal = {International Journal of Computer Assisted Radiology and Surgery},
volume = {18},
number = {7},
pages = {1193--1200},
publisher = {Springer International Publishing},
abstract = {Purpose
A large body of previous machine learning methods for ultrasound-based prostate cancer detection classify small regions of interest (ROIs) of ultrasound signals that lie within a larger needle trace corresponding to a prostate tissue biopsy (called biopsy core). These ROI-scale models suffer from weak labeling as histopathology results available for biopsy cores only approximate the distribution of cancer in the ROIs. ROI-scale models do not take advantage of contextual information that are normally considered by pathologists, i.e., they do not consider information about surrounding tissue and larger-scale trends when identifying cancer. We aim to improve cancer detection by taking a multi-scale, i.e., ROI-scale and biopsy core-scale, approach.
Methods
Our multi-scale approach combines (i) an “ROI-scale” model trained using self-supervised learning to extract features from small ROIs and (ii) a “core-scale …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
A large body of previous machine learning methods for ultrasound-based prostate cancer detection classify small regions of interest (ROIs) of ultrasound signals that lie within a larger needle trace corresponding to a prostate tissue biopsy (called biopsy core). These ROI-scale models suffer from weak labeling as histopathology results available for biopsy cores only approximate the distribution of cancer in the ROIs. ROI-scale models do not take advantage of contextual information that are normally considered by pathologists, i.e., they do not consider information about surrounding tissue and larger-scale trends when identifying cancer. We aim to improve cancer detection by taking a multi-scale, i.e., ROI-scale and biopsy core-scale, approach.
Methods
Our multi-scale approach combines (i) an “ROI-scale” model trained using self-supervised learning to extract features from small ROIs and (ii) a “core-scale …
Zhou, Meng; Jamzad, Amoon; Izard, Jason; Menard, Alexandre; Siemens, Robert; Mousavi, Parvin
Domain Transfer Through Image-to-Image Translation for Uncertainty-Aware Prostate Cancer Classification Journal Article
In: arXiv preprint arXiv:2307.00479, 2023.
@article{zhou2023,
title = {Domain Transfer Through Image-to-Image Translation for Uncertainty-Aware Prostate Cancer Classification},
author = {Meng Zhou and Amoon Jamzad and Jason Izard and Alexandre Menard and Robert Siemens and Parvin Mousavi},
year = {2023},
date = {2023-01-01},
journal = {arXiv preprint arXiv:2307.00479},
eprint = {2307.00479},
eprinttype = {arXiv},
abstract = {Prostate Cancer (PCa) is often diagnosed using High-resolution 3.0 Tesla(T) MRI, which has been widely established in clinics. However, there are still many medical centers that use 1.5T MRI units in the actual diagnostic process of PCa. In the past few years, deep learning-based models have been proven to be efficient on the PCa classification task and can be successfully used to support radiologists during the diagnostic process. However, training such models often requires a vast amount of data, and sometimes it is unobtainable in practice. Additionally, multi-source MRIs can pose challenges due to cross-domain distribution differences. In this paper, we have presented a novel approach for unpaired image-to-image translation of prostate mp-MRI for classifying clinically significant PCa, to be applied in data-constrained settings. First, we introduce domain transfer, a novel pipeline to translate unpaired 3.0T multi-parametric prostate MRIs to 1.5T, to increase the number of training data. Second, we estimate the uncertainty of our models through an evidential deep learning approach; and leverage the dataset filtering technique during the training process. Furthermore, we introduce a simple, yet efficient Evidential Focal Loss that incorporates the focal loss with evidential uncertainty to train our model. Our experiments demonstrate that the proposed method significantly improves the Area Under ROC Curve (AUC) by over 20% compared to the previous work (98.4% vs. 76.2%). We envision that providing prediction uncertainty to radiologists may help them focus more on uncertain cases and thus expedite the diagnostic process effectively. Our …},
internal-note = {NOTE(review): added canonical eprint fields; journal kept for backward compatibility with the site generator},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Fooladgar, Fahimeh; to, Minh Nguyen Nhat; Javadi, Golara; Sojoudi, Samira; Eshumani, Walid; Chang, Silvia; Black, Peter; Mousavi, Parvin; Abolmaesumi, Purang
Semi-supervised learning from coarse histopathology labels Journal Article
In: Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization, vol. 11, no. 4, pp. 1143-1150, 2023.
@article{fooladgar2023,
title = {Semi-supervised learning from coarse histopathology labels},
author = {Fahimeh Fooladgar and Minh Nguyen Nhat To and Golara Javadi and Samira Sojoudi and Walid Eshumani and Silvia Chang and Peter Black and Parvin Mousavi and Purang Abolmaesumi},
year = {2023},
date = {2023-01-01},
journal = {Computer Methods in Biomechanics and Biomedical Engineering: Imaging & Visualization},
volume = {11},
number = {4},
pages = {1143--1150},
publisher = {Taylor & Francis},
abstract = {Ultrasound imaging is commonly used to guide sampling the prostate tissue in transrectal biopsies, followed by detection of cancer through histopathological analysis and coarse labelling of sampled tissue. Ideally, the procedure should be improved by developing machine learning solutions that can identify the presence of cancer in ultrasound images to guide the biopsy procedure. Training a fully supervised learning model using coarse histopathology labels suffers from weakly annotated data which introduce label noise for each image pixel. To address this challenge, we propose a semi-supervised framework for learning with noisy labels. We leverage a two-component mixture model to cluster the training data into clean and noisy label samples based on their loss values. Then, during the semi-supervised training phase, we utilise the well-known MixMatch algorithm which incorporates consistency …},
internal-note = {NOTE(review): capitalized surname 'To' (was lowercase 'to', which BibTeX parses as a von particle); spelling matches this author's other entries},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Ndiaye, Fatou Bintou; Groves, Leah; Hisey, Rebecca; Ungi, Tamas; Diop, Idy; Mousavi, Parvin; Fichtinger, Gabor; Camara, Mamadou Samba
Desing and realization of a computer-assisted nephrostomy guidance system Journal Article
In: pp. 1-6, 2023.
@article{ndiaye2023,
title = {Desing and realization of a computer-assisted nephrostomy guidance system},
author = {Fatou Bintou Ndiaye and Leah Groves and Rebecca Hisey and Tamas Ungi and Idy Diop and Parvin Mousavi and Gabor Fichtinger and Mamadou Samba Camara},
year = {2023},
date = {2023-01-01},
pages = {1--6},
publisher = {IEEE},
abstract = {Background and purpose
Nowadays, computerized nephrostomy techniques exist. Although relatively safe, several factors make it difficult for inexperienced users. A computer-assisted nephrostomy guidance system has been studied to increase the success rate of this intervention and reduce the work and difficulties encountered by the actors.
Methods
To design the system, two methods will be studied. Following this study, this system was designed based on method 2. SmartSysNephro is composed of a hardware part whose manipulations made by the user are visualized and assisted by the computer. This nephrostomy procedure that the user simulates is monitored by webcam. Using the data from this Intel Real Sense webcam, allowed to propose a CNN YOLO model.
Results
The results obtained show that the objectives set have been achieved globally. The SmartSysNephro system gives real time warning …},
internal-note = {NOTE(review): entry has publisher/pages but no journal -- likely an IEEE conference paper; confirm venue and consider @inproceedings. Title spelling 'Desing' matches the published record and is left unchanged},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Nowadays, computerized nephrostomy techniques exist. Although relatively safe, several factors make it difficult for inexperienced users. A computer-assisted nephrostomy guidance system has been studied to increase the success rate of this intervention and reduce the work and difficulties encountered by the actors.
Methods
To design the system, two methods will be studied. Following this study, this system was designed based on method 2. SmartSysNephro is composed of a hardware part whose manipulations made by the user are visualized and assisted by the computer. This nephrostomy procedure that the user simulates is monitored by webcam. Using the data from this Intel Real Sense webcam, allowed to propose a CNN YOLO model.
Results
The results obtained show that the objectives set have been achieved globally. The SmartSysNephro system gives real time warning …
Wilson, Paul FR; Gilany, Mahdi; Jamzad, Amoon; Fooladgar, Fahimeh; To, Minh Nguyen Nhat; Wodlinger, Brian; Abolmaesumi, Purang; Mousavi, Parvin
Self-supervised learning with limited labeled data for prostate cancer detection in high frequency ultrasound Journal Article
In: IEEE Transactions on Ultrasonics, Ferroelectrics, and Frequency Control, 2023.
@article{wilson2023,
author = {Paul FR Wilson and Mahdi Gilany and Amoon Jamzad and Fahimeh Fooladgar and Minh Nguyen Nhat To and Brian Wodlinger and Purang Abolmaesumi and Parvin Mousavi},
title = {Self-supervised learning with limited labeled data for prostate cancer detection in high frequency ultrasound},
journal = {IEEE Transactions on Ultrasonics, Ferroelectrics, and Frequency Control},
publisher = {IEEE},
year = {2023},
date = {2023-01-01},
abstract = {Deep learning-based analysis of high-frequency, high-resolution micro-ultrasound data shows great promise for prostate cancer (PCa) detection. Previous approaches to analysis of ultrasound data largely follow a supervised learning (SL) paradigm. Ground truth labels for ultrasound images used for training deep networks often include coarse annotations generated from the histopathological analysis of tissue samples obtained via biopsy. This creates inherent limitations on the availability and quality of labeled data, posing major challenges to the success of SL methods. However, unlabeled prostate ultrasound data are more abundant. In this work, we successfully apply self-supervised representation learning to micro-ultrasound data. Using ultrasound data from 1028 biopsy cores of 391 subjects obtained in two clinical centers, we demonstrate that feature representations learned with this method can be used to …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Greenspan, Hayit; Madabhushi, Anant; Mousavi, Parvin; Salcudean, Septimiu; Duncan, James; Syeda-Mahmood, Tanveer; Taylor, Russell
Medical Image Computing and Computer Assisted Intervention–MICCAI 2023: 26th International Conference, Vancouver, BC, Canada, October 8–12, 2023, Proceedings, Part V Journal Article
In: vol. 14224, 2023.
@article{greenspan2023,
title = {Medical Image Computing and Computer Assisted Intervention--{MICCAI} 2023: 26th International Conference, Vancouver, {BC}, Canada, October 8--12, 2023, Proceedings, Part V},
author = {Hayit Greenspan and Anant Madabhushi and Parvin Mousavi and Septimiu Salcudean and James Duncan and Tanveer Syeda-Mahmood and Russell Taylor},
year = {2023},
date = {2023-01-01},
volume = {14224},
publisher = {Springer Nature},
abstract = {The ten-volume set LNCS 14220, 14221, 14222, 14223, 14224, 14225, 14226, 14227, 14228, and 14229 constitutes the refereed proceedings of the 26th International Conference on Medical Image Computing and Computer-Assisted Intervention, MICCAI 2023, which was held in Vancouver, Canada, in October 2023. The 730 revised full papers presented were carefully reviewed and selected from a total of 2250 submissions. The papers are organized in the following topical sections: Part I: Machine learning with limited supervision and machine learning–transfer learning; Part II: Machine learning–learning strategies; machine learning–explainability, bias, and uncertainty; Part III: Machine learning–explainability, bias and uncertainty; image segmentation; Part IV: Image segmentation; Part V: Computer-aided diagnosis; Part VI: Computer-aided diagnosis; computational pathology; Part VII: Clinical applications–abdomen; clinical applications–breast; clinical applications–cardiac; clinical applications–dermatology; clinical applications–fetal imaging; clinical applications–lung; clinical applications–musculoskeletal; clinical applications–oncology; clinical applications–ophthalmology; clinical applications–vascular; Part VIII: Clinical applications–neuroimaging; microscopy; Part IX: Image-guided intervention, surgical planning, and data science; Part X: Image reconstruction and image registration.},
internal-note = {NOTE(review): this is an edited proceedings volume -- the listed people are editors; consider @proceedings with an editor field instead of @article/author; confirm against the publisher record},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Jamzad, Amoon; Fooladgar, Fahimeh; Connolly, Laura; Srikanthan, Dilakshan; Syeda, Ayesha; Kaufmann, Martin; Ren, Kevin YM; Merchant, Shaila; Engel, Jay; Varma, Sonal; Fichtinger, Gabor; Rudan, John F; Mousavi, Parvin
Bridging Ex-Vivo Training and Intra-operative Deployment for Surgical Margin Assessment with Evidential Graph Transformer Proceedings Article
In: pp. 562-571, Springer Nature Switzerland, 2023.
@inproceedings{jamzad2023,
title = {Bridging Ex-Vivo Training and Intra-operative Deployment for Surgical Margin Assessment with Evidential Graph Transformer},
author = {Amoon Jamzad and Fahimeh Fooladgar and Laura Connolly and Dilakshan Srikanthan and Ayesha Syeda and Martin Kaufmann and Kevin YM Ren and Shaila Merchant and Jay Engel and Sonal Varma and Gabor Fichtinger and John F Rudan and Parvin Mousavi},
booktitle = {Medical Image Computing and Computer Assisted Intervention -- {MICCAI} 2023},
year = {2023},
date = {2023-01-01},
pages = {562--571},
publisher = {Springer Nature Switzerland},
abstract = {PURPOSE
The use of intra-operative mass spectrometry along with Graph Transformer models showed promising results for margin detection on ex-vivo data. Although highly interpretable, these methods lack the ability to handle the uncertainty associated with intra-operative decision making. In this paper for the first time, we propose Evidential Graph Transformer network, a combination of attention mapping and uncertainty estimation to increase the performance and interpretability of surgical margin assessment.
METHODS
The Evidential Graph Transformer was formulated to output the uncertainty estimation along with intermediate attentions. The performance of the model was compared with different baselines in an ex-vivo cross-validation scheme, with extensive ablation study. The association of the model with clinical features were explored. The model was further validated for a prospective ex-vivo data, as …},
internal-note = {NOTE(review): booktitle inferred from publisher/year/pages consistent with the MICCAI 2023 proceedings -- confirm against the publisher record},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
The use of intra-operative mass spectrometry along with Graph Transformer models showed promising results for margin detection on ex-vivo data. Although highly interpretable, these methods lack the ability to handle the uncertainty associated with intra-operative decision making. In this paper for the first time, we propose Evidential Graph Transformer network, a combination of attention mapping and uncertainty estimation to increase the performance and interpretability of surgical margin assessment.
METHODS
The Evidential Graph Transformer was formulated to output the uncertainty estimation along with intermediate attentions. The performance of the model was compared with different baselines in an ex-vivo cross-validation scheme, with extensive ablation study. The association of the model with clinical features were explored. The model was further validated for a prospective ex-vivo data, as …
Tomalty, Diane; Giovannetti, Olivia; Velikonja, Leah; Munday, Jasica; Kaufmann, Martin; Iaboni, Natasha; Jamzad, Amoon; Rubino, Rachel; Fichtinger, Gabor; Mousavi, Parvin; Nicol, Christopher JB; Rudan, John F; Adams, Michael A
Molecular characterization of human peripheral nerves using desorption electrospray ionization mass spectrometry imaging Journal Article
In: Journal of Anatomy, vol. 243, no. 5, pp. 758-769, 2023.
@article{tomalty2023,
title = {Molecular characterization of human peripheral nerves using desorption electrospray ionization mass spectrometry imaging},
author = {Diane Tomalty and Olivia Giovannetti and Leah Velikonja and Jasica Munday and Martin Kaufmann and Natasha Iaboni and Amoon Jamzad and Rachel Rubino and Gabor Fichtinger and Parvin Mousavi and Christopher JB Nicol and John F Rudan and Michael A Adams},
year = {2023},
date = {2023-01-01},
journal = {Journal of Anatomy},
volume = {243},
number = {5},
pages = {758--769},
publisher = {Wiley},
abstract = {Desorption electrospray ionization mass spectrometry imaging (DESI‐MSI) is a molecular imaging method that can be used to elucidate the small‐molecule composition of tissues and map their spatial information using two‐dimensional ion images. This technique has been used to investigate the molecular profiles of variety of tissues, including within the central nervous system, specifically the brain and spinal cord. To our knowledge, this technique has yet to be applied to tissues of the peripheral nervous system (PNS). Data generated from such analyses are expected to advance the characterization of these structures. The study aimed to: (i) establish whether DESI‐MSI can discriminate the molecular characteristics of peripheral nerves and distinguish them from surrounding tissues and (ii) assess whether different peripheral nerve subtypes are characterized by unique molecular profiles. Four different nerves for …},
internal-note = {NOTE(review): publisher inferred from Journal of Anatomy -- confirm; abstract contains non-ASCII hyphens from the source export, which classic BibTeX may garble},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Imtiaz, Tashifa; Nanayakkara, Jina; Fang, Alexis; Jomaa, Danny; Mayotte, Harrison; Damiani, Simona; Javed, Fiza; Jones, Tristan; Kaczmarek, Emily; Adebayo, Flourish Omolara; Imtiaz, Uroosa; Li, Yiheng; Zhang, Richard; Mousavi, Parvin; Renwick, Neil; Tyryshkin, Kathrin
A user-driven machine learning approach for RNA-based sample discrimination and hierarchical classification Journal Article
In: STAR Protocols, vol. 4, no. 4, pp. 102661, 2023.
@article{imtiaz2023,
title = {A user-driven machine learning approach for {RNA}-based sample discrimination and hierarchical classification},
author = {Tashifa Imtiaz and Jina Nanayakkara and Alexis Fang and Danny Jomaa and Harrison Mayotte and Simona Damiani and Fiza Javed and Tristan Jones and Emily Kaczmarek and Flourish Omolara Adebayo and Uroosa Imtiaz and Yiheng Li and Richard Zhang and Parvin Mousavi and Neil Renwick and Kathrin Tyryshkin},
year = {2023},
date = {2023-01-01},
journal = {STAR Protocols},
volume = {4},
number = {4},
pages = {102661},
publisher = {Elsevier},
abstract = {RNA-based sample discrimination and classification can be used to provide biological insights and/or distinguish between clinical groups. However, finding informative differences between sample groups can be challenging due to the multidimensional and noisy nature of sequencing data. Here, we apply a machine learning approach for hierarchical discrimination and classification of samples with high-dimensional miRNA expression data. Our protocol comprises data preprocessing, unsupervised learning, feature selection, and machine-learning-based hierarchical classification, alongside open-source MATLAB code.},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Elmi, Hanad; Jamzad, Amoon; Sharp, Mackenzie; Rodgers, Jessica R; Kaufmann, Martin; Jamaspishvili, Tamara; Iseman, Rachael; Berman, David; Rudan, J; Fichtinger, Gabor; Mousavi, Parvin
ViPRE: an open-source software implementation for end-to-end analysis of mass spectrometry data Proceedings Article
In: pp. 487-494, SPIE, 2023.
@inproceedings{elmi2023,
title = {{ViPRE}: an open-source software implementation for end-to-end analysis of mass spectrometry data},
author = {Hanad Elmi and Amoon Jamzad and Mackenzie Sharp and Jessica R Rodgers and Martin Kaufmann and Tamara Jamaspishvili and Rachael Iseman and David Berman and J Rudan and Gabor Fichtinger and Parvin Mousavi},
booktitle = {Medical Imaging 2023: Image-Guided Procedures, Robotic Interventions, and Modeling},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {487--494},
publisher = {SPIE},
abstract = {Mass Spectrometry Imaging (MSI) is a powerful tool capable of visualizing molecular patterns to identify disease markers in tissue analysis. However, data analysis is computationally heavy and currently time-consuming as there is no single platform capable of performing the entire preprocessing, visualization, and analysis pipeline end-to-end. Using different software tools and file formats required for such tools also makes the process prone to error. The purpose of this work is to develop a free, open-source software implementation called “Visualization, Preprocessing, and Registration Environment” (ViPRE), capable of end-to-end analysis of MSI data. ViPRE was developed to provide various functionalities required for MSI analysis including data import, data visualization, data registration, Region of Interest (ROI) selection, spectral data alignment and data analysis. The software implementation is offered as an …},
internal-note = {NOTE(review): booktitle inferred from SPIE proceedings volume 12466 -- confirm against the publisher record},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Ehrlich, Josh; Yeung, Chris; Kaufman, Martin; Jamzad, Amoon; Rudan, J; Mousavi, Parvin; Fichtinger, Gabor; Ungi, Tamas
Determining the time-delay of a mass spectrometry-based tissue sensor Proceedings Article
In: pp. 324-327, SPIE, 2023.
@inproceedings{ehrlich2023,
title = {Determining the time-delay of a mass spectrometry-based tissue sensor},
author = {Josh Ehrlich and Chris Yeung and Martin Kaufman and Amoon Jamzad and J Rudan and Parvin Mousavi and Gabor Fichtinger and Tamas Ungi},
booktitle = {Medical Imaging 2023: Image-Guided Procedures, Robotic Interventions, and Modeling},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {324--327},
publisher = {SPIE},
abstract = {Breast cancer commonly requires surgical treatment. A procedure used to remove breast cancer is lumpectomy, which removes a minimal healthy tissue margin surrounding the tumor, called a negative margin. A cancer-free margin is difficult to achieve because tumors are not visible or palpable, and the breast deforms during surgery. One notable solution is Rapid Evaporative Ionization Mass Spectrometry (REIMS), which differentiates tumor from healthy tissue with high accuracy from the vapor generated by the surgical cautery. REIMS combined with navigation could detect where the surgical cautery breaches tumor tissue. However, fusing position tracking and REIMS data for navigation is challenging. REIMS has a time-delay dependent on a series of factors. Our objective was to evaluate REIMS time-delay for surgical navigation. The average time-delay of REIMS classifications was measured by video …},
internal-note = {NOTE(review): booktitle inferred from SPIE proceedings volume 12466 -- confirm. Author 'Martin Kaufman' is spelled 'Kaufmann' in related entries (e.g. yeung2023) -- verify correct spelling before changing},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Morton, David; Connolly, Laura; Groves, Leah; Sunderland, Kyle; Jamzad, Amoon; Rudan, John F; Fichtinger, Gabor; Ungi, Tamas; Mousavi, Parvin
Tracked tissue sensing for tumor bed inspection Proceedings Article
In: pp. 378-385, SPIE, 2023.
@inproceedings{morton2023,
title = {Tracked tissue sensing for tumor bed inspection},
author = {David Morton and Laura Connolly and Leah Groves and Kyle Sunderland and Amoon Jamzad and John F Rudan and Gabor Fichtinger and Tamas Ungi and Parvin Mousavi},
booktitle = {Medical Imaging 2023: Image-Guided Procedures, Robotic Interventions, and Modeling},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {378--385},
publisher = {SPIE},
abstract = {Up to 30% of breast-conserving surgery patients require secondary surgery to remove cancerous tissue missed in the initial intervention. We hypothesize that tracked tissue sensing can improve the success rate of breast-conserving surgery. Tissue sensor tracking allows the surgeon to intraoperatively scan the tumor bed for leftover cancerous tissue. In this study, we characterize the performance of our tracked optical scanning testbed using an experimental pipeline. We assess the Dice similarity coefficient, accuracy, and latency of the testbed.},
internal-note = {NOTE(review): booktitle inferred from SPIE proceedings volume 12466 -- confirm against the publisher record},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Srikanthan, Dilakshan; Kaufmann, Martin; Jamzad, Amoon; Syeda, Ayesha; Santilli, Alice; Sedghi, Alireza; Fichtinger, Gabor; Purzner, Jamie; Rudan, John; Purzner, Teresa; Mousavi, Parvin
Attention-based multi-instance learning for improved glioblastoma detection using mass spectrometry Proceedings Article
In: pp. 248-253, SPIE, 2023.
@inproceedings{srikanthan2023,
title = {Attention-based multi-instance learning for improved glioblastoma detection using mass spectrometry},
author = {Dilakshan Srikanthan and Martin Kaufmann and Amoon Jamzad and Ayesha Syeda and Alice Santilli and Alireza Sedghi and Gabor Fichtinger and Jamie Purzner and John Rudan and Teresa Purzner and Parvin Mousavi},
booktitle = {Medical Imaging 2023: Image-Guided Procedures, Robotic Interventions, and Modeling},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {248--253},
publisher = {SPIE},
abstract = {Glioblastoma Multiforme (GBM) is the most common and most lethal primary brain tumor in adults with a five-year survival rate of 5%. The current standard of care and survival rate have remained largely unchanged due to the degree of difficulty in surgically removing these tumors, which plays a crucial role in survival, as better surgical resection leads to longer survival times. Thus, novel technologies need to be identified to improve resection accuracy. Our study features a curated database of GBM and normal brain tissue specimens, which we used to train and validate a multi-instance learning model for GBM detection via rapid evaporative ionization mass spectrometry. This method enables real-time tissue typing. The specimens were collected by a surgeon, reviewed by a pathologist, and sampled with an electrocautery device. The dataset comprised 276 normal tissue burns and 321 GBM tissue burns. Our multi …},
internal-note = {NOTE(review): booktitle inferred from SPIE proceedings volume 12466 -- confirm against the publisher record},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Kitner, Nicole; Rodgers, Jessica R; Ungi, Tamas; Korzeniowski, Martin; Olding, Timothy; Mousavi, Parvin; Fichtinger, Gabor
Multi-catheter modelling in reconstructed 3D transrectal ultrasound images from prostate brachytherapy Proceedings Article
In: pp. 126-135, SPIE, 2023.
@inproceedings{kitner2023,
title = {Multi-catheter modelling in reconstructed {3D} transrectal ultrasound images from prostate brachytherapy},
author = {Nicole Kitner and Jessica R Rodgers and Tamas Ungi and Martin Korzeniowski and Timothy Olding and Parvin Mousavi and Gabor Fichtinger},
booktitle = {Medical Imaging 2023: Image-Guided Procedures, Robotic Interventions, and Modeling},
year = {2023},
date = {2023-01-01},
volume = {12466},
pages = {126--135},
publisher = {SPIE},
abstract = {High-dose-rate brachytherapy is an accepted standard-of-care treatment for prostate cancer. In this procedure, catheters are inserted using three-dimensional (3D) transrectal ultrasound image-guidance. Their positions are manually segmented for treatment planning and delivery. The transverse ultrasound sweep, which is subject to tip and depth error for catheter localization, is a commonly used ultrasound imaging option available for image acquisition. We propose a two-step pipeline that uses a deep-learning network and curve fitting to automatically localize and model catheters in transversely reconstructed 3D ultrasound images. In the first step, a 3D U-Net was trained to automatically segment all catheters in a 3D ultrasound image. Following this step, curve fitting was implemented to detect the shapes of individual catheters using polynomial fitting. Of the 343 catheters (from 20 patients) in the testing data, the …},
internal-note = {NOTE(review): booktitle inferred from SPIE proceedings volume 12466 -- confirm against the publisher record},
keywords = {},
pubstate = {published},
tppubtype = {inproceedings}
}
Kaufmann, Martin; Iaboni, Natasha; Jamzad, Amoon; Hurlbut, David; Ren, Kevin Yi Mi; Rudan, John F; Mousavi, Parvin; Fichtinger, Gabor; Varma, Sonal; Caycedo-Marulanda, Antonio; Nicol, Christopher JB
Metabolically active zones involving fatty acid elongation delineated by DESI-MSI correlate with pathological and prognostic features of colorectal cancer Journal Article
In: Metabolites, vol. 13, no. 4, pp. 508, 2023.
@article{kaufmann2023,
title = {Metabolically active zones involving fatty acid elongation delineated by {DESI-MSI} correlate with pathological and prognostic features of colorectal cancer},
author = {Martin Kaufmann and Natasha Iaboni and Amoon Jamzad and David Hurlbut and Kevin Yi Mi Ren and John F Rudan and Parvin Mousavi and Gabor Fichtinger and Sonal Varma and Antonio Caycedo-Marulanda and Christopher JB Nicol},
year = {2023},
date = {2023-01-01},
journal = {Metabolites},
volume = {13},
number = {4},
pages = {508},
publisher = {MDPI},
abstract = {Colorectal cancer (CRC) is the second leading cause of cancer deaths. Despite recent advances, five-year survival rates remain largely unchanged. Desorption electrospray ionization mass spectrometry imaging (DESI) is an emerging nondestructive metabolomics-based method that retains the spatial orientation of small-molecule profiles on tissue sections, which may be validated by ‘gold standard’ histopathology. In this study, CRC samples were analyzed by DESI from 10 patients undergoing surgery at Kingston Health Sciences Center. The spatial correlation of the mass spectral profiles was compared with histopathological annotations and prognostic biomarkers. Fresh frozen sections of representative colorectal cross sections and simulated endoscopic biopsy samples containing tumour and non-neoplastic mucosa for each patient were generated and analyzed by DESI in a blinded fashion. Sections were then hematoxylin and eosin (H and E) stained, annotated by two independent pathologists, and analyzed. Using PCA/LDA-based models, DESI profiles of the cross sections and biopsies achieved 97% and 75% accuracies in identifying the presence of adenocarcinoma, using leave-one-patient-out cross validation. Among the m/z ratios exhibiting the greatest differential abundance in adenocarcinoma were a series of eight long-chain or very-long-chain fatty acids, consistent with molecular and targeted metabolomics indicators of de novo lipogenesis in CRC tissue. Sample stratification based on the presence of lymphovascular invasion (LVI), a poor CRC prognostic indicator, revealed the abundance of oxidized phospholipids, suggestive …},
keywords = {},
pubstate = {published},
tppubtype = {article}
}
Chen, Brian; Maslove, David M; Curran, Jeffrey D; Hamilton, Alexander; Laird, Philip R; Mousavi, Parvin; Sibley, Stephanie
A deep learning model for the classification of atrial fibrillation in critically ill patients Journal Article
In: Intensive care medicine experimental, vol. 11, no. 1, pp. 2, 2023.
@article{chen2023,
  author    = {Brian Chen and David M Maslove and Jeffrey D Curran and Alexander Hamilton and Philip R Laird and Parvin Mousavi and Stephanie Sibley},
  title     = {A deep learning model for the classification of atrial fibrillation in critically ill patients},
  journal   = {Intensive care medicine experimental},
  volume    = {11},
  number    = {1},
  pages     = {2},
  publisher = {Springer International Publishing},
  year      = {2023},
  date      = {2023-01-01},
  abstract  = {Background
Atrial fibrillation (AF) is the most common cardiac arrhythmia in the intensive care unit and is associated with increased morbidity and mortality. New-onset atrial fibrillation (NOAF) is often initially paroxysmal and fleeting, making it difficult to diagnose, and therefore difficult to understand the true burden of disease. Automated algorithms to detect AF in the ICU have been advocated as a means to better quantify its true burden.
Results
We used a publicly available 12-lead ECG dataset to train a deep learning model for the classification of AF. We then conducted an external independent validation of the model using continuous telemetry data from 984 critically ill patients collected in our institutional database. Performance metrics were stratified by signal quality, classified as either clean or noisy. The deep learning model was able to classify AF with an overall sensitivity of 84%, specificity of 89%, positive …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Atrial fibrillation (AF) is the most common cardiac arrhythmia in the intensive care unit and is associated with increased morbidity and mortality. New-onset atrial fibrillation (NOAF) is often initially paroxysmal and fleeting, making it difficult to diagnose, and therefore difficult to understand the true burden of disease. Automated algorithms to detect AF in the ICU have been advocated as a means to better quantify its true burden.
Results
We used a publicly available 12-lead ECG dataset to train a deep learning model for the classification of AF. We then conducted an external independent validation of the model using continuous telemetry data from 984 critically ill patients collected in our institutional database. Performance metrics were stratified by signal quality, classified as either clean or noisy. The deep learning model was able to classify AF with an overall sensitivity of 84%, specificity of 89%, positive …
Morton, David; Connolly, Laura; Groves, Leah; Sunderland, Kyle; Ungi, Tamas; Jamzad, Amoon; Kaufmann, Martin; Ren, Kevin; Rudan, John F; Fichtinger, Gabor; Mousavi, Parvin
Development of a Research Testbed for Intraoperative Optical Spectroscopy Tumor Margin Assessment Journal Article
In: Acta Polytechnica Hungarica, vol. 20, no. 8, 2023.
@article{morton2023b,
  author    = {David Morton and Laura Connolly and Leah Groves and Kyle Sunderland and Tamas Ungi and Amoon Jamzad and Martin Kaufmann and Kevin Ren and John F Rudan and Gabor Fichtinger and Parvin Mousavi},
  title     = {Development of a Research Testbed for Intraoperative Optical Spectroscopy Tumor Margin Assessment},
  journal   = {Acta Polytechnica Hungarica},
  volume    = {20},
  number    = {8},
  year      = {2023},
  date      = {2023-01-01},
  abstract  = {Surgical intervention is a primary treatment option for early-stage cancers. However, the difficulty of intraoperative tumor margin assessment contributes to a high rate of incomplete tumor resection, necessitating revision surgery. This work aims to develop and evaluate a prototype of a tracked tissue sensing research testbed for navigated tumor margin assessment. Our testbed employs diffuse reflection broadband optical spectroscopy for tissue characterization and electromagnetic tracking for navigation. Spectroscopy data and a trained classifier are used to predict tissue types. Navigation allows these predictions to be superimposed on the scanned tissue, creating a spatial classification map. We evaluate the real-time operation of our testbed using an ex vivo tissue phantom. Furthermore, we use the testbed to interrogate ex vivo human kidney tissue and establish a modeling pipeline to classify cancerous and non-neoplastic tissue. The testbed recorded latencies of 125±11 ms and 167±26 ms for navigation and classification respectively. The testbed achieved a Dice similarity coefficient of 93%, and an accuracy of 94% for the spatial classification. These results demonstrated the capabilities of our testbed for the real-time interrogation of an arbitrary tissue volume. Our modeling pipeline attained a balanced accuracy of 91%±4% on the classification of cancerous and non-neoplastic human kidney tissue. Our tracked tissue sensing research testbed prototype shows potential for facilitating the development and evaluation of intraoperative tumor margin assessment technologies across tissue types. The capacity to assess tumor margin status …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Greenspan, Hayit; Taylor, Russell; Madabhushi, Anant; Deike-Hofmann, Katerina; Syeda-Mahmood, Tanveer; Radbruch, Alexander; Pinetz, Thomas; Mousavi, Parvin; Duncan, James; Kobler, Erich; Salcudean, Septimiu; Effland, Alexander; Haase, Robert
Faithful Synthesis of Low-Dose Contrast-Enhanced Brain MRI Scans Using Noise-Preserving Conditional GANs Journal Article
In: no. DZNE-2023-01039, 2023.
@article{greenspan2023a,
  author    = {Hayit Greenspan and Russell Taylor and Anant Madabhushi and Katerina Deike-Hofmann and Tanveer Syeda-Mahmood and Alexander Radbruch and Thomas Pinetz and Parvin Mousavi and James Duncan and Erich Kobler and Septimiu Salcudean and Alexander Effland and Robert Haase},
  title     = {Faithful Synthesis of Low-Dose Contrast-Enhanced Brain MRI Scans Using Noise-Preserving Conditional GANs},
  number    = {DZNE-2023-01039},
  publisher = {Clinical Neuroimaging},
  year      = {2023},
  date      = {2023-01-01},
  abstract  = {Today Gadolinium-based contrast agents (GBCA) are indispensable in Magnetic Resonance Imaging (MRI) for diagnosing various diseases. However, GBCAs are expensive and may accumulate in patients with potential side effects, thus dose-reduction is recommended. Still, it is unclear to which extent the GBCA dose can be reduced while preserving the diagnostic value–especially in pathological regions. To address this issue, we collected brain MRI scans at numerous non-standard GBCA dosages and developed a conditional GAN model for synthesizing corresponding images at fractional dose levels. Along with the adversarial loss, we advocate a novel content loss function based on the Wasserstein distance of locally paired patch statistics for the faithful preservation of noise. Our numerical experiments show that conditional GANs are suitable for generating images at different GBCA dose levels and can be …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Bayat, Sharareh; Jamzad, Amoon; Zobeiry, Navid; Poursartip, Anoush; Mousavi, Parvin; Abolmaesumi, Purang
Temporal enhanced Ultrasound: A new method for detection of porosity defects in composites Journal Article
In: Composites Part A: Applied Science and Manufacturing, vol. 164, pp. 107259, 2023.
@article{bayat2023,
  author    = {Sharareh Bayat and Amoon Jamzad and Navid Zobeiry and Anoush Poursartip and Parvin Mousavi and Purang Abolmaesumi},
  title     = {Temporal enhanced Ultrasound: A new method for detection of porosity defects in composites},
  journal   = {Composites Part A: Applied Science and Manufacturing},
  volume    = {164},
  pages     = {107259},
  publisher = {Elsevier},
  year      = {2023},
  date      = {2023-01-01},
  abstract  = {Non-Destructive Evaluation (NDE) methods are commonly employed for identifying porosity, which is one of the most common manufacturing defects observed in composite structures. Among current widely used approaches are conventional ultrasonic methods such as pulse-echo analysis based on loss of signal amplitude. Application of these conventional ultrasonic methods, however, can be challenging in cases where the loss of signal is negligible, such as with porosity. In this paper, we propose Temporal-enhanced Ultrasound (TeUS) as a novel ultrasound-based imaging technique for NDE of composites. TeUS represents the analysis of a sequence of ultrasound images obtained from composites by varying an image acquisition parameter, such as the focal point, over the sequence. We present details on the analytical formulation of TeUS, followed by extensive simulation and experimental results to …},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
2022
Gilany, Mahdi; Wilson, Paul; Jamzad, Amoon; Fooladgar, Fahimeh; To, Minh Nguyen Nhat; Wodlinger, Brian; Abolmaesumi, Purang; Mousavi, Parvin
Towards Confident Detection of Prostate Cancer using High Resolution Micro-ultrasound Proceedings Article Forthcoming
In: Medical Image Computing and Computer Assisted Interventions (MICCAI 2022), Forthcoming.
@inproceedings{nokey,
  author    = {Mahdi Gilany and Paul Wilson and Amoon Jamzad and Fahimeh Fooladgar and Minh Nguyen Nhat To and Brian Wodlinger and Purang Abolmaesumi and Parvin Mousavi},
  title     = {Towards Confident Detection of Prostate Cancer using High Resolution Micro-ultrasound},
  booktitle = {Medical Image Computing and Computer Assisted Interventions (MICCAI 2022)},
  year      = {2022},
  date      = {2022-09-18},
  urldate   = {2022-09-18},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {inproceedings},
}
Javadi, Golara; Samadi, Samareh; Bayat, Sharareh; Sojoudi, Samira; Hurtado, Antonio; Eshumani, Walid; Chang, Silvia; Black, Peter; Mousavi, Parvin; Abolmaesumi, Purang
Training Deep Neural Networks with Noisy Clinical Labels: Towards Accurate Detection of Prostate Cancer in US Data Journal Article
In: International Journal of Computer Assisted Radiology and Surgery (IJCARS), 2022.
@article{nokeyb,
  author    = {Golara Javadi and Samareh Samadi and Sharareh Bayat and Samira Sojoudi and Antonio Hurtado and Walid Eshumani and Silvia Chang and Peter Black and Parvin Mousavi and Purang Abolmaesumi},
  title     = {Training Deep Neural Networks with Noisy Clinical Labels: Towards Accurate Detection of Prostate Cancer in US Data},
  journal   = {International Journal of Computer Assisted Radiology and Surgery (IJCARS)},
  year      = {2022},
  date      = {2022-06-21},
  urldate   = {2022-06-21},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Hu, Zoe; Fauerbach, Paola Nasute; Yeung, Chris; Ungi, Tamas; Rudan, John; Engel, Cecil Jay; Mousavi, Parvin; Fichtinger, Gabor; Jabs, Doris
Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation Journal Article
In: International Journal of Computer Assisted Radiology and Surgery (IJCARS), 2022.
@article{Hu2022,
  author    = {Zoe Hu and Paola Nasute Fauerbach and Chris Yeung and Tamas Ungi and John Rudan and Cecil Jay Engel and Parvin Mousavi and Gabor Fichtinger and Doris Jabs},
  title     = {Real-time automatic tumor segmentation for ultrasound-guided breast-conserving surgery navigation},
  journal   = {International Journal of Computer Assisted Radiology and Surgery (IJCARS)},
  doi       = {10.1007/s11548-022-02658-4},
  year      = {2022},
  date      = {2022-05-19},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {article},
}
Fooladgar, Fahimeh; To, Minh Nguyen Nhat; Javadi, Golara; Samadi, Samareh; Bayat, Sharareh; Sojoudi, Samira; Eshumani, Walid; Hurtado, Antonio; Chang, Silvia; Black, Peter; Mousavi, Parvin; Abolmaesumi, Purang
Uncertainty-aware Deep Ensemble Model for Targeted Ultrasound-guided Prostate Biopsy Proceedings Article Forthcoming
In: IEEE International Symposium on Biomedical Imaging (ISBI) 2022, Forthcoming.
@inproceedings{Fooladgar2022,
  author    = {Fahimeh Fooladgar and Minh Nguyen Nhat To and Golara Javadi and Samareh Samadi and Sharareh Bayat and Samira Sojoudi and Walid Eshumani and Antonio Hurtado and Silvia Chang and Peter Black and Parvin Mousavi and Purang Abolmaesumi},
  title     = {Uncertainty-aware Deep Ensemble Model for Targeted Ultrasound-guided Prostate Biopsy},
  booktitle = {IEEE International Symposium on Biomedical Imaging (ISBI) 2022},
  year      = {2022},
  date      = {2022-03-28},
  urldate   = {2022-03-28},
  keywords  = {},
  pubstate  = {forthcoming},
  tppubtype = {inproceedings},
}
Perera-Ortega, Andrea; Fooladgar, Fahimeh; To, Minh Nguyen Nhat; Jamzad, Amoon; Mousavi, Parvin; Abolmaesumi, Purang
Unsupervised Learning for Classification of Prostate Cancer Severity Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) - BEST PITCH AWARD, 2022.
@conference{Perera-Ortega2022,
  author    = {Andrea Perera-Ortega and Fahimeh Fooladgar and Minh Nguyen Nhat To and Amoon Jamzad and Parvin Mousavi and Purang Abolmaesumi},
  title     = {Unsupervised Learning for Classification of Prostate Cancer Severity},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO)},
  note      = {Best Pitch Award},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Patel, Dhruv; Ziegler, Erik; Lewis, Rob; Mousavi, Parvin; Sedghi, Alireza
Deep Image Clustering for Standardization of Radiological Workflows Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022, 2022.
@conference{Patel2022b,
  author    = {Dhruv Patel and Erik Ziegler and Rob Lewis and Parvin Mousavi and Alireza Sedghi},
  title     = {Deep Image Clustering for Standardization of Radiological Workflows},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Zhou, Meng; Jamzad, Amoon; Izard, Jason; Menard, Alexandre; Siemens, Robert; Mousavi, Parvin
Domain Transfer through Image-to-Image Translation in Prostate Cancer Detection Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022, 2022.
@conference{Zhou2022,
  author    = {Meng Zhou and Amoon Jamzad and Jason Izard and Alexandre Menard and Robert Siemens and Parvin Mousavi},
  title     = {Domain Transfer through Image-to-Image Translation in Prostate Cancer Detection},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Kitner, Nicole; Rodgers, Jessica; Ungi, Tamas; Korzeniowski, Martin; Olding, Tim; Joshi, Chandra; Mousavi, Parvin; Fichtinger, Gabor
Automated catheter segmentation in 3D ultrasound images from high-dose-rate prostate brachytherapy Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) - BEST PITCH AWARD, 2022.
@conference{Kitner2022,
  author    = {Nicole Kitner and Jessica Rodgers and Tamas Ungi and Martin Korzeniowski and Tim Olding and Chandra Joshi and Parvin Mousavi and Gabor Fichtinger},
  title     = {Automated catheter segmentation in 3D ultrasound images from high-dose-rate prostate brachytherapy},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO)},
  note      = {Best Pitch Award},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Ehrlich, Joshua; Asselin, Mark; Jamzad, Amoon; Kaufmann, Martin; Rodgers, Jessica; Rudan, John; Ungi, Tamas; Mousavi, Parvin; Fichtinger, Gabor
Cautery state classification for navigated iKnife surgery Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022, 2022.
@conference{nokeyc,
  author    = {Joshua Ehrlich and Mark Asselin and Amoon Jamzad and Martin Kaufmann and Jessica Rodgers and John Rudan and Tamas Ungi and Parvin Mousavi and Gabor Fichtinger},
  title     = {Cautery state classification for navigated iKnife surgery},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Wilson, Paul; Gilany, Mahdi; Jamzad, Amoon; To, Minh Nguyen Nhat; Wodlinger, Brian; Abolmaesumi, Purang; Mousavi, Parvin
Vision Transformers for Prostate Cancer Detection from Ultrasound Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022, 2022.
@conference{Wilson2022,
  author    = {Paul Wilson and Mahdi Gilany and Amoon Jamzad and Minh Nguyen Nhat To and Brian Wodlinger and Purang Abolmaesumi and Parvin Mousavi},
  title     = {Vision Transformers for Prostate Cancer Detection from Ultrasound},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Gilany, Mahdi; Jamzad, Amoon; To, Minh Nguyen Nhat; Wodlinger, Brian; Abolmaesumi, Purang; Mousavi, Parvin
Label Noise Compensation in Prostate Cancer Classification Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022, 2022.
@conference{Gilany2022,
  author    = {Mahdi Gilany and Amoon Jamzad and Minh Nguyen Nhat To and Brian Wodlinger and Purang Abolmaesumi and Parvin Mousavi},
  title     = {Label Noise Compensation in Prostate Cancer Classification},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Barr, Colton; Groves, Leah; Ungi, Tamas; Diao, Babacar; Kikinis, Ron; Mousavi, Parvin; Fichtinger, Gabor
Prostate segmentation and reconstruction for integration in an ultrasound-guided prostate biopsy system for nationwide implementation in Senegal Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022, 2022.
@conference{Barr2022,
  author    = {Colton Barr and Leah Groves and Tamas Ungi and Babacar Diao and Ron Kikinis and Parvin Mousavi and Gabor Fichtinger},
  title     = {Prostate segmentation and reconstruction for integration in an ultrasound-guided prostate biopsy system for nationwide implementation in Senegal},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Wu, Catherine O; Diao, Babacar; Groves, Leah; Ungi, Tamas; Sedghi, Alireza; Kikinis, Ron; Mousavi, Parvin; Fichtinger, Gabor
Visualization of the zonal anatomy for transrectal ultrasound guided prostate biopsy Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) - BEST PITCH AWARD, 2022.
@conference{Wu2022,
  author    = {Catherine O Wu and Babacar Diao and Leah Groves and Tamas Ungi and Alireza Sedghi and Ron Kikinis and Parvin Mousavi and Gabor Fichtinger},
  title     = {Visualization of the zonal anatomy for transrectal ultrasound guided prostate biopsy},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO)},
  note      = {Best Pitch Award},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
March, Lucas; Rodgers, Jessica; Jamzad, Amoon; Hisey, Rebecca; McKay, Doug; Rudan, John; Kaufmann, Martin; Ren, Kevin YM; Fichtinger, Gabor; Mousavi, Parvin
Semi-supervised cautery detection with preprocessing in basal cell carcinoma surgical videos Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022, 2022.
@conference{March2022,
  author    = {Lucas March and Jessica Rodgers and Amoon Jamzad and Rebecca Hisey and Doug McKay and John Rudan and Martin Kaufmann and Kevin YM Ren and Gabor Fichtinger and Parvin Mousavi},
  title     = {Semi-supervised cautery detection with preprocessing in basal cell carcinoma surgical videos},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}
Ehrlich, Joshua; Jamzad, Amoon; Kaufmann, Martin; Rodgers, Jessica; Rudan, John; Ungi, Tamas; Mousavi, Parvin; Fichtinger, Gabor
Determining the location of tumor classifications in breast cancer surgery Conference
20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022, 2022.
@conference{Ehrlich2022,
  author    = {Joshua Ehrlich and Amoon Jamzad and Martin Kaufmann and Jessica Rodgers and John Rudan and Tamas Ungi and Parvin Mousavi and Gabor Fichtinger},
  title     = {Determining the location of tumor classifications in breast cancer surgery},
  booktitle = {20th Annual Symposium of the Imaging Network of Ontario (ImNO) 2022},
  year      = {2022},
  date      = {2022-03-22},
  urldate   = {2022-03-22},
  keywords  = {},
  pubstate  = {published},
  tppubtype = {conference},
}