2023
Kobayashi, Satoshi; King, Franklin; Hata, Nobuhiko
Automatic segmentation of prostate and extracapsular structures in MRI to predict needle deflection in percutaneous prostate intervention Journal Article
In: INTERNATIONAL JOURNAL OF COMPUTER ASSISTED RADIOLOGY AND SURGERY, vol. 18, no. 3, pp. 449–460, 2023, ISSN: 1861-6410, 1861-6429, (Num Pages: 12 Place: Heidelberg Publisher: Springer Heidelberg Web of Science ID: WOS:000857906200002).
@article{kobayashi_automatic_2023,
title = {Automatic segmentation of prostate and extracapsular structures in MRI to predict needle deflection in percutaneous prostate intervention},
author = {Satoshi Kobayashi and Franklin King and Nobuhiko Hata},
doi = {10.1007/s11548-022-02757-2},
issn = {1861-6410, 1861-6429},
year = {2023},
date = {2023-03-01},
journal = {INTERNATIONAL JOURNAL OF COMPUTER ASSISTED RADIOLOGY AND SURGERY},
volume = {18},
number = {3},
pages = {449–460},
abstract = {Purpose Understanding the three-dimensional anatomy of percutaneous intervention in prostate cancer is essential to avoid complications. Recently, attempts have been made to use machine learning to automate the segmentation of functional structures such as the prostate gland, rectum, and bladder. However, a paucity of material is available to segment extracapsular structures that are known to cause needle deflection during percutaneous interventions. This research aims to explore the feasibility of the automatic segmentation of prostate and extracapsular structures to predict needle deflection. Methods Using pelvic magnetic resonance images (MRIs), a 3D U-Net was trained and optimized for the prostate and extracapsular structures (bladder, rectum, pubic bone, pelvic diaphragm muscle, bulbospongiosus muscle, bulb of the penis, ischiocavernosus muscle, crus of the penis, transverse perineal muscle, obturator internus muscle, and seminal vesicle). The segmentation accuracy was validated by feeding intra-procedural MRIs into the 3D U-Net to segment the prostate and extracapsular structures in the image. The segmented structures were then used to predict the deflected needle path in in-bore MRI-guided biopsy using a model-based approach. Results The 3D U-Net yielded Dice scores of 0.61-0.83 for parenchymal organs such as the prostate, bladder, rectum, bulb of the penis, and crus of the penis, but lower scores (0.03-0.31) for muscle structures, except the obturator internus muscle (0.71). The 3D U-Net showed higher Dice scores for functional structures (p < 0.001) and complication-related structures (p < 0.001). The segmentation of extracapsular anatomies helped to predict the deflected needle path in MRI-guided prostate interventions with an accuracy of 0.9 to 4.9 mm. Conclusion Our segmentation method using 3D U-Net provided an accurate anatomical understanding of the prostate and extracapsular structures. In addition, our method was suitable for segmenting functional and complication-related structures. Finally, 3D images of the prostate and extracapsular structures could simulate the needle pathway to predict needle deflections.},
note = {Num Pages: 12
Place: Heidelberg
Publisher: Springer Heidelberg
Web of Science ID: WOS:000857906200002},
keywords = {3-D, 3D U-Net, biopsy, CANCER, Deep learning, guidance, Percutaneous intervention, Prostate, RISK, Segmentation, Ultrasound},
pubstate = {published},
tppubtype = {article}
}
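As a companion to the entry above: the per-structure Dice scores it reports follow the standard overlap definition, sketched below in Python with NumPy. This is an illustrative computation under the usual definition, not the authors' code, and the label values are hypothetical.

import numpy as np

def dice_score(pred: np.ndarray, truth: np.ndarray, label: int) -> float:
    """Dice overlap for one structure label between two segmentation volumes."""
    p = pred == label
    t = truth == label
    denom = p.sum() + t.sum()
    if denom == 0:
        return float("nan")  # label absent from both volumes
    return 2.0 * np.logical_and(p, t).sum() / denom

# Hypothetical usage: label 1 for the prostate in predicted vs. ground-truth
# label maps; the paper evaluates 12 structures in total.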
2021
Banach, Artur; King, Franklin; Masaki, Fumitaro; Tsukada, Hisashi; Hata, Nobuhiko
Visually Navigated Bronchoscopy using three cycle-Consistent generative adversarial network for depth estimation Journal Article
In: MEDICAL IMAGE ANALYSIS, vol. 73, pp. 102164, 2021, ISSN: 1361-8415, 1361-8423, (Num Pages: 12 Place: Amsterdam Publisher: Elsevier Web of Science ID: WOS:000701725200004).
@article{banach_visually_2021,
title = {Visually Navigated Bronchoscopy using three cycle-Consistent generative adversarial network for depth estimation},
author = {Artur Banach and Franklin King and Fumitaro Masaki and Hisashi Tsukada and Nobuhiko Hata},
doi = {10.1016/j.media.2021.102164},
issn = {1361-8415, 1361-8423},
year = {2021},
date = {2021-10-01},
journal = {MEDICAL IMAGE ANALYSIS},
volume = {73},
pages = {102164},
abstract = {[Background] Electromagnetically Navigated Bronchoscopy (ENB) is currently the state of the art in diagnostic and interventional bronchoscopy. CT-to-body divergence is a critical hurdle in ENB, causing navigation error and ultimately limiting the clinical efficacy of diagnosis and treatment. In this study, Visually Navigated Bronchoscopy (VNB) is proposed to address this issue of CT-to-body divergence. [Materials and Methods] We extended and validated an unsupervised learning method that generates a depth map directly from bronchoscopic images using a Three Cycle-Consistent Generative Adversarial Network (3cGAN) and registers the depth map to pre-procedural CTs. We tested the working hypothesis that the proposed VNB can be integrated into a navigated bronchoscopic system based on 3D Slicer and can accurately register bronchoscopic images to pre-procedural CTs to navigate transbronchial biopsies. The quantitative metrics used to assess this hypothesis were the Absolute Tracking Error (ATE) of the tracking and the Target Registration Error (TRE) of the total navigation system. We validated our method on phantoms produced from the pre-procedural CTs of five patients who underwent ENB and on two ex-vivo pig lung specimens. [Results] The ATE using 3cGAN was 6.2 +/- 2.9 mm. The ATE of 3cGAN was statistically significantly lower than that of cGAN, particularly in the trachea and lobar bronchus (p < 0.001). The TRE of the proposed method ranged from 11.7 to 40.5 mm. The TRE computed by 3cGAN was statistically significantly smaller than that computed by cGAN in two of the five cases enrolled (p < 0.05). [Conclusion] VNB using 3cGAN to generate the depth maps was technically and clinically feasible. While the accuracy of tracking by cGAN was acceptable, the TRE warrants further investigation and improvement.},
note = {Num Pages: 12
Place: Amsterdam
Publisher: Elsevier
Web of Science ID: WOS:000701725200004},
keywords = {Bronchoscopy, CANCER, CT Imaging, DIAGNOSTIC BRONCHOSCOPY, guidance, GUIDED BRONCHOSCOPY, Image-guided surgery, Lung cancer, Motion tracking, NODULES, PERIPHERAL LUNG LESIONS, RECONSTRUCTION, SYSTEM, VIDEO REGISTRATION},
pubstate = {published},
tppubtype = {article}
}
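The depth-estimation step in the entry above builds on cycle-consistent adversarial training. Below is a minimal Python/PyTorch sketch of a single cycle-consistency term (image to depth and back); the paper's 3cGAN couples three such cycles, and the generator names here are assumptions for illustration, not the authors' implementation.

import torch
import torch.nn.functional as F

def cycle_consistency_loss(real_image: torch.Tensor, G_img2depth, G_depth2img) -> torch.Tensor:
    """L1 reconstruction penalty for the image -> depth -> image cycle."""
    depth = G_img2depth(real_image)      # translate a bronchoscopic frame to a depth map
    reconstruction = G_depth2img(depth)  # translate the depth map back to the image domain
    return F.l1_loss(reconstruction, real_image)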
2018
Moreira, Pedro; Patel, Niravkumar; Wartenberg, Marek; Li, Gang; Tuncali, Kemal; Heffter, Tamas; Burdette, Everette C.; Iordachita, Iulian; Fischer, Gregory S.; Hata, Nobuhiko; Tempany, Clare M.; Tokuda, Junichi
Evaluation of robot-assisted MRI-guided prostate biopsy: needle path analysis during clinical trials Journal Article
In: PHYSICS IN MEDICINE AND BIOLOGY, vol. 63, no. 20, pp. 20NT02, 2018, ISSN: 0031-9155, 1361-6560, (Num Pages: 9 Place: Bristol Publisher: IOP Publishing Ltd Web of Science ID: WOS:000448103100002).
@article{moreira_evaluation_2018,
title = {Evaluation of robot-assisted MRI-guided prostate biopsy: needle path analysis during clinical trials},
author = {Pedro Moreira and Niravkumar Patel and Marek Wartenberg and Gang Li and Kemal Tuncali and Tamas Heffter and Everette C. Burdette and Iulian Iordachita and Gregory S. Fischer and Nobuhiko Hata and Clare M. Tempany and Junichi Tokuda},
doi = {10.1088/1361-6560/aae214},
issn = {0031-9155, 1361-6560},
year = {2018},
date = {2018-10-01},
journal = {PHYSICS IN MEDICINE AND BIOLOGY},
volume = {63},
number = {20},
pages = {20NT02},
abstract = {While the interaction between a needle and the surrounding tissue is known to cause a significant targeting error in prostate biopsy, leading to false-negative results, few studies have demonstrated how it impacts the actual procedure. We performed a pilot study on robot-assisted MRI-guided prostate biopsy with an emphasis on an in-depth analysis of the needle-tissue interaction in vivo. The data were acquired during in-bore transperineal prostate biopsies in patients using a 4 degrees-of-freedom (DoF) MRI-compatible robot. The anatomical structures in the pelvic area and the needle path were reconstructed from MR images and quantitatively analyzed. We analyzed each structure individually and also proposed a mathematical model to investigate the influence of those structures on the targeting error using mixed-model regression. The median targeting error in 188 insertions (27 patients) was 6.3 mm. Both the individual anatomical structure analysis and the mixed-model analysis showed that the deviation resulting from the contact between the needle and the skin was the main source of error. In contrast, needle bending inside the tissue (expressed as needle curvature) did not vary between insertions with targeting errors above and below the average. The analysis indicated that insertions crossing the bulbospongiosus presented a targeting error lower than the average. The mixed-model analysis demonstrated that the distance between the needle guide and the patient skin, the deviation at the entry point, and the path length inside the pelvic diaphragm made statistically significant contributions to the targeting error (p < 0.05). Our results indicate that the errors associated with the elastic contact between the needle and the skin were more prominent than the needle bending along the insertion. Our findings will help to improve the preoperative planning of transperineal prostate biopsies.},
note = {Num Pages: 9
Place: Bristol
Publisher: IOP Publishing Ltd
Web of Science ID: WOS:000448103100002},
keywords = {Accuracy, bore prostate biopsy, CANCER, guidance, needle deflection, needle path analysis, robot-assisted biopsy, SYSTEM},
pubstate = {published},
tppubtype = {article}
}
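The mixed-model regression described in the abstract above can be reproduced in outline with statsmodels: fixed effects for the per-insertion covariates and a random intercept per patient to account for repeated insertions. A hedged sketch follows; the file name and column names are hypothetical, not taken from the study's dataset.

import pandas as pd
import statsmodels.formula.api as smf

# One row per insertion; each patient contributes multiple insertions.
df = pd.read_csv("insertions.csv")  # hypothetical file
model = smf.mixedlm(
    "targeting_error ~ guide_skin_distance + entry_deviation + diaphragm_path_length",
    data=df,
    groups=df["patient_id"],  # random intercept per patient
)
print(model.fit().summary())  # fixed-effect p-values, cf. the reported p < 0.05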
Tokuda, Junichi; Chauvin, Laurent; Ninni, Brian; Kato, Takahisa; King, Franklin; Tuncali, Kemal; Hata, Nobuhiko
Motion compensation for MRI-compatible patient-mounted needle guide device: estimation of targeting accuracy in MRI-guided kidney cryoablations Journal Article
In: PHYSICS IN MEDICINE AND BIOLOGY, vol. 63, no. 8, pp. 085010, 2018, ISSN: 0031-9155, 1361-6560, (Num Pages: 16 Place: Bristol Publisher: IOP Publishing Ltd Web of Science ID: WOS:000429950600005).
@article{tokuda_motion_2018,
title = {Motion compensation for MRI-compatible patient-mounted needle guide device: estimation of targeting accuracy in MRI-guided kidney cryoablations},
author = {Junichi Tokuda and Laurent Chauvin and Brian Ninni and Takahisa Kato and Franklin King and Kemal Tuncali and Nobuhiko Hata},
doi = {10.1088/1361-6560/aab736},
issn = {0031-9155, 1361-6560},
year = {2018},
date = {2018-04-01},
journal = {PHYSICS IN MEDICINE AND BIOLOGY},
volume = {63},
number = {8},
pages = {085010},
abstract = {Patient-mounted needle guide devices for percutaneous ablation are vulnerable to patient motion. The objective of this study is to develop and evaluate a software system for an MRI-compatible patient-mounted needle guide device that can adaptively compensate for displacement of the device due to patient motion using a novel image-based automatic device-to-image registration technique. We have developed a software system for an MRI-compatible patient-mounted needle guide device for percutaneous ablation. It features fully automated image-based device-to-image registration to track the device position, and a device controller to adjust the needle trajectory to compensate for the displacement of the device. We performed: (a) a phantom study using a clinical MR scanner to evaluate registration performance; (b) simulations using intraoperative time-series MR data acquired in 20 clinical cases of MRI-guided renal cryoablations to assess its impact on motion compensation; and (c) a pilot clinical study in three patients to test its feasibility during the clinical procedure. The fiducial registration error (FRE), target registration error (TRE), and success rate of device-to-image registration were 2.71 +/- 2.29 mm, 1.74 +/- 1.13 mm, and 98.3% for the phantom images. The simulation study showed that the motion compensation reduced the targeting error for needle placement from 8.2 mm to 5.4 mm (p < 0.0005) in patients under general anesthesia (GA), and from 14.4 mm to 10.0 mm (p < 1.0 x 10^-5) in patients under monitored anesthesia care (MAC). The pilot study showed that the software registered the device successfully in a clinical setting. Our simulation study demonstrated that the software system could significantly improve targeting accuracy in patients treated under both MAC and GA. Intraprocedural image-based device-to-image registration was feasible.},
note = {Num Pages: 16
Place: Bristol
Publisher: IOP Publishing Ltd
Web of Science ID: WOS:000429950600005},
keywords = {ABLATION, CRYOTHERAPY, CT, guidance, LIVER-TUMORS, manipulator, MRI-compatible robot, MRI-guided interventions, PERCUTANEOUS CRYOABLATION, renal cryoablation, Robot, SYSTEM, THERAPY},
pubstate = {published},
tppubtype = {article}
}
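The motion-compensation step in the entry above amounts to re-expressing a fixed image-space target in the device frame whenever the image-based registration reports a new device pose. A minimal sketch, assuming 4x4 homogeneous transforms in millimeters; all names are illustrative, not the system's actual API.

import numpy as np

def target_in_device_frame(T_device_to_image: np.ndarray, target_image_mm: np.ndarray) -> np.ndarray:
    """Map a 3D target (mm, image coordinates) into the needle guide's frame."""
    T_image_to_device = np.linalg.inv(T_device_to_image)
    p = np.append(target_image_mm, 1.0)  # homogeneous coordinates
    return (T_image_to_device @ p)[:3]

# After each registration update, the controller re-aims the needle guide at
# target_in_device_frame(T_new, target_image_mm) rather than the stale pose.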