Raw study record (Python dict literal — NOT strict JSON: single-quoted strings and capitalized False; re-serialize with json.dumps before feeding to an RFC 8259 parser)
{'hasResults': False, 'derivedSection': {'miscInfoModule': {'versionHolder': '2026-03-25'}, 'conditionBrowseModule': {'meshes': [{'id': 'D001932', 'term': 'Brain Neoplasms'}, {'id': 'D005910', 'term': 'Glioma'}, {'id': 'D005909', 'term': 'Glioblastoma'}], 'ancestors': [{'id': 'D016543', 'term': 'Central Nervous System Neoplasms'}, {'id': 'D009423', 'term': 'Nervous System Neoplasms'}, {'id': 'D009371', 'term': 'Neoplasms by Site'}, {'id': 'D009369', 'term': 'Neoplasms'}, {'id': 'D001927', 'term': 'Brain Diseases'}, {'id': 'D002493', 'term': 'Central Nervous System Diseases'}, {'id': 'D009422', 'term': 'Nervous System Diseases'}, {'id': 'D018302', 'term': 'Neoplasms, Neuroepithelial'}, {'id': 'D017599', 'term': 'Neuroectodermal Tumors'}, {'id': 'D009373', 'term': 'Neoplasms, Germ Cell and Embryonal'}, {'id': 'D009370', 'term': 'Neoplasms by Histologic Type'}, {'id': 'D009375', 'term': 'Neoplasms, Glandular and Epithelial'}, {'id': 'D009380', 'term': 'Neoplasms, Nerve Tissue'}, {'id': 'D001254', 'term': 'Astrocytoma'}]}}, 'protocolSection': {'designModule': {'phases': ['PHASE3'], 'studyType': 'INTERVENTIONAL', 'designInfo': {'allocation': 'NA', 'maskingInfo': {'masking': 'NONE'}, 'primaryPurpose': 'DIAGNOSTIC', 'interventionModel': 'SINGLE_GROUP'}, 'enrollmentInfo': {'type': 'ESTIMATED', 'count': 100}}, 'statusModule': {'overallStatus': 'NOT_YET_RECRUITING', 'startDateStruct': {'date': '2026-03', 'type': 'ESTIMATED'}, 'expandedAccessInfo': {'hasExpandedAccess': False}, 'statusVerifiedDate': '2026-01', 'completionDateStruct': {'date': '2028-06', 'type': 'ESTIMATED'}, 'lastUpdateSubmitDate': '2026-01-22', 'studyFirstSubmitDate': '2026-01-22', 'studyFirstSubmitQcDate': '2026-01-22', 'lastUpdatePostDateStruct': {'date': '2026-01-29', 'type': 'ACTUAL'}, 'studyFirstPostDateStruct': {'date': '2026-01-29', 'type': 'ACTUAL'}, 'primaryCompletionDateStruct': {'date': '2027-12', 'type': 'ESTIMATED'}}, 'outcomesModule': {'primaryOutcomes': [{'measure': 'Diagnostic performance of 
BrainUS-AI for residual tumor detection at end of resection', 'timeFrame': 'During surgery (baseline, during resection, and end of resection), with the primary assessment at the end of resection on the final intraoperative ultrasound acquisition.', 'description': "Residual tumor presence/absence will be determined by the BrainUS-AI segmentation overlay during the final intraoperative ultrasound acquisition (when the surgeon considers the resection complete). This binary classification (residual present/absent) will be compared against early postoperative MRI when available (reference standard), and agreement with the surgeon's intraoperative assessment will also be recorded. Diagnostic performance will be reported as sensitivity, specificity, PPV, and NPV with 95% confidence intervals, and concordance will be assessed using Cohen's kappa."}]}, 'oversightModule': {'oversightHasDmc': False, 'isFdaRegulatedDrug': False, 'isFdaRegulatedDevice': False}, 'conditionsModule': {'keywords': ['brain tumor', 'glioma', 'glioblastoma', 'low-grade glioma', 'high-grade glioma', 'ious', 'computer vision', 'AI', 'ultrasound'], 'conditions': ['Brain Tumor Adult']}, 'referencesModule': {'references': [{'pmid': '39858097', 'type': 'RESULT', 'citation': 'Cepeda S, Esteban-Sinovas O, Singh V, Shetty P, Moiyadi A, Dixon L, Weld A, Anichini G, Giannarou S, Camp S, Zemmoura I, Giammalva GR, Del Bene M, Barbotti A, DiMeco F, West TR, Nahed BV, Romero R, Arrese I, Hornero R, Sarabia R. Deep Learning-Based Glioma Segmentation of 2D Intraoperative Ultrasound Images: A Multicenter Study Using the Brain Tumor Intraoperative Ultrasound Database (BraTioUS). Cancers (Basel). 2025 Jan 19;17(2):315. doi: 10.3390/cancers17020315.'}, {'pmid': '40449046', 'type': 'RESULT', 'citation': 'Cepeda S, Esteban-Sinovas O, Romero R, Singh V, Shett P, Moiyadi A, Zemmoura I, Giammalva GR, Del Bene M, Barbotti A, DiMeco F, West TR, Nahed BV, Arrese I, Hornero R, Sarabia R. 
Real-time brain tumor detection in intraoperative ultrasound: From model training to deployment in the operating room. Comput Biol Med. 2025 Jul;193:110481. doi: 10.1016/j.compbiomed.2025.110481. Epub 2025 May 30.'}]}, 'descriptionModule': {'briefSummary': 'Intraoperative ultrasound is a versatile, low-cost imaging tool that has been shown to improve safety and efficacy in brain tumor surgery. However, its widespread adoption remains limited due to operator dependency, the complexity of image interpretation, the presence of artifacts, and a restricted field of view.\n\nThis project aims to prospectively evaluate, in a multicenter and non-randomized setting, a prototype real-time deep learning-based segmentation model for brain tumor delineation in intraoperative ultrasound. The model is designed to facilitate the identification of tumor tissue during surgery, potentially enhancing intraoperative decision-making and surgical precision.\n\nBy increasing the precision and accessibility of ioUS, this innovation is expected to enable safer and more complete resections, with the potential to improve both survival and quality of life for patients with brain tumors.', 'detailedDescription': 'Brain tumor surgery presents major challenges due to the complex anatomy of the brain and the infiltrative nature of these lesions, which are often located near eloquent areas. One of the key determinants of patient survival is the extent of tumor resection, provided it can be achieved without compromising neurological function 1. 
To maximize safe resection, neurosurgeons rely on a variety of intraoperative adjuncts, including fluorescent agents, neuronavigation, direct electrical stimulation, and advanced intraoperative imaging techniques, most notably intraoperative magnetic resonance imaging (ioMRI) and intraoperative ultrasound (ioUS) 2.\n\nAlthough ioMRI offers excellent resolution and accuracy, its high cost, logistical demands, and complexity of integration limit its availability to a small number of specialized centers 3. In contrast, ioUS is a low-cost, versatile modality that integrates naturally into the surgical workflow 4-6. Nevertheless, its broader adoption has been limited by several factors: high operator dependency, a steep learning curve, and interpretation challenges related to artifacts, non-standard imaging planes, low contrast between tumor and normal brain, and a restricted field of view.\n\nOver the past decades, research on AI-based segmentation of brain tumors has advanced substantially, but most work has focused on MRI 7. In the context of ioUS, early studies such as Ritschel et al. 8 demonstrated that supervised classification models (e.g., support vector machines) could distinguish tumor from healthy tissue in contrast-enhanced ultrasound, but these approaches were labor-intensive and limited to small datasets. Ilunga-Mbuyamba et al. 9 later investigated multimodal registration between ioUS and MRI to enhance segmentation, but clinical feasibility was constrained by the need for accurate co-registration. More recently, deep learning-based approaches by Canalini et al. 10 and Carton et al. 11 have been applied to segment surgical cavities and tumor volumes in ioUS images.\n\nState-of-the-art methods such as those reported by Faanes et al. 12, using nnU-Net architectures, have achieved promising Dice similarity coefficients of 0.6-0.9 on public datasets such as RESECT-SEG 13 and ReMIND 14. 
However, these models were not designed for real-time inference and have not undergone validation in live surgical settings. Other approaches, such as that of Dorent et al. 15, have relied on synthetic ultrasound images derived from preoperative MRI, raising concerns about generalizability to real ioUS data. Overall, despite these advances, clinical translation remains limited due to the unique challenges of ioUS, including lower spatial resolution, image heterogeneity, and variability in acquisition protocols.\n\nIn other medical domains, AI-assisted ultrasound segmentation has demonstrated real-time feasibility. For example, Hu et al. 16 implemented U-Net-based models for breast lesion segmentation at 16 frames per second (FPS) with Dice scores exceeding 0.75, while Wei et al. 17 applied YOLO-based detection to identify carotid plaques with 98.5% accuracy at 39 FPS. Despite their efficiency and accuracy, similar approaches have yet to be implemented and clinically validated for brain tumor surgery using ioUS.\n\nOur project aims to address this gap by conducting a multicenter, prospective, non-randomized validation of a prototype deep learning-based segmentation model specifically designed for real-time intraoperative brain tumor ultrasound. The model operates at surgical frame rates, automatically delineating tumor boundaries directly on the live ultrasound feed, with the goal of assisting intraoperative decision-making, maximizing the extent of resection when oncologically appropriate, and preserving neurological function.\n\nThis study builds upon our prior work, which has established a strong scientific foundation for the proposed validation. 
In our recent publication, "Deep Learning-Based Glioma Segmentation of 2D Intraoperative Ultrasound Images: A Multicenter Study Using the Brain Tumor Intraoperative Ultrasound Database (BraTioUS)" 18, we trained and validated a deep learning model for brain tumor segmentation using multicenter data from BraTioUS-DB-marking a milestone in ioUS research. In a second study, "Real-Time Brain Tumor Detection in Intraoperative Ultrasound: From Model Training to Deployment in the Operating Room"19, we developed and prospectively evaluated a real-time computer vision detection model in the operating room. Together, these contributions provide a robust framework for advancing real-time AI-based segmentation in intraoperative ultrasound, directly aligned with the objectives of the present study.'}, 'eligibilityModule': {'sex': 'ALL', 'stdAges': ['ADULT', 'OLDER_ADULT'], 'minimumAge': '18 Years', 'healthyVolunteers': False, 'eligibilityCriteria': 'Inclusion criteria:\n\n* Age ≥ 18 years.\n* Scheduled for craniotomy and resection of a brain tumor with ioUS planned as part of the standard surgical workflow.\n* Preoperative MRI available for surgical planning.\n* Ability to obtain informed consent from the patient or legal representative.\n\nExclusion criteria:\n\n• Inadequate ioUS image acquisition due to technical failure or intraoperative complications unrelated to the tumor.'}, 'identificationModule': {'nctId': 'NCT07376304', 'acronym': 'BrainUS-AI', 'briefTitle': 'Intraoperative Ultrasound for Brain Tumor Surgery Enhanced by AI', 'organization': {'class': 'OTHER', 'fullName': 'Hospital del Rio Hortega'}, 'officialTitle': 'Optimization of Intraoperative Ultrasound Use in Brain Tumor Surgery Through Artificial Intelligence-Based Techniques', 'orgStudyIdInfo': {'id': 'PI-25-191-H'}}, 'armsInterventionsModule': {'armGroups': [{'type': 'EXPERIMENTAL', 'label': 'Real-time AI-assisted intraoperative ultrasound segmentation', 'description': 'Participants undergoing standard-of-care 
brain tumor resection with intraoperative ultrasound (ioUS) will use a prototype real-time deep learning-based segmentation system that overlays automated tumor delineation on the live ultrasound feed during surgery. The tool is used as an adjunct to routine intraoperative imaging and does not mandate changes to the surgical strategy; the surgeon remains fully responsible for intraoperative decision-making. Technical performance (e.g., segmentation accuracy, latency/FPS, operational stability), feasibility/workflow impact, residual tumor detection agreement, and surgeon-reported usability will be prospectively collected across participating centers.', 'interventionNames': ['Device: BrainUS-AI real-time intraoperative ultrasound segmentation system']}], 'interventions': [{'name': 'BrainUS-AI real-time intraoperative ultrasound segmentation system', 'type': 'DEVICE', 'description': "A prototype AI-based device (software system) that performs real-time deep learning segmentation of brain tumor tissue on intraoperative ultrasound (ioUS) and displays the segmentation as an overlay on the live ultrasound feed during surgery. The system is used as an adjunct to standard-of-care ioUS without mandating any change to the planned surgical strategy; intraoperative decisions remain under the surgeon's responsibility. 
System logs capture processing performance (e.g., FPS, end-to-end latency, operational uptime) and outputs used for subsequent technical validation and workflow/usability assessments.", 'armGroupLabels': ['Real-time AI-assisted intraoperative ultrasound segmentation']}]}, 'contactsLocationsModule': {'centralContacts': [{'name': 'Santiago Cepeda, MD., Ph.D.', 'role': 'CONTACT', 'email': 'scepedac@saludcastillayleon.es', 'phone': '+34651035158'}]}, 'ipdSharingStatementModule': {'ipdSharing': 'UNDECIDED'}, 'sponsorCollaboratorsModule': {'leadSponsor': {'name': 'Hospital del Rio Hortega', 'class': 'OTHER'}, 'collaborators': [{'name': 'Department of Neurosurgery, Bratislava University Hospital', 'class': 'UNKNOWN'}], 'responsibleParty': {'type': 'PRINCIPAL_INVESTIGATOR', 'investigatorTitle': 'Staff Neurosurgeon', 'investigatorFullName': 'Santiago Cepeda', 'investigatorAffiliation': 'Hospital del Rio Hortega'}}}}