-
Notifications
You must be signed in to change notification settings - Fork 10
/
Copy pathconv2Nifti_auto.py
474 lines (376 loc) · 19.4 KB
/
conv2Nifti_auto.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
"""
Created on 18/10/2023
@author: Marc Schneider
AG Neuroimaging and Neuroengineering of Experimental Stroke
Department of Neurology, University Hospital Cologne
This script automates the conversion from the raw bruker data format to the NIfTI
format for the whole dataset using brkraw. The raw
data needs to be stored in one folder.
All the data which is contained in the input folder will be converted to nifti. During the processing a new folder called proc_data is being
created in the same directory where the raw data folder is located. If you wish to save the output elsewhere you can specify the output directory with the -o flag when starting the script.
Example:
python conv2Nifti_auto.py -i /Volumes/Desktop/MRI/raw_data -o /Volumes/Desktop/MRI//proc_data
"""
import os
import csv
import json
import pandas as pd
import nibabel as nii
import glob as glob
from pathlib import Path
import numpy as np
import re
import concurrent.futures
from PV2NIfTiConverter import P2_IDLt2_mapping
import functools
import subprocess
import shlex
import logging
import shutil
import openpyxl
def create_slice_timings(method_file, scanid, out_file):
    """Parse a Bruker ``method`` file and merge custom slice timings into a JSON sidecar.

    Extracts RepetitionTime, PackDel (slice-pack delay) and the slice
    acquisition order from the method file, computes per-slice timings and
    merges them into the brkraw-generated sidecar JSON: ``out_file`` if it
    exists, otherwise every ``*.json`` found next to the expected path.

    Parameters
    ----------
    method_file : str
        Path to the Bruker method file of the fMRI scan.
    scanid : str
        Scan ID stored alongside the timings in the JSON.
    out_file : str
        Expected path of the brkraw sidecar JSON to update.

    Raises
    ------
    ValueError
        If the required timing parameters cannot be parsed from the file.
    """
    # read in method file to search for parameters
    with open(method_file, "r") as infile:
        lines = infile.readlines()
    interleaved = False
    repetition_time = None
    slicepack_delay = None
    slice_order = []
    n_slices = 0
    reverse = False
    # iterate over lines to find the acquisition parameters
    for idx, line in enumerate(lines):
        if "RepetitionTime=" in line:
            repetition_time = int(float(line.split("=")[1]))
        if "PackDel=" in line:
            slicepack_delay = int(float(line.split("=")[1]))
        if "ObjOrderScheme=" in line:
            # strip the trailing newline before comparing: the raw value
            # ("Sequential\n") would otherwise never equal 'Sequential' and
            # every scan would be classified as interleaved
            interleaved = "Sequential" not in line.split("=")[1].strip()
        if "ObjOrderList=" in line:
            n_slices = re.findall(r'\d+', line)
            if len(n_slices) == 1:
                n_slices = int(n_slices[0])
            # the actual order list sits on the following line; guard against
            # ObjOrderList being the last line of the file
            if idx + 1 < len(lines) and lines[idx + 1]:
                slice_order = [int(float(s)) for s in re.findall(r'\d+', lines[idx + 1])]
                if slice_order[0] > slice_order[-1]:
                    reverse = True
    if repetition_time is None or slicepack_delay is None or not n_slices or not slice_order:
        raise ValueError(f"Could not parse slice timing parameters from {method_file}")
    # calculate actual slice timings
    slice_timings = calculate_slice_timings(n_slices, repetition_time, slicepack_delay, slice_order, reverse)
    # adjust slice order to start at 1
    slice_order = [x + 1 for x in slice_order]
    # save metadata
    mri_meta_data = {
        "RepetitionTime": repetition_time,
        "ObjOrderList": slice_order,
        "n_slices": n_slices,
        "costum_timings": slice_timings,
        "ScanID": scanid,
    }
    if os.path.exists(out_file):
        with open(out_file, "r") as outfile:
            content = json.load(outfile)
        # update brkraw content with own slice timings
        content.update(mri_meta_data)
        with open(out_file, "w") as outfile:
            json.dump(content, outfile)
    else:
        # if the json has a different naming than expected, update every json
        # sitting next to the expected path instead
        parent_path = Path(out_file).parent
        search_path = os.path.join(parent_path, "*.json")
        json_files = glob.glob(search_path)
        for json_file in json_files:
            with open(json_file, "r") as outfile:
                content = json.load(outfile)
            # update brkraw content with own slice timings
            content.update(mri_meta_data)
            with open(json_file, "w") as outfile:
                json.dump(content, outfile)


def calculate_slice_timings(n_slices, repetition_time, slicepack_delay, slice_order, reverse=False):
    """Compute per-slice timings (as fractions of the TR) for an fMRI scan.

    Slices are spaced evenly within ``repetition_time - slicepack_delay`` and
    centered around zero; for an even slice count the grid is shifted by half
    a step. ``slice_order`` maps acquisition order to spatial position and is
    reversed in place when ``reverse`` is set.
    """
    n_slices_2 = int(n_slices / 2)
    slice_spacing = float(repetition_time - slicepack_delay) / float(n_slices * repetition_time)
    if n_slices % 2 == 1:  # odd number of slices: symmetric integer grid
        slice_timings = list(map(float, range(n_slices_2, -n_slices_2 - 1, -1)))
    else:  # even number of slices: shift grid by half a step
        slice_timings = [float(x) - 0.5 for x in range(n_slices_2, -n_slices_2, -1)]
    if reverse:
        slice_order.reverse()
    slice_timings = [slice_timings[x] for x in slice_order]
    return [slice_spacing * x for x in slice_timings]
def get_visu_pars(path):
    """Extract the echo times (VisuAcqEchoTime) from a Bruker visu_pars file.

    Parameters
    ----------
    path : str
        Path to the visu_pars file; a missing file yields an empty array.

    Returns
    -------
    numpy.ndarray
        Echo times in the order they appear (empty if none were found).
    """
    echotimes = []
    if os.path.exists(path):
        with open(path, 'r') as infile:
            lines = infile.readlines()
        for idx, line in enumerate(lines):
            # the values follow on the line after the parameter header;
            # guard against the header being the last line
            if "VisuAcqEchoTime=" in line and idx + 1 < len(lines):
                # match floats as well as ints: a bare r'\d+' would split
                # "12.5" into 12 and 5 and corrupt the echo times
                echotimes = [float(s) for s in re.findall(r'\d+(?:\.\d+)?', lines[idx + 1])]
    # always return an ndarray (the original returned a plain list when the
    # file was missing, giving an inconsistent type to callers)
    return np.array(echotimes)
def bids_convert(input_dir, output_dir):
    """Rearrange the converted NIfTI data into BIDS format via brkraw.

    Runs ``brkraw bids_helper`` inside ``input_dir`` to generate the dataset
    csv/json templates, removes the global EchoTime entry from the json (echo
    times are handled per scan), then runs ``brkraw bids_convert`` and cleans
    up the temp folder created by nifti_convert.

    Parameters
    ----------
    input_dir : str
        Folder holding the raw Bruker datasets.
    output_dir : str
        Destination folder for the BIDS dataset.

    Raises
    ------
    FileNotFoundError
        If bids_helper did not produce the expected data*.json/data*.csv files.
    """
    temp_dir = os.path.join(input_dir, "temp")
    command = f"brkraw bids_helper {input_dir} dataset -j"
    command_args = shlex.split(command)
    # brkraw writes its template files into the current working directory
    os.chdir(input_dir)
    try:
        result = subprocess.run(command_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
        logging.info(f"Output bids helper:\n{result.stdout}")
    except Exception as e:
        logging.error(f'Fehler bei der Ausführung des Befehls: {command_args}\nFehlermeldung: {str(e)}')
        raise
    # adjust the dataset.json template; guard the globs so a failed helper run
    # raises a clear error instead of an IndexError on [0]
    json_matches = glob.glob(os.path.join(os.getcwd(), "data*.json"))
    csv_matches = glob.glob(os.path.join(os.getcwd(), "data*.csv"))
    if not json_matches or not csv_matches:
        raise FileNotFoundError("brkraw bids_helper did not create data*.json/data*.csv templates")
    dataset_json = json_matches[0]
    dataset_csv = csv_matches[0]
    with open(dataset_json, 'r') as infile:
        meta_data = json.load(infile)
    # drop the global EchoTime so per-scan echo times are used; pop() avoids a
    # KeyError when the template has no such entry
    meta_data.get("common", {}).pop("EchoTime", None)
    with open(dataset_json, 'w') as outfile:
        json.dump(meta_data, outfile)
    ## convert to bids
    command = f"brkraw bids_convert {input_dir} {dataset_csv} -j {dataset_json} -o {output_dir}"
    command_args = shlex.split(command)
    try:
        result = subprocess.run(command_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
        logging.info(f"Output bids convert:\n{result.stdout}")
    except Exception as e:
        logging.error(f'Fehler bei der Ausführung des Befehls: {command_args}\nFehlermeldung: {str(e)}')
        raise
    # remove the temp folder created during the nifti conversion step (may be
    # absent if that step was skipped)
    if os.path.exists(temp_dir):
        shutil.rmtree(temp_dir)
def nifti_convert(input_dir, raw_data_list, output_dir):
    """Convert all raw Bruker datasets to NIfTI in parallel via brkraw.

    Conversion happens inside ``input_dir``/temp because brkraw writes into
    the current working directory; the original working directory is restored
    afterwards.

    Parameters
    ----------
    input_dir : str
        Folder holding the raw Bruker datasets.
    raw_data_list : list[str]
        Full paths of the raw datasets to convert.
    output_dir : str
        Kept for interface compatibility.  NOTE(review): currently unused —
        the NIfTI files end up in the temp folder; confirm whether they
        should be moved to output_dir here.
    """
    aidamri_dir = os.getcwd()
    temp_dir = os.path.join(input_dir, "temp")
    if not os.path.exists(temp_dir):
        os.mkdir(temp_dir)
    os.chdir(temp_dir)
    # one worker process per dataset; each invokes 'brkraw tonii'
    with concurrent.futures.ProcessPoolExecutor() as executor:
        futures = [executor.submit(brkraw_tonii, path) for path in raw_data_list]
        concurrent.futures.wait(futures)
    os.chdir(aidamri_dir)
def brkraw_tonii(input_path):
    """Invoke the external ``brkraw tonii`` converter for one raw dataset.

    stdout/stderr of the subprocess are captured and written to the log;
    any failure to launch the command is logged and re-raised.
    """
    cmd = shlex.split(f"brkraw tonii {input_path}")
    try:
        proc = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)
        logging.info(f"Output nifti conversion of dataset {os.path.basename(input_path)}:\n{proc.stdout}")
    except Exception as e:
        logging.error(f'Fehler bei der Ausführung des Befehls: {cmd}\nFehlermeldung: {str(e)}')
        raise
def create_mems_and_map(mese_scan_ses, mese_scan_data, output_dir):
    """Stack the single-echo MESE NIfTIs of one session into a 4D MEMS image
    and derive a T2 map from it.

    Parameters
    ----------
    mese_scan_ses : str
        Path to one session folder (.../sub-XX/ses-YY) in the BIDS output.
    mese_scan_data : dict
        Per-subject info ({subj: {"ScanID": ..., "RawData": ...}}) used to
        locate the raw visu_pars file for the echo times.
    output_dir : str
        Root of the BIDS output tree; results go to .../anat and .../t2map.

    Returns
    -------
    Returns 1 (and does nothing) when the session contains no MESE files.

    NOTE(review): reads the module-level global ``pathToRawData`` that is only
    defined inside the ``__main__`` block — with a 'spawn' start method the
    ProcessPoolExecutor workers would not have it; presumably this runs with
    'fork'. Confirm.
    """
    # derive subject/session labels from the path
    sub = os.path.basename(os.path.dirname(mese_scan_ses))
    ses = os.path.basename(mese_scan_ses)
    anat_data_path = os.path.join(mese_scan_ses, "anat", "*MESE.nii*")
    mese_data_paths = glob.glob(anat_data_path, recursive=True)
    # skip the subject if no MEMS files are found
    if not mese_data_paths:
        return 1
    # collect data of all individual MEMS files of one subject and session
    img_array_data = {}
    for m_d_p in mese_data_paths:
        # find echo number in path, e.g. *echo-10_MESE.nii.gz -> 10
        slice_number = int(((Path(m_d_p).name).split('-')[-1]).split('_')[0])
        # load nifti image and save the raw (unscaled) array in a dict,
        # keyed by the echo number
        data = nii.load(m_d_p)
        img_array = data.dataobj.get_unscaled()
        img_array_data[slice_number] = img_array
        # remove single mese file and its json sidecar
        os.remove(m_d_p)
        # NOTE(review): assumes files end in .nii.gz — for a plain .nii match
        # the replace is a no-op and this would try to remove the image again
        os.remove(m_d_p.replace(".nii.gz", ".json"))
    # sort imgs into right (echo) order
    sorted_imgs = []
    for key in sorted(img_array_data):
        sorted_imgs.append(img_array_data[key])
    # stack all map related niftis along a new third axis
    new_img = np.stack(sorted_imgs, axis=2)
    # remember the original orientation, then clear it before saving;
    # correct_orientation() restores it afterwards
    qform = data.header.get_qform()
    sform = data.header.get_sform()
    data.header.set_qform(None)
    data.header.set_sform(None)
    nii_img = nii.Nifti1Image(new_img, None, data.header)
    # save combined MEMS nifti file in anat folder
    img_name = sub + "_" + ses + "_T2w_MEMS.nii.gz"
    t2_mems_path = os.path.join(output_dir, sub, ses, "anat", img_name)
    nii.save(nii_img, t2_mems_path)
    # create t2 map: echo times come from the raw scan's visu_pars file
    sub_num = sub.split("-")[1]
    visu_pars_path = os.path.join(pathToRawData, mese_scan_data[sub_num]["RawData"], str(mese_scan_data[sub_num]["ScanID"]), "visu_pars")
    # get echotimes of scan
    echotimes = get_visu_pars(visu_pars_path)
    # a T2 fit only makes sense with more than three echoes
    if len(echotimes) > 3:
        img_name = sub + "_" + ses + "_T2w_MAP.nii.gz"
        t2map_path = os.path.join(output_dir, sub, ses, "t2map", img_name)
        if not os.path.exists(os.path.join(output_dir, sub, ses, "t2map")):
            os.mkdir(os.path.join(output_dir, sub, ses, "t2map"))
        try:
            P2_IDLt2_mapping.getT2mapping(t2_mems_path, 'T2_2p', 100, 1.5, 'Brummer', echotimes, t2map_path)
            logging.info(f"Map created for: {os.path.basename(t2_mems_path)}")
        except Exception as e:
            logging.error(f"Error while computing T2w Map:\n{e}")
            raise
        # restore the original qform/sform on both written images
        correct_orientation(qform,sform,t2_mems_path,t2map_path)
    # generate transposed MEMS img for later registration
    org_mems_scan = nii.load(t2_mems_path)
    mems_data = org_mems_scan.dataobj.get_unscaled()
    mems_data_transposed = np.transpose(mems_data, axes=(0,1,3,2))
    # NOTE(review): index 1 picks the SECOND volume although the variable name
    # says "first slice" — confirm whether index 0 was intended
    mems_data_first_slice = mems_data_transposed[:,:,:,1]
    # replicate that volume across the fourth dimension
    for i in range(mems_data_transposed.shape[3]):
        mems_data_transposed[:,:,:,i] = mems_data_first_slice
    transposed_copied_img = nii.Nifti1Image(mems_data_transposed, org_mems_scan.affine)
    img_name = sub + "_" + ses + "_T2w_transposed_MEMS.nii.gz"
    t2_mems_transposed_path = os.path.join(output_dir, sub, ses, "t2map", img_name)
    if not os.path.exists(os.path.join(output_dir, sub, ses, "t2map")):
        os.mkdir(os.path.join(output_dir, sub, ses, "t2map"))
    nii.save(transposed_copied_img, t2_mems_transposed_path)
def correct_orientation(qform, sform, t2_mems_img, t2_map_img):
    """Re-apply the stored qform/sform matrices to both images on disk.

    The MEMS image and the T2 map were saved with cleared orientation; this
    loads each one, restores the given qform/sform in its header and
    overwrites the file in place.
    """
    for img_path in (t2_mems_img, t2_map_img):
        # overwrite img with correct orientation
        loaded = nii.load(img_path)
        raw_array = loaded.dataobj.get_unscaled()
        loaded.header.set_qform(qform)
        loaded.header.set_sform(sform)
        corrected = nii.Nifti1Image(raw_array, None, loaded.header)
        nii.save(corrected, img_path)
# this is needed to be done for the bids converter to work correctly.
def fileCopy(list_of_data, input_path):
    """Move every dataset in *list_of_data* directly into *input_path*.

    brkraw's BIDS converter expects all raw datasets in one flat folder, so
    anything nested in a subdirectory is moved up one level. Entries already
    located directly in *input_path* are left untouched.

    NOTE: despite its name this function MOVES (does not copy) the files.
    """
    for source in list_of_data:
        # skip entries that already live directly in the target folder
        if os.path.dirname(source) == input_path:
            continue
        # extract the filename and build the destination path
        filename = os.path.basename(source)
        destination_path = os.path.join(input_path, filename)
        try:
            shutil.move(source, destination_path)
            # report the actual file name (the original printed a broken
            # '(unknown)' placeholder here)
            print(f"File '{filename}' moved to '{input_path}' successfully.")
        except Exception as e:
            print(f"Error moving '{filename}' to '{input_path}': {str(e)}")
if __name__ == "__main__":
import argparse
parser = argparse.ArgumentParser(description='This script automates the conversion from the raw bruker data format to the NIfTI format using 1_PV2NIfTiConverter/pv_conv2Nifti.py. The raw data needs to be in the following structure: projectfolder/days/subjects/data/. For this script to work, the groupMapping.csv needs to be adjusted, where the group name of every subject''s folder in the raw data structure needs to be specified. This script computes the converison either for all data in the raw project folder or for certain days and/or groups specified through the optional arguments -d and -g. During the processing a new folder called proc_data is being created in the same directory where the raw data folder is located. Example: python conv2Nifti_auto.py -f /Volumes/Desktop/MRI/raw_data -d Baseline P1 P7 P14 P28')
parser.add_argument('-i', '--input', required=True,
help='Path to the parent project folder of the dataset, e.g. raw_data, WARNING: all of the raw subjects have to be in one folder and not to have a subfolder structure. otherwise the conversion to bids wont work.', type=str)
parser.add_argument('-s', '--sessions',
help='Select which sessions of your data should be processed, if no days are given all data will be used.', type=str, required=False)
parser.add_argument('-o', '--output', type=str, required=False, help='Output directory where the results will be saved.')
## read out parameters
args = parser.parse_args()
pathToRawData = args.input
if args.output == None:
output_dir = os.path.join(pathToRawData, "proc_data")
else:
output_dir = args.output
if not os.path.exists(output_dir):
os.mkdir(output_dir)
# Create a new workbook and select the active sheet
workbook = openpyxl.Workbook()
sheet = workbook.active
# Enter data in Row 1
sheet['A1'] = "Subject"
sheet['B1'] = "Group"
# Save the workbook
workbook.save(os.path.join(output_dir,"GroupMapping.xlsx"))
# Konfiguriere das Logging-Modul
log_file_path = os.path.join(output_dir, "conv2nifti_log.txt")
logging.basicConfig(filename=log_file_path, level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
# get list of raw data in input folder
#list_of_raw = sorted([d for d in os.listdir(pathToRawData) if os.path.isdir(os.path.join(pathToRawData, d)) \
# or (os.path.isfile(os.path.join(pathToRawData, d)) and (('zip' in d) or ('PvDataset' in d)))])
#list_of_raw = glob.glob(os.path.join(pathToRawData,"**","subject"),recursive=True)
#list_of_data = []
#for path in list_of_raw:
# list_of_data.append(os.path.dirname(path))
#fileCopy(list_of_data,pathToRawData)
list_of_raw = glob.glob(os.path.join(pathToRawData,"**","subject"),recursive=True)
list_of_data = []
for path in list_of_raw:
list_of_data.append(os.path.dirname(path))
logging.info(f"Converting following datasets: {list_of_data}")
print(f"Converting following datasets: {list_of_data}")
# convert data into nifti format
print("Paravision to nifti conversion running \33[5m...\33[0m (wait!)")
#nifti_convert(output_dir, list_of_data)
nifti_convert(pathToRawData, list_of_data, output_dir)
print("\rNifti conversion \033[0;30;42m COMPLETED \33[0m ")
# convert data into BIDS format
print("BIDS conversion running \33[5m...\33[0m (wait!)")
bids_convert(pathToRawData, output_dir)
print("\rBIDS conversion \033[0;30;42m COMPLETED \33[0m ")
# find MEMS and fmri files
mese_scan_data = {}
mese_scan_ids = []
fmri_scan_ids = {}
dataset_csv = glob.glob(os.path.join(os.getcwd(), "data*.csv"))[0]
if os.path.exists(dataset_csv):
with open(dataset_csv, 'r') as csvfile:
df = pd.read_csv(csvfile, delimiter=',')
for index, row in df.iterrows():
# save every sub which has MEMS scans
if row["modality"] == "MESE":
mese_scan_ids.append(row["SubjID"])
mese_scan_data[row["SubjID"]] = {}
mese_scan_data[row["SubjID"]]["ScanID"] = row["ScanID"]
mese_scan_data[row["SubjID"]]["RawData"] = row["RawData"]
# save every sub and scanid wich is fmri scan
if row["DataType"] == "func":
fmri_scan_ids[row["RawData"]] = {}
fmri_scan_ids[row["RawData"]]["ScanID"] = row["ScanID"]
fmri_scan_ids[row["RawData"]]["SessID"] = row["SessID"]
fmri_scan_ids[row["RawData"]]["SubjID"] = row["SubjID"]
# iterate over all fmri scans to calculate and save costum slice timings
for sub, data in fmri_scan_ids.items():
scanid = str(data["ScanID"])
sessid = str(data["SessID"])
subjid = str(data["SubjID"])
# determine method file path
fmri_scan_method_file = os.path.join(pathToRawData, sub, scanid, "method")
# determine output json file path
out_file = os.path.join(output_dir, "sub-" + subjid, "ses-" + sessid, "func", "sub-" + subjid + "_ses-" + sessid + "_EPI.json")
# calculate slice timings
create_slice_timings(fmri_scan_method_file, scanid, out_file)
## use parallel computing for a faster generation of t2maps
mese_scan_sessions = []
for id in mese_scan_ids:
mese_scan_path = os.path.join(output_dir, "sub-" + id)
sessions = os.listdir(mese_scan_path)
for ses in sessions:
mese_scan_ses = os.path.join(mese_scan_path, ses)
if mese_scan_ses not in mese_scan_sessions:
mese_scan_sessions.append(os.path.join(mese_scan_path, ses))
print("T2 mapping running \33[5m...\33[0m (wait!)")
logging.info(f"Creating T2w maps for following datasets:\n{mese_scan_ids}")
with concurrent.futures.ProcessPoolExecutor() as executor:
futures = [executor.submit(create_mems_and_map, mese_scan_ses, mese_scan_data, output_dir) for mese_scan_ses in mese_scan_sessions]
concurrent.futures.wait(futures)
print('\rT2 mapping \033[0;30;42m COMPLETED \33[0m ')
logging.info(f"Finished creating T2w maps")
dataset_csv = glob.glob(os.path.join(os.getcwd(), "data*.csv"))[0]
dataset_json = glob.glob(os.path.join(os.getcwd(), "data*.json"))[0]
#os.remove(dataset_csv)
#os.remove(dataset_json)
print("\n")
print("###")
print("Finished converting raw data into nifti format!")
print("\n")
print("###")
print("For detailed information check logging file!")
print("\n")
print("###")
print("Thank you for using AIDAmri!")