# elastix_processing.py
import os
import pydicom as dicomio
import numpy as np
import argparse
import json

from do_elastix import do_elastix
from matched_pairs import matched_pairs

def _collect_patient_series(dcm_root):
    """Group DICOM series directories under *dcm_root* by PatientID.

    Reads one slice per series to extract patient-level metadata. Patients
    with fewer than two series are dropped (registration needs a pair), and
    each patient's series are sorted chronologically by acquisition time.

    Returns:
        dict mapping patient id -> list of [series_dir, acquisition_time],
        sorted by acquisition_time.
    """
    patient_infos = {}
    for uid in os.listdir(dcm_root):
        # Directories containing 'rpns' are auxiliary outputs, not series.
        if 'rpns' in uid:
            continue
        series_dir = os.path.join(dcm_root, uid)
        dcms = os.listdir(series_dir)
        # One slice is enough for patient-level tags; force=True tolerates
        # files without the standard DICOM preamble.
        dcm_meta = dicomio.dcmread(os.path.join(series_dir, dcms[0]), force=True)
        patientid = dcm_meta.get('PatientID', '')
        acq_time = dcm_meta.get('AcquisitionDateTime', '')
        if patientid:
            patient_infos.setdefault(patientid, []).append([series_dir, acq_time])

    return {
        pid: sorted(series, key=lambda item: item[1])
        for pid, series in patient_infos.items()
        if len(series) >= 2
    }


def _load_annotation_centers(json_path):
    """Return the [x, y, z] center of every annotated point pair in *json_path*.

    Each annotation provides two corner coordinates; the center is their
    per-axis midpoint.
    """
    with open(json_path, 'r') as fp:
        json_info = json.load(fp)
    centers = []
    for point in json_info['annotationSessions'][0]['annotationSet']:
        left, right = point['coordinates'][0], point['coordinates'][1]
        centers.append([(left[axis] + right[axis]) / 2.0 for axis in ('x', 'y', 'z')])
    return centers


def _write_target_points(path, points):
    """Write *points* in elastix point-set format: 'point', count, x y z rows."""
    with open(path, 'w') as fp:
        fp.write('point')
        fp.write('\n')
        fp.write(str(len(points)))
        fp.write('\n')
        for x, y, z in points:
            fp.write('{} {} {}\n'.format(round(x, 6), round(y, 6), round(z, 6)))


def elastix_processing(args):
    """Register consecutive same-patient series with elastix and match lesions.

    Scans ``<job_data_root>/output/tmp`` for DICOM series, groups them by
    PatientID and sorts by acquisition time. For every adjacent
    (fixed=earlier, moving=later) pair it:
      * writes the fixed series' annotation centers as an elastix point set,
      * runs affine + b-spline registration moving -> fixed via ``do_elastix``
        (which also maps the fixed target points onto the moving image),
      * matches annotations via ``matched_pairs`` and dumps the result,
        plus series ids and times, to ``match_results.json``.

    Args:
        args: namespace with ``job_data_root``, ``elastix_params_affine``
            and ``elastix_param_bspline`` attributes.
    """
    save_root = os.path.join(args.job_data_root, 'output/elastix')
    os.makedirs(save_root, exist_ok=True)

    # Collect registrable series per patient (>= 2 series, time-ordered).
    dcm_root = os.path.join(args.job_data_root, 'output/tmp')
    patient_infos = _collect_patient_series(dcm_root)

    target_path = os.path.join(args.job_data_root, 'output/eval/eval_file_details')
    for pid, series in patient_infos.items():
        for step in range(len(series) - 1):
            fixed_path, fixed_time = series[step]
            moving_path, moving_time = series[step + 1]
            save_path = os.path.join(save_root, 'pid-{}_elastix_step-{}'.format(pid, step + 1))
            os.makedirs(save_path, exist_ok=True)

            # Parse the fixed series' annotation JSON into target points.
            fixed_json_path = os.path.join(target_path, os.path.basename(fixed_path) + '.json')
            point_list = _load_annotation_centers(fixed_json_path)
            target_point_path = os.path.join(save_path, 'target_points.txt')
            _write_target_points(target_point_path, point_list)

            # Register moving -> fixed; elastix maps the fixed-image target
            # points onto the moving image (written as outputpoints.txt).
            do_elastix(fixed_path=fixed_path,
                       moving_path=moving_path,
                       affine_param=args.elastix_params_affine,
                       bspline_param=args.elastix_param_bspline,
                       save_path=save_path,
                       target_points=target_point_path)

            if os.path.exists(os.path.join(save_path, 'outputpoints.txt')):
                print('Finished elastix, and obtain in outputpoints-{}'.format(os.path.join(save_path,'outputpoints.txt')))

            # Match annotations between the pair; result keys: matched, missing, new.
            moving_json_path = os.path.join(target_path, os.path.basename(moving_path) + '.json')
            match_results = matched_pairs(fixed_json_path, moving_json_path, save_path)

            match_results['info'] = [os.path.basename(fixed_path), fixed_time,
                                     os.path.basename(moving_path), moving_time]
            with open(os.path.join(save_path, 'match_results.json'), 'w') as file:
                json.dump(match_results, file, indent=4)

    print('Finished all work !!!')

if __name__ == '__main__':
    # CLI entry point: collect the job data root and the two elastix
    # parameter files, then run the full registration/matching pipeline.
    cli = argparse.ArgumentParser(description='elastix processing')
    cli.add_argument('--job_data_root', default='/data/job_715/job_data_preprocess', type=str, help='model')
    cli.add_argument('--elastix_params_affine', default='/shared/lung_cancer_registration/my_elastix_params/configuration_pulmonary_registration_param_parameters_Affine.txt', type=str)
    cli.add_argument('--elastix_param_bspline', default='/shared/lung_cancer_registration/my_elastix_params/configuration_pulmonary_registration_param_parameters_Bspline.txt', type=str)

    elastix_processing(cli.parse_args())