import numpy as np
import os
from keras.backend.tensorflow_backend import set_session
from random import randint,shuffle
import nibabel as nib
def read_file_list(domain='source'):
    """Read the training CSV and return the rows for the requested domain.

    The CSV is expected to have a header row; each data row is split on
    commas, with column 0 holding an integer id.  Rows whose id is NOT
    divisible by 4 form the "source" domain; the rest form "target".

    Parameters
    ----------
    domain : str
        One of 'all', 'source', 'target', 'ss'.  'ss' returns a shuffled
        mix of every target row plus a slice of shuffled source rows.

    Returns
    -------
    list of list of str
        The selected CSV rows.

    Raises
    ------
    ValueError
        If *domain* is not a recognised value (the original silently
        returned ``None`` here, which made callers fail later).
    """
    # train_path = './dataset/train_info_part.csv'
    train_path = 'E:\\WinPython\\settings\\file\\DASS\\dataset\\train_info_part.csv'
    with open(train_path, 'r') as f:
        # drop the header row and the trailing empty line after the last '\n'
        rows = f.read().split('\n')[1:-1]
    rows = [row.split(',') for row in rows]

    source_file_list = []
    target_file_list = []
    for row in rows:
        # ids divisible by 4 go to the target domain, the rest to source
        if int(row[0]) % 4:
            source_file_list.append(row)
        else:
            target_file_list.append(row)

    if domain == 'all':
        return rows
    if domain == 'source':
        return source_file_list
    if domain == 'target':
        return target_file_list
    if domain == 'ss':
        shuffle(source_file_list)
        # NOTE(review): [:-len(target_file_list)] keeps all BUT the last
        # len(target) source rows; if an equal-sized sample was intended
        # this should be [:len(target_file_list)] — confirm with author.
        # Preserved as-is to avoid changing behavior.
        ss_file_list = target_file_list + source_file_list[:-len(target_file_list)]
        shuffle(ss_file_list)
        return ss_file_list
    raise ValueError('unknown domain: %r' % domain)
def read_img(file_name):
    """Load NIfTI volumes and extract 3 random consecutive slices from each.

    For every file, a run of 3 consecutive slices along the last axis is
    taken from the middle third of the volume, then min-max normalised
    to [0, 1].

    Parameters
    ----------
    file_name : sequence of str
        File names relative to the c_data directory.

    Returns
    -------
    list of numpy.ndarray
        One (H, W, 3) float array per input file.
    """
    batch_img = []
    for name in file_name:
        # file_path = './dataset/c_data/' + name
        file_path = 'E:\\WinPython\\settings\\file\\DASS\\dataset\\c_data\\' + name
        # get_fdata() replaces get_data(), which is deprecated and was
        # removed in nibabel 5.x; both feed the same normalisation below.
        img = nib.load(file_path).get_fdata()
        # random start within the middle third, leaving room for 3 slices
        start = randint(int(img.shape[-1] / 3), int(img.shape[-1] / 3 * 2) - 3)
        img = img[:, :, start:start + 3]
        # min-max normalise; guard against a constant crop (max == min),
        # which previously divided by zero
        value_range = img.max() - img.min()
        if value_range:
            img = (img - img.min()) / value_range
        else:
            img = img - img.min()
        batch_img.append(img)
    return batch_img
def get_data(file_list):
    """Build (x, y) training arrays from CSV rows.

    Each row contributes 3 samples — ``read_img`` draws a fresh random
    slice triple on every call, so repeating acts as data augmentation.

    Parameters
    ----------
    file_list : list of list of str
        CSV rows; columns 4:7 hold image file names, column 1 the label.

    Returns
    -------
    tuple (x, y)
        x : ndarray of stacked image batches, transposed so the slice
            axis precedes the spatial axes.
        y : float32 ndarray of labels with label 2 remapped to 1.
        NOTE(review): x has 3x as many entries as y (3 samples per row
        vs. 1 label per row) — confirm the intended pairing downstream.
    """
    img = []
    for file in file_list:
        for _ in range(3):
            # BUG FIX: the original called img.append(read_img(...), idx);
            # list.append takes exactly one argument and raised TypeError.
            # idx was only a repeat counter, so it is simply dropped.
            img.append(read_img(file[4:7]))
    x = np.array(img)
    # (sample, file, H, W, slice) -> (sample, file, slice, H, W)
    x = x.transpose(0, 1, 4, 2, 3)
    y = np.array([file[1] for file in file_list], dtype=np.float32)
    y[y == 2] = 1  # merge class 2 into class 1 (binary labels)
    return (x, y)
# Materialise the source- and target-domain datasets at module load time.
# These four arrays are the module's public outputs.
source_list = read_file_list(domain='source')
target_list = read_file_list(domain='target')
sourceX, sourceY = get_data(source_list)
targetX, targetY = get_data(target_list)