import glob
import os
import sys
import warnings
from datetime import datetime
from time import time

import cv2
import h5py
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import tensorflow as tf
from PIL import Image
from sklearn.metrics import classification_report
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from tensorflow.keras.applications import inception_v3, xception, resnet50, vgg16, vgg19
from tensorflow.keras.applications import InceptionV3, Xception, ResNet50, VGG16, VGG19
from tensorflow.keras.callbacks import EarlyStopping, ModelCheckpoint, TensorBoard
from tensorflow.keras.layers import Input, Dense, Dropout, Activation, Flatten, Lambda
from tensorflow.keras.layers import GlobalAveragePooling2D
from tensorflow.keras.models import Model
from tensorflow.keras.optimizers import SGD, Adagrad
from tensorflow.keras.utils import to_categorical
from tensorflow.python.keras.utils import np_utils
from tqdm import tqdm

# from utils import get_params_count

warnings.filterwarnings("ignore")
train_file_num =0
valid_file_num =0
test_file_num =0for f in os.listdir("./train"):file= glob.glob(pathname="./train/"+ f +"/*.jpg")
train_file_num +=len(file)print(train_file_num)for f in os.listdir("./val"):file= glob.glob(pathname="./val/"+ f +"/*.jpg")
valid_file_num +=len(file)print(valid_file_num)file= glob.glob(pathname="./test/*.jpg")
test_file_num +=len(file)print(test_file_num)
i =0for f1 in tqdm(os.listdir("./train")):for f2 in os.listdir("./train/"+ f1):
img = cv2.imread(f'./train/{f1}/{f2}')
img = cv2.resize(img,(height, height))
train[i]= img[:,:,::-1]
i +=1
i =0for f1 in os.listdir("./val"):for f2 in os.listdir("./val/"+ f1):
img = cv2.imread(f'./val/{f1}/{f2}')
img = cv2.resize(img,(height, height))
valid[i]= img[:,:,::-1]
i +=1
i =0for f1 in os.listdir("./test"):
img = cv2.imread(f'./test/{f1}')
img = cv2.resize(img,(height, height))
test[i]= img[:,:,::-1]
i +=1
train =(train-125)/125
valid =(valid-125)/125
print('Training Data Size = %.2f GB'%(sys.getsizeof(train)/1024**3))print('Testing Data Size = %.2f GB'%(sys.getsizeof(valid)/1024**3))print('Testing Data Size = %.2f GB'%(sys.getsizeof(test)/1024**3))
defsetup_to_transfer_learning(model,base_model):#base_modelfor layer in base_model.layers:
layer.trainable =False
lr=0.005
decay=1e-6
momentum=0.9
sgd = SGD(lr=lr, decay=decay, momentum=momentum, nesterov=True)
model.compile(optimizer=sgd,loss='categorical_crossentropy',metrics=['accuracy'])defsetup_to_fine_tune(model,base_model):
GAP_LAYER =10# max_pooling_2d_2for layer in base_model.layers[:GAP_LAYER+1]:
layer.trainable =Falsefor layer in base_model.layers[GAP_LAYER+1:]:
layer.trainable =True
model.compile(optimizer=Adagrad(lr=0.005),loss='categorical_crossentropy',metrics=['accuracy'])