我有两类图像。
- 伪造的
- 真实的
数据集拆分详细信息如下。
总训练假图像 3457
总训练真实图像 675
总验证假图像 642
总验证真实图像 336
我已经在该数据集上训练了一个 CNN,但验证准确率不断下降、验证损失却不断上升。我还对数据应用了数据增强和预处理。
# Dataset layout on Google Drive: one root directory per split, each with a
# FAKE/ and a REAL/ sub-directory — the structure flow_from_directory expects.

# Training split: root plus its two class folders.
train_dir = '/content/drive/MyDrive/Training/Training Data'
train_fake_dir = '/content/drive/MyDrive/Training/Training Data/FAKE'
train_real_dir = '/content/drive/MyDrive/Training/Training Data/REAL'

# Validation split: root plus its two class folders.
validations_dir = '/content/drive/MyDrive/Training/Validation Data'
validation_fake_dir = '/content/drive/MyDrive/Training/Validation Data/FAKE'
validation_real_dir = '/content/drive/MyDrive/Training/Validation Data/REAL'
# Count the images per class by listing each class directory
# (every directory entry is assumed to be a single image file).
num_fake_train = len(os.listdir(train_fake_dir))
num_real_train = len(os.listdir(train_real_dir))
num_fake_validation = len(os.listdir(validation_fake_dir))
num_real_validation = len(os.listdir(validation_real_dir))

# Per-class summary: training counts first, then validation counts.
for caption, count in (
        ("Total Training FAKE Images", num_fake_train),
        ("Total Training REAL Images", num_real_train),
):
    print(caption, count)
print("--")
for caption, count in (
        ("Total validation FAKE Images", num_fake_validation),
        ("Total validation REAL Images", num_real_validation),
):
    print(caption, count)

# Split totals.
total_train = num_fake_train + num_real_train
total_validation = num_fake_validation + num_real_validation
print("Total Training Images", total_train)
print("--")
print("Total Validation Images", total_validation)
# Training input pipeline: rescale pixels to [0, 1] and apply random
# geometric augmentation (rotation, shifts, shear, zoom, horizontal flip)
# to enlarge the effective training set.
augmentation_config = dict(
    rescale=1./255,
    rotation_range=40,
    width_shift_range=0.3,
    height_shift_range=0.3,
    shear_range=0.3,
    zoom_range=0.2,
    horizontal_flip=True,
    fill_mode='nearest',
)
image_gen_train = ImageDataGenerator(**augmentation_config)

# Stream shuffled batches straight from the class sub-folders; with
# class_mode='binary' each label is a single 0/1 value per image.
train_data_gen = image_gen_train.flow_from_directory(
    directory=train_dir,
    batch_size=batch_size,
    shuffle=True,
    target_size=(img_height, img_width),
    class_mode='binary',
)
# Validation input pipeline: rescaling ONLY — validation images must never
# be augmented, otherwise the reported metrics would not reflect real data.
image_generator_validation = ImageDataGenerator(rescale=1./255)

# FIX: shuffle=False (was True). Shuffling validation data gains nothing
# during fit() and breaks the alignment between predictions and
# generator.filenames when this generator is reused for evaluation or
# error inspection.
val_data_gen = image_generator_validation.flow_from_directory(
    directory=validations_dir,
    batch_size=batch_size,
    target_size=(img_height, img_width),
    shuffle=False,
    class_mode='binary',
)
# Small CNN for the binary FAKE-vs-REAL task.
#
# Fixes relative to the original:
# * BUG: input_shape was (img_width, img_height, 3) while both generators
#   produce images of target_size=(img_height, img_width) — Keras
#   target_size is (height, width), so the model's input shape must be
#   (img_height, img_width, 3) or fit() breaks whenever height != width.
# * Dropout sat between Conv2D and BatchNormalization; dropping activations
#   before BN computes its batch statistics skews the moving mean/variance
#   used at inference, a known cause of train/validation divergence. The
#   Dropout layer is now placed after pooling, its conventional position.
classifier = tf.keras.Sequential([
    tf.keras.layers.Conv2D(16, (3, 3), activation='relu',
                           input_shape=(img_height, img_width, 3)),
    tf.keras.layers.BatchNormalization(),
    tf.keras.layers.MaxPooling2D(2, 2),
    tf.keras.layers.Dropout(0.5),
    tf.keras.layers.Flatten(),
    tf.keras.layers.Dense(128, activation='relu'),
    # Two softmax outputs pair with the sparse_categorical_crossentropy
    # loss used at compile time; the generators' 0/1 labels index them.
    tf.keras.layers.Dense(2, activation='softmax')
])
# The training set is heavily imbalanced (~3457 FAKE vs ~675 REAL), so the
# network can reach high training accuracy by predicting FAKE everywhere
# while validation loss climbs — the symptom described above. Weight each
# class inversely to its frequency so both contribute equally to the loss.
# flow_from_directory assigns class indices alphabetically: FAKE -> 0,
# REAL -> 1.
class_weight = {
    0: total_train / (2.0 * num_fake_train),
    1: total_train / (2.0 * num_real_train),
}

classifier.compile(optimizer='adam',
                   loss='sparse_categorical_crossentropy',
                   metrics=['acc'])
history_classifier = classifier.fit(train_data_gen,
                                    epochs=epochs,
                                    validation_data=val_data_gen,
                                    class_weight=class_weight,
                                    verbose=1)