
I want to build a CNN model with image augmentation from an image dataset. When I try to load the augmented image data, it shows the following error: ValueError: Shapes (None, None, None) and (None, 12, 28, 64) are incompatible

Steps performed:
• Training data:
  o train_images: normalized pixel values
  o train_labels: stored as one-hot encoded data
• Validation data:
  o val_images: normalized pixel values
  o val_labels: stored as one-hot encoded data
• Classes: "squiggle", "narrowband", "narrowbanddrd", and "noise" (a small sketch of the expected one-hot layout follows this list)
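
For reference, this is roughly what the one-hot layout for the four classes should look like (a minimal sketch; the class-to-index order here is my assumption, not taken from the dataset):

import numpy as np
import tensorflow as tf

# Hypothetical class order; the real mapping depends on how the labels were exported.
classes = ["squiggle", "narrowband", "narrowbanddrd", "noise"]

# Integer class indices for three example samples.
label_indices = np.array([0, 2, 3])

# to_categorical turns integer indices into one-hot rows of length len(classes).
one_hot = tf.keras.utils.to_categorical(label_indices, num_classes=len(classes))
print(one_hot)
# [[1. 0. 0. 0.]
#  [0. 0. 1. 0.]
#  [0. 0. 0. 1.]]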

• Prepare a detailed Python notebook that uses a CNN in Keras to classify radio signals from deep space in the SETI dataset
• Import the required libraries
• Load and preprocess the dataset
  o Load the dataset with the pandas read_csv function
  o Check the shapes of the training and validation data
  o Reshape the training and validation images

• Create training and validation data generators with the Keras ImageDataGenerator function
• Design the convolutional neural network (CNN) model
• Compile the model with the Adam optimizer, the categorical_crossentropy loss function, and the accuracy metric
• Print the model summary
• Train the model with batch_size = 32 and epochs = 12
• Evaluate the model (see the short sketch after this list)
  o Evaluate accuracy with the model.evaluate function
  o Print the result
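
The summary and evaluation steps are never reached because training fails with the error below, but for reference they would look roughly like this (a minimal sketch reusing the names model, x_valid_imge, and y_valid from the code further down):

# Print the layer-by-layer architecture and parameter counts.
model.summary()

# model.evaluate returns [loss, accuracy] because the model is compiled
# with metrics=['accuracy'].
val_loss, val_accuracy = model.evaluate(x_valid_imge, y_valid, batch_size=32)
print("Validation accuracy:", val_accuracy)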

Here is the code:

import tensorflow
import keras
import tensorflow as tf
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt

x_train=pd.read_csv("/content/images1.csv")
y_train=pd.read_csv("/content/labels1.csv")

x_valid=pd.read_csv("/content/images_2.csv")
y_valid=pd.read_csv("/content/labels_2.csv")

# Scale pixel values to [0, 1]
x_train=x_train/255
x_valid=x_valid/255

# Drop the "Unnamed: 0" index column picked up from the CSV files
x_valid.drop("Unnamed: 0",axis=1,inplace=True)
y_valid.drop("Unnamed: 0",axis=1,inplace=True)

x_train.drop("Unnamed: 0",axis=1,inplace=True)
y_train.drop("Unnamed: 0",axis=1,inplace=True)

# Reshape each flattened row into a 64 x 128 x 1 image array
x_train_img=np.array([x_train.iloc[i] for i in range(x_train.shape[0])])
x_train_imge=np.array([x_train_img[i].reshape(64,128,1) for i in range(x_train_img.shape[0])])

x_valid_img=np.array([x_valid.iloc[i] for i in range(x_valid.shape[0])])
x_valid_imge=np.array([x_valid_img[i].reshape(64,128,1) for i in range(x_valid_img.shape[0])])

y_train = tf.keras.utils.to_categorical(y_train)
y_valid = tf.keras.utils.to_categorical(y_valid)

datagen_train = tf.keras.preprocessing.image.ImageDataGenerator(
    featurewise_center=True,
    featurewise_std_normalization=True,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    horizontal_flip=True,
    validation_split=0.2)

datagen_train.fit(x_train_imge)

datagen_valid = tf.keras.preprocessing.image.ImageDataGenerator(
    featurewise_center=True,
    featurewise_std_normalization=True,
    rotation_range=20,
    width_shift_range=0.2,
    height_shift_range=0.2,
    horizontal_flip=True,
    validation_split=0.2)

datagen_valid.fit(x_valid_imge)

import tensorflow as tf
from tensorflow.keras import datasets, layers, models
model=models.Sequential()
model.add(layers.Conv2D(32,(3,3),activation="relu",input_shape=x_train_imge.shape[1:]))
model.add(layers.MaxPooling2D((2,2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))
model.add(layers.MaxPooling2D((2, 2)))
model.add(layers.Conv2D(64, (3, 3), activation='relu'))

model.compile(optimizer='adam',
              loss=tf.keras.losses.CategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])
model.fit(datagen_train.flow(x_train_imge,y_train),epochs=12,batch_size=32,validation_data=datagen_valid.flow(x_valid_imge,y_valid))

    

ValueError: in user code:

/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:855 train_function  *
    return step_function(self, iterator)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:845 step_function  **
    outputs = model.distribute_strategy.run(run_step, args=(data,))
/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:1285 run
    return self._extended.call_for_each_replica(fn, args=args, kwargs=kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:2833 call_for_each_replica
    return self._call_for_each_replica(fn, args, kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/distribute/distribute_lib.py:3608 _call_for_each_replica
    return fn(*args, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:838 run_step  **
    outputs = model.train_step(data)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/training.py:797 train_step
    y, y_pred, sample_weight, regularization_losses=self.losses)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/engine/compile_utils.py:204 __call__
    loss_value = loss_obj(y_t, y_p, sample_weight=sw)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/losses.py:155 __call__
    losses = call_fn(y_true, y_pred)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/losses.py:259 call  **
    return ag_fn(y_true, y_pred, **self._fn_kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper
    return target(*args, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/losses.py:1644 categorical_crossentropy
    y_true, y_pred, from_logits=from_logits)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/util/dispatch.py:206 wrapper
    return target(*args, **kwargs)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/keras/backend.py:4862 categorical_crossentropy
    target.shape.assert_is_compatible_with(output.shape)
/usr/local/lib/python3.7/dist-packages/tensorflow/python/framework/tensor_shape.py:1161 assert_is_compatible_with
    raise ValueError("Shapes %s and %s are incompatible" % (self, other))

ValueError: Shapes (None, None, None) and (None, 12, 28, 64) are incompatible
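
For what it's worth, (None, 12, 28, 64) is exactly the output shape of the last Conv2D layer for a 64x128x1 input, so the loss is being computed against a 4-D feature map rather than a (None, 4) class prediction; the (None, None, None) side suggests the labels end up 3-D, which can happen when to_categorical is applied to labels that are already one-hot encoded. A minimal sketch of the kind of classification head I think is missing (the 64-unit Dense layer is an arbitrary choice; only the final Dense(4) follows from the four classes):

from tensorflow.keras import layers

# Hypothetical classification head appended after the last Conv2D layer.
model.add(layers.Flatten())                    # (None, 12, 28, 64) -> (None, 21504)
model.add(layers.Dense(64, activation='relu')) # arbitrary hidden size
model.add(layers.Dense(4))                     # logits for the 4 classes (from_logits=True in the loss)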
