0 votes
import glob
import numpy as np
from skimage.io import imread            # assuming scikit-image for imread/resize
from skimage.transform import resize

def datagen_train(batch, X_train, y_train):
    lx = len(X_train)

    # loop forever so fit_generator never runs out of data between epochs
    while True:
        for i in range(lx // batch):
            xx = []
            yy = []
            for j in range(batch):
                # index into the current batch instead of always reading the first `batch` files
                xx.append(resize(imread(X_train[i * batch + j]), (720, 1280, 3)))
                yy.append(resize(imread(y_train[i * batch + j]), (720, 1280, 3)))
            xx = np.array(xx)
            yy = np.array(yy)
            print(i, xx.shape, yy.shape)
            yield xx, yy

x = np.array(glob.glob(r'val_blur/*/*'))
y = np.array(glob.glob(r'val_sharp/*/*'))

from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(x, y, test_size=0.2, random_state=42)
print(X_train.shape, y_train.shape)
print(X_test.shape, y_test.shape)

(2400,) (2400,)
(600,) (600,)

mo = sr.build(1280, 720, 3)
mo.compile(loss="mse", optimizer=opt)
mo.summary()
Model: "sequential_7"

Layer (type) Output Shape Param #

conv2d_19 (Conv2D) (None, 712, 1272, 64) 15616


activation_19 (Activation) (None, 712, 1272, 64) 0


conv2d_20 (Conv2D) (None, 712, 1272, 32) 2080


activation_20 (Activation) (None, 712, 1272, 32) 0


conv2d_21 (Conv2D) (None, 708, 1268, 3) 2403


activation_21 (Activation) (None, 708, 1268, 3) 0

Total params: 20,099 Trainable params: 20,099 Non-trainable params: 0
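
For context, the output shape at the end of this summary already explains the error that follows: with the default padding='valid', each Conv2D shrinks the spatial dimensions by kernel_size - 1. The parameter counts imply kernels of 9, 1 and 5 (a SRCNN-style stack; square kernels are an assumption), so a minimal sanity check is:

def trace_valid_shrinkage():
    # Kernel sizes 9, 1, 5 are inferred from the Param # column above
    # (e.g. 9*9*3*64 + 64 = 15616), so treat them as an assumption.
    h, w = 720, 1280
    for k in (9, 1, 5):
        h -= k - 1
        w -= k - 1
    print(h, w)   # -> 708 1268, exactly the target shape reported in the error below

trace_valid_shrinkage()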


h = mo.fit_generator(datagen_train(batch,X_train,y_train),
                     epochs=epo,steps_per_epoch=len(X_train)//batch)
Epoch 1/20
0 (1, 720, 1280, 3) (1, 720, 1280, 3)
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-31-3abccc87c768> in <module>
      1 h = mo.fit_generator(datagen_train(batch,X_train,y_train),
----> 2                      epochs=epo,steps_per_epoch=len(X_train)//batch)

C:\ProgramData\Anaconda3\lib\site-packages\keras\legacy\interfaces.py in wrapper(*args, **kwargs)
     89                 warnings.warn('Update your `' + object_name + '` call to the ' +
     90                               'Keras 2 API: ' + signature, stacklevel=2)
---> 91             return func(*args, **kwargs)
     92         wrapper._original_function = func
     93         return wrapper

C:\ProgramData\Anaconda3\lib\site-packages\keras\engine\training.py in fit_generator(self, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, validation_freq, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
   1730             use_multiprocessing=use_multiprocessing,
   1731             shuffle=shuffle,
-> 1732             initial_epoch=initial_epoch)
   1733 
   1734     @interfaces.legacy_generator_methods_support

C:\ProgramData\Anaconda3\lib\site-packages\keras\engine\training_generator.py in fit_generator(model, generator, steps_per_epoch, epochs, verbose, callbacks, validation_data, validation_steps, validation_freq, class_weight, max_queue_size, workers, use_multiprocessing, shuffle, initial_epoch)
    218                                             sample_weight=sample_weight,
    219                                             class_weight=class_weight,
--> 220                                             reset_metrics=False)
    221 
    222                 outs = to_list(outs)

C:\ProgramData\Anaconda3\lib\site-packages\keras\engine\training.py in train_on_batch(self, x, y, sample_weight, class_weight, reset_metrics)
   1506             x, y,
   1507             sample_weight=sample_weight,
-> 1508             class_weight=class_weight)
   1509         if self._uses_dynamic_learning_phase():
   1510             ins = x + y + sample_weights + [1]

C:\ProgramData\Anaconda3\lib\site-packages\keras\engine\training.py in _standardize_user_data(self, x, y, sample_weight, class_weight, check_array_lengths, batch_size)
    619                 feed_output_shapes,
    620                 check_batch_axis=False,  # Don't enforce the batch size.
--> 621                 exception_prefix='target')
    622 
    623             # Generate sample-wise weight values given the `sample_weight` and

C:\ProgramData\Anaconda3\lib\site-packages\keras\engine\training_utils.py in standardize_input_data(data, names, shapes, check_batch_axis, exception_prefix)
    143                             ': expected ' + names[i] + ' to have shape ' +
    144                             str(shape) + ' but got array with shape ' +
--> 145                             str(data_shape))
    146     return data
    147 

ValueError: Error when checking target: expected activation_21 to have shape (708, 1268, 3) but got array with shape (720, 1280, 3)
1 (1, 720, 1280, 3) (1, 720, 1280, 3)
print("[INFO] serializing model...")
mo.save('mode/', overwrite=False)

In the code above I am getting the ValueError shown. What correction is needed?

1 Answer

1 vote

You could either tweak the strides, kernel_size, and/or padding arguments of the Conv2D layers in your model so that the final output has the size you want (e.g., (720, 1280, 3)), or you could add a Lambda layer that resizes the final output (or the layer before the final activation) to the desired shape. You could follow the answer to this question to create such a resizing Lambda layer. It is used as follows:

try:
    # newer TF 1.x signature: the target size is passed as a (height, width) tuple
    out = keras.layers.Lambda(lambda image: keras.backend.tf.image.resize_images(image, (720, 1280)))(previous_layer_output)
except Exception:
    # older versions of tensorflow take new_height and new_width as separate arguments
    out = keras.layers.Lambda(lambda image: keras.backend.tf.image.resize_images(image, 720, 1280))(previous_layer_output)
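
For the first option, here is a minimal sketch of a rebuilt model with padding='same', so the output keeps the (720, 1280, 3) shape of your targets. It assumes sr.build produces the 9-1-5 Conv2D stack implied by the summary (kernel sizes inferred from the parameter counts) and uses ReLU activations as a placeholder:

from keras.models import Sequential
from keras.layers import Conv2D, Activation

def build_same_padding(width=1280, height=720, depth=3):
    # Same layer widths as in the summary, but padding='same' keeps the
    # spatial dimensions at (height, width) through every layer, so the
    # target images no longer need cropping or resizing.
    model = Sequential()
    model.add(Conv2D(64, (9, 9), padding='same', input_shape=(height, width, depth)))
    model.add(Activation('relu'))          # activation type is an assumption
    model.add(Conv2D(32, (1, 1), padding='same'))
    model.add(Activation('relu'))
    model.add(Conv2D(3, (5, 5), padding='same'))
    model.add(Activation('relu'))
    return model

mo = build_same_padding(1280, 720, 3)      # hypothetical drop-in for sr.build(1280, 720, 3)

With this change the last activation outputs (None, 720, 1280, 3), which matches the target arrays produced by your generator, so no Lambda resizing is needed.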