【Question Title】: How to solve the "total size of new array must be unchanged" error in Python?
【Posted】: 2024-01-04 03:56:01
【Question Description】:

I am implementing the following model:

import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import Input
from tensorflow.keras.layers import Conv2D, Conv2DTranspose, Dense, Dropout, Reshape


def ConnectomeCNNAutoencoder(input_shape, keep_pr=0.65, n_filter=32, n_dense1=64, n_classes=2,
                             mode="autoencoder", sign="neg"):

    input_1 = Input(shape=input_shape)
    # Convolutional Encoder
    bias_init = tf.constant_initializer(value=0.001)
    conv1 = Conv2D(filters=n_filter, kernel_size=(1, input_shape[1]), strides=(1, 1),
                   padding="valid", activation="selu",  # "selu"
                   kernel_initializer="glorot_uniform",
                   bias_initializer=bias_init, name="conv1")(input_1)
    dropout1 = Dropout(keep_pr, name="dropout1")(conv1)
    conv2 = Conv2D(filters=n_filter*2, kernel_size=(input_shape[1], 1), strides=(1, 1),
                   padding="valid", activation="selu",
                   kernel_initializer="glorot_uniform",
                   bias_initializer=bias_init, name="conv2")(dropout1)
    encoded = Dropout(keep_pr, name="dropout2")(conv2)

    # Classification
    reshape = Reshape((n_filter*2,), name="reshape1")(encoded)
    dense1 = Dense(n_dense1, activation="selu", name="dense1", kernel_regularizer=keras.regularizers.l1_l2())(reshape)

    if n_classes == 1:
        activation = "sigmoid"
    else:
        activation = "softmax"
    output = Dense(n_classes, activation=activation, name="output")(dense1)

    # Decoder
    dense2 = Dense(n_dense1, activation="selu", name="dense2")(output)
    dim_reconstruct = tuple(encoded.get_shape().as_list())
    reshape2 = Reshape(dim_reconstruct[1:], name="reshape2")(dense2)

    conv3 = Conv2DTranspose(filters=n_filter*2, kernel_size=(1, 1), strides=(1, 1),
                            padding="valid", activation="selu",  # "selu"
                            kernel_initializer="glorot_uniform",
                            bias_initializer=bias_init, name="conv3")(reshape2)
    conv4 = Conv2DTranspose(filters=n_filter, kernel_size=(input_shape[1], 1), strides=(1, 1),
                            padding="valid", activation="selu",  # "selu"
                            kernel_initializer="glorot_uniform",
                            bias_initializer=bias_init, name="conv4")(conv3)

    if sign == "pos":
        reconstructed_activation = "sigmoid"
    elif sign == "neg":
        reconstructed_activation = "tanh"

    reconstructed_input = Conv2DTranspose(filters=input_shape[-1], kernel_size=(1, input_shape[1]), strides=(1, 1),
                                          padding="valid", activation=reconstructed_activation,
                                          kernel_initializer="glorot_uniform",
                                          bias_initializer=bias_init, name='autoencoder')(conv4)

    if mode == "autoencoder":
        model = keras.models.Model(inputs=input_1, outputs=[output, reconstructed_input])
    elif mode == "encoder":
        model = keras.models.Model(inputs=input_1, outputs=encoded)
    elif mode == "decoder":
        model = keras.models.Model(inputs=input_1, outputs=reconstructed_input)
    return model

The model works fine with n_filter=32 and n_dense1=64, but when I change these variables to other values, this error pops up: "ValueError: total size of new array must be unchanged". I know it is related to the Reshape layer used for reshape2, but I don't know how to fix it.
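
For reference, the error can be reproduced with a call like the following (the input shape here is only an assumed example; any square input behaves the same way):

model = ConnectomeCNNAutoencoder(input_shape=(32, 32, 1))               # defaults (n_filter=32, n_dense1=64): builds fine
model = ConnectomeCNNAutoencoder(input_shape=(32, 32, 1), n_filter=30)  # ValueError: total size of new array must be unchanged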

我该如何解决这个问题?

谢谢!

【Question Discussion】:

    Tags: arrays keras reshape autoencoder


    【Solution 1】:

    The problem is in this line:

    reshape2 = Reshape(dim_reconstruct[1:], name="reshape2")(dense2)
    

    The tensor dense2 must be reshapeable into dim_reconstruct[1:]: the product of the values in dim_reconstruct[1:] has to equal the number of elements in dense2 (excluding the zeroth dimension, the batch size, which Keras leaves out when deriving a tensor's dimensions).

    If n_filter = 30, dim_reconstruct[1:] will be (1, 1, 60), because n_filter gets multiplied by 2. So the Dense layer feeding the Reshape (dense2) must have exactly as many units as the product of the values in (1, 1, 60), i.e. 60.

    I couldn't find an image illustrating a 1-D to 3-D reshape, but the 2-D case shows the same rule: you cannot fit the array [1, 2, 3, 4, 5] into a 2x3 array, but you can reshape [1, 2, 3, 4, 5, 6] into something like [[1, 2, 3], [4, 5, 6]].
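
    The same size rule can be checked directly with NumPy (a minimal standalone sketch of the constraint behind the error message):

        import numpy as np

        a = np.arange(1, 7)      # 6 elements
        print(a.reshape(2, 3))   # OK: [[1 2 3], [4 5 6]]

        b = np.arange(1, 6)      # 5 elements
        try:
            b.reshape(2, 3)      # 5 != 2 * 3, so the total size would have to change
        except ValueError as e:
            print(e)             # e.g. "cannot reshape array of size 5 into shape (2,3)"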

    So you can either pass n_dense1=60 when calling ConnectomeCNNAutoencoder, or derive the required size automatically:

        # Decoder
        dim_reconstruct = tuple(encoded.get_shape().as_list())  # e.g. (None, 1, 1, 60)
        n_dense2 = np.prod(dim_reconstruct[1:])  # 1 * 1 * 60 = 60
        dense2 = Dense(n_dense2, activation="selu", name="dense2")(output)
        reshape2 = Reshape(dim_reconstruct[1:], name="reshape2")(dense2)
    

    Full example (I removed some arguments that were equal to their default values):

    import numpy as np
    import tensorflow as tf
    from tensorflow import keras
    from tensorflow.keras import Input
    from tensorflow.keras.layers import Conv2D, Dropout, Reshape, Dense, Conv2DTranspose
    
    
    def ConnectomeCNNAutoencoder(input_shape,
                                 keep_pr=0.65,
                                 n_filter=32,
                                 n_dense1=64,
                                 n_classes=2,
                                 mode="autoencoder",
                                 sign="neg"):
        input_1 = Input(shape=input_shape)
        # Convolutional Encoder
        bias_init = tf.constant_initializer(value=0.001)
        conv1 = Conv2D(filters=n_filter,
                       kernel_size=(1, input_shape[1]),
                       strides=(1, 1),
                       activation="selu",  # "selu"
                       bias_initializer=bias_init,
                       name="conv1")(input_1)
        dropout1 = Dropout(keep_pr, name="dropout1")(conv1)
        conv2 = Conv2D(filters=n_filter * 2,
                       kernel_size=(input_shape[1], 1),
                       strides=(1, 1),
                       activation="selu",
                       bias_initializer=bias_init,
                       name="conv2")(dropout1)
        encoded = Dropout(keep_pr, name="dropout2")(conv2)
    
        # Classification
        reshape = Reshape((n_filter * 2,), name="reshape1")(encoded)
        dense1 = Dense(n_dense1,
                       activation="selu",
                       name="dense1",
                       kernel_regularizer=keras.regularizers.l1_l2())(reshape)
    
        if n_classes == 1:
            activation = "sigmoid"
        else:
            activation = "softmax"
    
        output = Dense(n_classes, activation=activation, name="output")(dense1)
    
        # Decoder - Changes here
        dim_reconstruct = tuple(encoded.get_shape().as_list())
        n_dense2 = np.prod(dim_reconstruct[1:])
        dense2 = Dense(n_dense2, activation="selu", name="dense2")(output)
        reshape2 = Reshape(dim_reconstruct[1:], name="reshape2")(dense2)
    
        conv3 = Conv2DTranspose(filters=n_filter * 2,
                                kernel_size=(1, 1),
                                strides=(1, 1),
                                activation="selu",  # "selu"
                                bias_initializer=bias_init,
                                name="conv3")(reshape2)
        conv4 = Conv2DTranspose(filters=n_filter,
                                kernel_size=(input_shape[1], 1),
                                strides=(1, 1),
                                activation="selu",  # "selu"
                                bias_initializer=bias_init,
                                name="conv4")(conv3)
    
        if sign == "pos":
            reconstructed_activation = "sigmoid"
        elif sign == "neg":
            reconstructed_activation = "tanh"
    
        reconstructed_input = Conv2DTranspose(filters=input_shape[-1],
                                              kernel_size=(1, input_shape[1]),
                                              strides=(1, 1),
                                              activation=reconstructed_activation,
                                              bias_initializer=bias_init,
                                              name='autoencoder')(conv4)
    
        if mode == "autoencoder":
            model = keras.models.Model(inputs=input_1, outputs=[output, reconstructed_input])
        elif mode == "encoder":
            model = keras.models.Model(inputs=input_1, outputs=encoded)
        elif mode == "decoder":
            model = keras.models.Model(inputs=input_1, outputs=reconstructed_input)
        else:
            raise ValueError("Unexpected mode: %s" % mode)
        return model
    
    
    model = ConnectomeCNNAutoencoder((32, 32, 3), n_filter=30, n_dense1=65)
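
    A quick sanity check of the model built above (the expected shapes follow from the (32, 32, 3) input and n_filter=30 used in the call; this snippet is a sketch, not part of the fix itself):

    model.summary()
    # The encoder output ("dropout2") is (None, 1, 1, 60), so "dense2" gets 60 units
    # and "reshape2" maps it back to (None, 1, 1, 60) without a size mismatch.
    x = np.random.rand(4, 32, 32, 3).astype("float32")
    class_probs, reconstruction = model(x)
    print(class_probs.shape)     # (4, 2)
    print(reconstruction.shape)  # (4, 32, 32, 3)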
    

    【Discussion】:

    • Thank you so much for the explanation, @MikhailStepanov! You just saved my day!