【发布时间】:2019-12-05 02:48:45
【问题描述】:
我是机器学习的新手,我使用以下代码在 MNISET 数据集上创建示例 VAE
# We are going to use MINISET Dataset to train our GAN.
# All imports are going to be in this place.
import numpy as np
# Progressbar
from keras.callbacks import TensorBoard
from tqdm import tqdm
import matplotlib.pyplot as plt
from keras.datasets import mnist
from keras.layers import Conv2D
from keras.models import Model
from keras.layers import Input
from keras.layers import Dense
from keras.layers import LeakyReLU
from keras.layers import Dropout
from keras.layers import MaxPooling2D
from keras.layers import BatchNormalization
from keras.layers import Flatten
# All Defs will be defined here
# Importing training data of MINISET
def loadData():
# y_train contains the labels, numbers are 1, 2 or 5, 7 etc.
(x_train, y_train), (x_test, y_test) = mnist.load_data()
# Normalizing based on color 255
x_train = (x_train.astype(np.float32) - 127.5) / 127.5
# convert shape of x_train from (60000, 28, 28) to (60000, 784)
# 784 columns per row
# x_train = x_train.reshape(60000, 784)
return (x_train, y_train, x_test, y_test)
def getEncoder():
# This returns a tensor
inputs = Input(shape=(28, 28, 1))
# Making a sample AlexNet Model Layer 1
encoder = Conv2D(392, (4, 4), padding='same', activation='relu')(inputs)
encoder = MaxPooling2D((4, 4), strides=(1, 1), padding='same')(encoder)
encoder = BatchNormalization()(encoder)
# Making a sample AlexNet Model Layer 2
encoder = Conv2D(196, (2, 2), padding='same', activation='relu')(encoder)
encoder = MaxPooling2D((4, 4), strides=(1, 1), padding='same')(encoder)
encoder = BatchNormalization()(encoder)
# Making a sample AlexNet Model Layer 3
encoder = Conv2D(98, (2, 2), padding='same', activation='relu')(encoder)
encoder = MaxPooling2D((2, 2), strides=(1, 1), padding='same')(encoder)
encoder = BatchNormalization()(encoder)
#encoder = Flatten()(encoder)
#encoder = Dense(2)(encoder)
#encoder = Dropout(0.4)(encoder)
model = Model(inputs=inputs, outputs=encoder)
model.compile(optimizer='adam', loss='binary_crossentropy')
# model.fit(data, labels) # starts training
print(model.summary())
return model;
def getDecoder():
# This returns a tensor
inputs = Input(shape=(98,))
# a layer instance is callable on a tensor, and returns a tensor
disc = Dense(196)(inputs)
disc = LeakyReLU(alpha=0.2)(disc)
disc = Dropout(0.3)(disc)
disc = Dense(392)(disc)
disc = LeakyReLU(alpha=0.2)(disc)
disc = Dropout(0.3)(disc)
disc = Dense(784, activation='sigmoid')(disc)
disc = LeakyReLU(alpha=0.2)(disc)
model = Model(inputs=inputs, outputs=disc)
model.compile(optimizer='rmsprop', loss='binary_crossentropy')
# starts training
# model.fit(data, labels)
print(model.summary())
return model;
def createVAE(decoder, encoder):
inputs = Input(shape=(28, 28, 1))
gen = encoder(inputs)
output = decoder(gen)
vae = Model(inputs=inputs, outputs=output)
print(vae.summary())
vae.compile(loss='binary_crossentropy', optimizer='rmsprop')
return vae
def doTraining(epochs=1, batchSize=128):
# Loading the data
(mniTrainX, mniTrainY, mniTestX, mniTestY) = loadData()
# Creating GAN
encoder = getEncoder()
decoder = getDecoder()
vae = createVAE(decoder, encoder)
vae.fit(mniTrainX, mniTrainX,
epochs=epochs,
batch_size=batchSize,
shuffle=True,
validation_data=(mniTestX, mniTestX),
callbacks = [TensorBoard(log_dir='/tmp/autoencoder')])
doTraining(10, 128)
VAE的模型总结是
None
Model: "model_3"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
input_3 (InputLayer) (None, 28, 28, 1) 0
_________________________________________________________________
model_1 (Model) (None, 28, 28, 98) 393862
_________________________________________________________________
model_2 (Model) multiple 404740
=================================================================
Total params: 798,602
Trainable params: 797,230
Non-trainable params: 1,372
现在它给出错误
ValueError: Error when checking input: expected input_3 to have 4 dimensions, but got array with shape (60000, 28, 28)
我在这里缺少什么,第 4 维是什么。
【问题讨论】:
标签: tensorflow machine-learning keras deep-learning keras-layer