In the line x = (Conv2D(16, (3, 3), padding="same", input_shape=x)) you never passed an input to the layer, so when the code reaches x = (Activation("relu")(x)), x is still a layer object rather than a tensor, and Keras raises the error above.
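To make the distinction concrete, here is a minimal sketch (the shape and filter count are arbitrary, and the layer classes are assumed to be imported as in the code below):

inputs = Input(shape=(100, 100, 3))
conv = Conv2D(16, (3, 3), padding="same")   # a layer object, not a tensor
x = conv(inputs)                            # a tensor: the layer applied to the input
Activation("relu")(x)      # works: x is a Keras tensor
Activation("relu")(conv)   # fails: conv is a layer, not a tensor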
So, as mentioned in the comment, you have to call the first layer on the input tensor. The edited code is shown below (note that I used tensorflow.keras instead of keras):
from tensorflow.compat.v1.keras.layers import AveragePooling2D
from tensorflow.compat.v1.keras.models import Sequential, Model
from tensorflow.compat.v1.keras.layers import Input, Dense, Dropout, Flatten, Activation, BatchNormalization, Conv2D, MaxPooling2D
from tensorflow.compat.v1.keras import backend as K
class SmallerVGGNet:
    @staticmethod
    def build(width, height, depth, classes, finalAct="softmax"):
        x = (height, width, depth)   # input shape (channels-last)
        output = -1                  # channel axis for BatchNormalization
        # CONV => RELU => POOL
        inputs = Input(shape=x)
        x = Conv2D(16, (3, 3), padding="same")(inputs)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = Conv2D(32, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = MaxPooling2D(pool_size=(3, 3))(x)
        x = BatchNormalization(axis=output)(x)
        # (CONV => RELU) * 2 => POOL
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = Conv2D(64, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = AveragePooling2D(pool_size=(2, 2))(x)
        # (CONV => RELU) * 2 => POOL
        x = Conv2D(128, (3, 3), padding="same")(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = Conv2D(128, (3, 3))(x)
        x = Activation("relu")(x)
        x = BatchNormalization(axis=output)(x)
        x = MaxPooling2D(pool_size=(2, 2))(x)
        # first (and only) set of FC => RELU layers
        x = Flatten()(x)
        x = Dense(128)(x)
        x = Activation("relu")(x)
        x = BatchNormalization()(x)
        x = Dropout(0.5)(x)
        # softmax classifier
        x = Dense(classes)(x)
        x = Activation(finalAct)(x)
        model = Model(inputs, x)
        model.summary()
        return model

model = SmallerVGGNet.build(100, 100, 100, 10)
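As a quick sanity check, you can run the returned model on random data, for example (the batch size is arbitrary; this assumes build returns the model as in the edited code above):

import numpy as np
dummy = np.random.rand(2, 100, 100, 100).astype("float32")  # batch of 2, matching the (100, 100, 100) input shape
preds = model.predict(dummy)
print(preds.shape)   # (2, 10): one softmax distribution per sample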