Okay, I came up with a solution. It's ugly, but it is a solution:
import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import Layer, Input, Dense
from tensorflow.keras.models import Model
from tensorflow.keras.losses import mean_squared_error as MSE

class UtilityLayer(Layer):
    # A layer holding a single trainable, non-negative scalar; it ignores its input.
    def build(self, input_shape):
        self.kernel = self.add_weight(
            name='kernel',
            shape=(1,),
            initializer='ones',
            trainable=True,
            constraint=tf.keras.constraints.NonNeg()  # 'nonneg' is not a valid constraint alias
        )
        super().build(input_shape)

    def call(self, inputs, **kwargs):
        # The input only serves to hook the layer into the graph.
        return self.kernel
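As a quick sanity check (a sketch, assuming TF 2.x eager execution; not part of the solution itself), the layer returns its scalar weight no matter what you feed it:

layer = UtilityLayer()
print(layer(tf.zeros((4, 1))))  # tf.Tensor([1.], shape=(1,), dtype=float32) initially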
switch = -1    # flips sign on every loss invocation
last_loss = 0

def custom_loss_builder(utility_layer):
    def custom_loss(y_true, y_pred):
        global switch, last_loss
        switch *= -1
        if switch == 1:
            # First call of the pair: scale the MSE by the trainable lambda.
            last_loss = utility_layer.trainable_weights[0] * MSE(y_true, y_pred)
            return last_loss  # your network loss
        else:
            # Second call (for the dummy output): reuse the cached value.
            return last_loss  # your lambda loss
    return custom_loss

Why this works: the model below has two outputs but a single loss function, so Keras invokes custom_loss twice per batch, once per output. The switch makes the first call (the real output) compute lambda * MSE and the second call (the dummy utility output) return the same value again, so gradients flow into both the network weights and lambda.
# x and y are your training data; the dummy target is never actually used.
dummy_y = np.empty(len(x))

inputs = Input(shape=(1,))
hidden = Dense(2, activation='relu')(inputs)  # renamed from `x` so it does not shadow the training data
outputs = Dense(1)(hidden)
utility_layer = UtilityLayer()
utility_outputs = utility_layer(inputs)

model = Model(inputs, [outputs, utility_outputs])
# Pass the layer object directly; model.layers[-1] is not guaranteed to be the UtilityLayer.
model.compile(optimizer='adam', loss=custom_loss_builder(utility_layer))
model.fit(x, [y, dummy_y], epochs=100)
And the evolution of your lambda:
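One minimal way to record and plot that evolution (a sketch: matplotlib and the tracking callback below are assumptions, not part of the solution above):

import matplotlib.pyplot as plt
from tensorflow.keras.callbacks import LambdaCallback

lambda_history = []
record_lambda = LambdaCallback(
    on_epoch_end=lambda epoch, logs: lambda_history.append(
        float(utility_layer.get_weights()[0][0])
    )
)

# Same fit call as above, with the tracking callback attached.
model.fit(x, [y, dummy_y], epochs=100, callbacks=[record_lambda])

plt.plot(lambda_history)
plt.xlabel('epoch')
plt.ylabel('lambda')
plt.show()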