Even with trainable = False, Dropout will still drop units during training
from keras import layers
from keras import models
from keras import backend as K
import numpy as np
inp = layers.Input(shape=(10,))
out = layers.Dropout(0.5)(inp)
model = models.Model(inp, out)
model.layers[-1].trainable = False # set the dropout layer as non-trainable
model.compile(optimizer='adam', loss='mse') # IMPORTANT: always re-compile the model after changing the `trainable` attribute
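# Note: `trainable` only freezes a layer's weights; Dropout has no weights,
# so whether units are dropped is decided by the learning phase, not by the
# `trainable` flag.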
# create a custom backend function so that we can control the learning phase
func = K.function(model.inputs + [K.learning_phase()], model.outputs)
x = np.ones((1,10))
# learning phase = 1, i.e. training mode
print(func([x, 1]))
# the output will be something like (which units are dropped is random):
# [array([[2., 2., 2., 0., 0., 2., 2., 2., 0., 0.]], dtype=float32)]
# as you can see, some of the neurons have been dropped, and the kept ones
# are scaled by 1 / (1 - 0.5) = 2
# now set learning phase = 0, i.e test mode
print(func([x, 0]))
# the output will be:
# [array([[1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]], dtype=float32)]
# unsurprisingly, no neurons have been dropped in the test phase
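If you are on TF 2.x, where K.learning_phase() is deprecated (and unavailable in recent versions), the same check can be done by passing the training argument when calling the model. A minimal sketch, assuming tensorflow 2.x is installed:

import numpy as np
import tensorflow as tf

inp = tf.keras.layers.Input(shape=(10,))
out = tf.keras.layers.Dropout(0.5)(inp)
model = tf.keras.Model(inp, out)

x = np.ones((1, 10), dtype='float32')

# training=True applies dropout: some units are zeroed, the rest scaled by 1/(1-0.5)
print(model(x, training=True).numpy())
# training=False disables dropout: the input passes through unchanged
print(model(x, training=False).numpy())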