Modify network arch

branch master
author Wojciech Janota, 2 months ago
parent 23448a84e7
commit b2402f4336

@@ -29,13 +29,13 @@ class NeuralNetworkEncoder:
         self.model = keras.Sequential([
             layers.Reshape((512, 512, 1), input_shape=(262144,)),
             #layers.InputLayer(input_shape=(512 * 512, 1, 1)),
-            layers.Conv2D(32, (3, 3), activation=internal_activation_function, padding='same'),
-            layers.MaxPooling2D((2, 2)),
-            layers.Conv2D(64, (3, 3), activation=internal_activation_function, padding='same'),
+            layers.Conv2D(256, (3, 3), activation=internal_activation_function, padding='same'),
             layers.MaxPooling2D((2, 2)),
             layers.Conv2D(128, (3, 3), activation=internal_activation_function, padding='same'),
             layers.MaxPooling2D((2, 2)),
-            layers.Conv2D(256, (3, 3), activation=internal_activation_function, padding='same'),
+            layers.Conv2D(64, (3, 3), activation=internal_activation_function, padding='same'),
+            layers.MaxPooling2D((2, 2)),
+            layers.Conv2D(32, (3, 3), activation=internal_activation_function, padding='same'),
             layers.Flatten(),
             layers.Dense(64, activation=external_activation_function)
         ])
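For context, the commit reverses the convolutional filter ordering from increasing (32 -> 64 -> 128 -> 256) to decreasing (256 -> 128 -> 64 -> 32) while keeping the Reshape front end and the Dense(64) bottleneck. Below is a minimal sketch of the encoder as it stands after this commit. It assumes the surrounding class wiring not shown in the hunk: that the two activation functions are constructor parameters, and the placeholder defaults ('relu', 'sigmoid') are illustrative only.

# Sketch of NeuralNetworkEncoder after this commit; constructor signature and
# activation defaults are assumptions, only the Sequential stack comes from the diff.
from tensorflow import keras
from tensorflow.keras import layers


class NeuralNetworkEncoder:
    def __init__(self,
                 internal_activation_function='relu',      # assumed default
                 external_activation_function='sigmoid'):  # assumed default
        # Flattened 512x512 grayscale input (262144 values) is reshaped back to an image,
        # then filters run 256 -> 128 -> 64 -> 32, the reverse of the previous ordering.
        self.model = keras.Sequential([
            layers.Reshape((512, 512, 1), input_shape=(262144,)),
            layers.Conv2D(256, (3, 3), activation=internal_activation_function, padding='same'),
            layers.MaxPooling2D((2, 2)),
            layers.Conv2D(128, (3, 3), activation=internal_activation_function, padding='same'),
            layers.MaxPooling2D((2, 2)),
            layers.Conv2D(64, (3, 3), activation=internal_activation_function, padding='same'),
            layers.MaxPooling2D((2, 2)),
            layers.Conv2D(32, (3, 3), activation=internal_activation_function, padding='same'),
            layers.Flatten(),
            layers.Dense(64, activation=external_activation_function)
        ])


if __name__ == '__main__':
    # Build and inspect the layer shapes: three poolings reduce 512x512 to 64x64,
    # so Flatten feeds 64*64*32 = 131072 features into the Dense(64) bottleneck.
    encoder = NeuralNetworkEncoder()
    encoder.model.summary()

Note that placing the widest convolution (256 filters) at full 512x512 resolution makes this the most memory- and compute-intensive layer of the stack; the previous ordering grew the filter count only as pooling shrank the spatial dimensions.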
