Reduce size of toy network to pass on github runners
Remove xfail for add + crown + multid
nhuet committed Mar 19, 2024
1 parent d3cbec2 commit f3b9754
Showing 2 changed files with 11 additions and 19 deletions.
tests/conftest.py — 22 changes: 11 additions & 11 deletions
@@ -1067,10 +1067,10 @@ def toy_network_tutorial(
         dtype = keras_config.floatx()
     layers = []
     layers.append(Input(input_shape, dtype=dtype))
-    layers.append(Dense(100, dtype=dtype))
+    layers.append(Dense(10, dtype=dtype))
     if activation is not None:
         layers.append(Activation(activation, dtype=dtype))
-    layers.append(Dense(100, dtype=dtype))
+    layers.append(Dense(10, dtype=dtype))
     layers.append(Dense(1, activation="linear", dtype=dtype))
     model = Sequential(layers)
     return model
@@ -1101,14 +1101,14 @@ def toy_network_submodel(
 ) -> Model:
     if dtype is None:
         dtype = keras_config.floatx()
-    submodel_input_shape = input_shape[:-1] + (100,)
+    submodel_input_shape = input_shape[:-1] + (10,)
     layers = []
     layers.append(Input(input_shape, dtype=dtype))
-    layers.append(Dense(100, dtype=dtype))
+    layers.append(Dense(10, dtype=dtype))
     if activation is not None:
         layers.append(Activation(activation, dtype=dtype))
     layers.append(Helpers.toy_network_tutorial(submodel_input_shape, dtype=dtype, activation=activation))
-    layers.append(Dense(100, dtype=dtype))
+    layers.append(Dense(10, dtype=dtype))
     layers.append(Dense(1, activation="linear", dtype=dtype))
     model = Sequential(layers)
     return model
@@ -1120,11 +1120,11 @@ def toy_network_add(
     if dtype is None:
         dtype = keras_config.floatx()
     input_tensor = Input(input_shape, dtype=dtype)
-    output = Dense(100, dtype=dtype)(input_tensor)
+    output = Dense(10, dtype=dtype)(input_tensor)
     if activation is not None:
         output = Activation(activation, dtype=dtype)(output)
     output = Add()([output, output])
-    output = Dense(100, dtype=dtype)(output)
+    output = Dense(10, dtype=dtype)(output)
     if activation is not None:
         output = Activation(activation, dtype=dtype)(output)
     model = Model(inputs=input_tensor, outputs=output)
@@ -1137,11 +1137,11 @@ def toy_network_add_monolayer(
     if dtype is None:
         dtype = keras_config.floatx()
     input_tensor = Input(input_shape, dtype=dtype)
-    output = Dense(100, dtype=dtype)(input_tensor)
+    output = Dense(10, dtype=dtype)(input_tensor)
     if activation is not None:
         output = Activation(activation, dtype=dtype)(output)
     output = Add()([output])
-    output = Dense(100, dtype=dtype)(output)
+    output = Dense(10, dtype=dtype)(output)
     if activation is not None:
         output = Activation(activation, dtype=dtype)(output)
     model = Model(inputs=input_tensor, outputs=output)
@@ -1153,8 +1153,8 @@ def toy_network_tutorial_with_embedded_activation(input_shape: tuple[int, ...] =
         dtype = keras_config.floatx()
     layers = []
     layers.append(Input(input_shape, dtype=dtype))
-    layers.append(Dense(100, activation="relu", dtype=dtype))
-    layers.append(Dense(100, dtype=dtype))
+    layers.append(Dense(10, activation="relu", dtype=dtype))
+    layers.append(Dense(10, dtype=dtype))
     layers.append(Dense(1, activation="linear", dtype=dtype))
     model = Sequential(layers)
     return model
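For context, a minimal, self-contained sketch of what the reduced toy_network_tutorial helper builds after this change. It mirrors the new diff lines above; the surrounding Helpers class, the keras_config-based dtype default, and the exact signature in conftest.py are omitted or assumed here.

from keras.layers import Activation, Dense, Input
from keras.models import Sequential

def toy_network_tutorial(input_shape=(1,), dtype="float32", activation=None):
    # Two hidden Dense(10) layers replace the previous Dense(100) layers,
    # keeping the architecture identical but far smaller.
    layers = [Input(input_shape, dtype=dtype), Dense(10, dtype=dtype)]
    if activation is not None:
        layers.append(Activation(activation, dtype=dtype))
    layers.append(Dense(10, dtype=dtype))
    layers.append(Dense(1, activation="linear", dtype=dtype))
    return Sequential(layers)

# Far fewer parameters than with Dense(100) layers, which is the commit's
# stated goal: keeping the test models small enough for the GitHub runners.
model = toy_network_tutorial(input_shape=(5,), activation="relu")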
tests/test_clone.py — 8 changes: 0 additions & 8 deletions
@@ -57,14 +57,6 @@ def test_clone(
     if toy_model_name == "cnn" and len(input_shape) == 1:
         pytest.skip("cnn not possible on 0d or 1d input.")
 
-    # xfail add model with standard multid input for now (memory issues to be fixed)
-    if (
-        model_decomon_input_metadata["name"] == "standard-multid"
-        and toy_model_name == "add"
-        and method.lower().startswith("crown")
-    ):
-        pytest.xfail("crown on 'add' toy model crashed sometimes with standard-multid, to be investigated.")
-
     slope = Slope.Z_SLOPE
     decimal = 4
 
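The xfail removed above targeted the "add" toy model under CROWN with multidimensional inputs; with the smaller Dense(10) layers that combination is no longer expected to hit the memory issues mentioned in the deleted comment. For reference, a minimal sketch of the functional "add" toy model as it looks after this commit (standalone here, outside the Helpers class, with signature defaults assumed):

from keras.layers import Activation, Add, Dense, Input
from keras.models import Model

def toy_network_add(input_shape=(1,), dtype="float32", activation=None):
    # Functional model whose two branches are the same tensor, merged by Add,
    # now built from Dense(10) layers instead of Dense(100).
    input_tensor = Input(input_shape, dtype=dtype)
    output = Dense(10, dtype=dtype)(input_tensor)
    if activation is not None:
        output = Activation(activation, dtype=dtype)(output)
    output = Add()([output, output])
    output = Dense(10, dtype=dtype)(output)
    if activation is not None:
        output = Activation(activation, dtype=dtype)(output)
    return Model(inputs=input_tensor, outputs=output)

# A multidimensional input shape such as (4, 4, 1) is the kind of case the
# removed xfail covered; Dense applies to the last axis, so this builds fine.
model = toy_network_add(input_shape=(4, 4, 1), activation="relu")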
