Recreate ResNet search space
Mikhael Djajapermana authored and moreib committed Nov 27, 2023
1 parent 5dce3b0 commit 07be227
Showing 8 changed files with 468 additions and 222 deletions.
61 changes: 61 additions & 0 deletions hannah/conf/config_resnet.yaml
@@ -0,0 +1,61 @@
##
## Copyright (c) 2022 University of Tübingen.
##
## This file is part of hannah.
## See https://atreus.informatik.uni-tuebingen.de/ties/ai/hannah/hannah for further info.
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
defaults:
  - base_config
  - override dataset: cifar10 # Dataset configuration name
  - override features: identity # Feature extractor configuration name (use identity for vision datasets)
  - override model: resnet # Neural network name
  - override scheduler: 1cycle # Learning rate scheduler config name
  - override optimizer: adamw # Optimizer config name
  - override normalizer: null # Feature normalizer (used for quantized neural networks)
  - override module: image_classifier # Lightning module config for the training loop (image classifier for image classification tasks)
  - override nas: aging_evolution_nas
  - _self_


# dataset:
# data_folder: ${oc.env:HANNAH_DATA_FOLDER,${hydra:runtime.cwd}/../../datasets/}

experiment_id: "resnet_nas"

seed: [1234]

model:
  num_classes: 10

nas:
  budget: 500
  n_jobs: 4
  total_candidates: 100
  num_selected_candidates: 10
  sampler:
    population_size: 50
    sample_size: 10

module:
  batch_size: 64
  num_workers: 4

trainer:
  max_epochs: 10

scheduler:
  max_lr: 0.001

fx_mac_summary: True
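
The nas settings above drive the aging_evolution_nas sampler selected in the defaults list. As a schematic illustration of how the three main knobs interact (this is not hannah's implementation; random_candidate, mutate, and evaluate are assumed callbacks):

import random
from collections import deque

def aging_evolution(random_candidate, mutate, evaluate,
                    budget=500, population_size=50, sample_size=10):
    population = deque(maxlen=population_size)  # appending evicts the oldest
    history = []
    while len(history) < budget:                # budget bounds evaluations
        if len(population) < population_size:
            candidate = random_candidate()      # warm-up: fill the population
        else:
            tournament = random.sample(list(population), sample_size)
            parent = max(tournament, key=lambda c: c["fitness"])
            candidate = mutate(parent)          # evolve the tournament winner
        candidate["fitness"] = evaluate(candidate)
        population.append(candidate)
        history.append(candidate)
    return max(history, key=lambda c: c["fitness"])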
3 changes: 3 additions & 0 deletions hannah/conf/model/resnet.yaml
@@ -0,0 +1,3 @@
_target_: hannah.models.resnet.models.search_space
name: resnet
num_classes: 10
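
For reference, Hydra resolves the `_target_` path to a callable and forwards the remaining keys as keyword arguments. A minimal sketch of that mechanism (the full signature of search_space is not part of this diff, so further framework-supplied arguments are assumed possible):

from hydra.utils import instantiate
from omegaconf import OmegaConf

cfg = OmegaConf.create({
    "_target_": "hannah.models.resnet.models.search_space",
    "name": "resnet",
    "num_classes": 10,
})
model = instantiate(cfg)  # roughly search_space(name="resnet", num_classes=10)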
55 changes: 55 additions & 0 deletions hannah/models/resnet/blocks.py
@@ -0,0 +1,55 @@
from functools import partial

from hannah.models.embedded_vision_net.expressions import expr_product
from hannah.models.embedded_vision_net.operators import (
    adaptive_avg_pooling, add, batch_norm, choice, conv2d, conv_relu,
    depthwise_conv2d, dynamic_depth, identity, linear, pointwise_conv2d, relu,
)
from hannah.nas.expressions.arithmetic import Ceil
from hannah.nas.expressions.types import Int
from hannah.nas.functional_operators.op import scope
from hannah.nas.parameters.parameters import CategoricalParameter, IntScalarParameter


@scope
def conv_relu_bn(input, out_channels, kernel_size, stride):
    out = conv2d(input, out_channels, kernel_size, stride)
    out = batch_norm(out)
    out = relu(out)
    return out


@scope
def residual(input, main_branch_output_shape):
    input_shape = input.shape()
    in_fmap = input_shape[2]
    out_channels = main_branch_output_shape[1]
    out_fmap = main_branch_output_shape[2]
    stride = Int(Ceil(in_fmap / out_fmap))

    out = conv2d(input, out_channels=out_channels, kernel_size=1, stride=stride, padding=0)
    out = batch_norm(out)
    out = relu(out)
    return out
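
# Shape example: if the main branch reduces a 32x32 feature map to 16x16,
# the shortcut needs stride = Ceil(32 / 16) = 2 so that both branches reach
# the same spatial resolution before the element-wise add; the 1x1 conv
# likewise matches the channel count of the main branch.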


@scope
def block(input, depth, out_channels, kernel_size, stride):
    assert isinstance(depth, IntScalarParameter), "block depth must be of type IntScalarParameter"
    out = input
    exits = []
    for i in range(depth.max + 1):
        out = conv_relu_bn(
            out,
            out_channels=out_channels.new(),
            kernel_size=kernel_size.new(),
            stride=stride.new() if i == 0 else 1,
        )
        exits.append(out)

    out = dynamic_depth(*exits, switch=depth)
    res = residual(input, out.shape())
    out = add(out, res)

    return out
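
# Depth example: with depth.max = 2, three conv_relu_bn stages are built and
# `exits` taps the output after each of them; dynamic_depth then activates the
# exit matching the sampled value of `depth`, so one graph encodes blocks of
# depth 1, 2, or 3. Only the first stage may stride, so the residual shortcut
# has to bridge at most one spatial downsampling.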


@scope
def classifier_head(input, num_classes):
    out = choice(input, adaptive_avg_pooling)
    out = linear(out, num_classes)
    return out
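
A minimal sketch of how these blocks could be assembled into a searchable network; the parameter ranges and name keywords are illustrative assumptions, not the actual hannah.models.resnet.models.search_space (which this diff does not show):

from hannah.models.resnet.blocks import block, classifier_head
from hannah.nas.parameters.parameters import CategoricalParameter, IntScalarParameter

def toy_resnet_space(input, num_classes=10):
    depth = IntScalarParameter(0, 2, name="depth")
    out_channels = IntScalarParameter(16, 64, step_size=8, name="out_channels")
    kernel_size = CategoricalParameter([3, 5], name="kernel_size")
    stride = CategoricalParameter([1, 2], name="stride")

    out = block(input, depth, out_channels, kernel_size, stride)
    return classifier_head(out, num_classes)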
24 changes: 24 additions & 0 deletions hannah/models/resnet/expressions.py
@@ -0,0 +1,24 @@
from hannah.nas.expressions.logic import And, If
from hannah.nas.expressions.arithmetic import Ceil


def padding_expression(kernel_size, stride, dilation=1):
    """Symbolically calculate the padding such that, for the given kernel_size,
    stride and dilation, the output dimension is kept the same (stride=1) or
    halved (stride=2).

    Note: If the input dimension is 1 and stride = 2, the calculated padding
    will result in an output that also has dimension 1.

    Parameters
    ----------
    kernel_size : Union[int, Expression]
    stride : Union[int, Expression]
    dilation : Union[int, Expression], optional
        Dilation of the convolution, by default 1

    Returns
    -------
    Expression
    """
    # r = 1 - (kernel_size % 2)
    p = (dilation * (kernel_size - 1) - stride + 1) / 2
    return Ceil(p)
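
A quick plain-integer check of this formula (math.ceil standing in for the symbolic Ceil), verified against the standard convolution output-size formula:

import math

def padding(kernel_size, stride, dilation=1):
    return math.ceil((dilation * (kernel_size - 1) - stride + 1) / 2)

def out_dim(in_dim, kernel_size, stride, padding, dilation=1):
    # standard convolution output-size formula
    return (in_dim + 2 * padding - dilation * (kernel_size - 1) - 1) // stride + 1

assert out_dim(32, 3, 1, padding(3, 1)) == 32  # stride 1 keeps the size
assert out_dim(32, 3, 2, padding(3, 2)) == 16  # stride 2 halves it
assert out_dim(32, 5, 2, padding(5, 2)) == 16  # independent of kernel size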
