#22 add SimpleNN implementation for ZeroPadding1D
This layer is not used directly by the targeted models, but since the CompiledNN implementation of Conv1D will require zero padding anyway, we might as well support it all the way.
fthielke committed May 23, 2024
1 parent 85730f2 commit 9671576
Showing 3 changed files with 62 additions and 0 deletions.
18 changes: 18 additions & 0 deletions Src/CompiledNN/Formats/KerasHDF5.cpp
@@ -508,6 +508,23 @@ namespace NeuralNetwork
    return layer;
  }

  std::unique_ptr<Layer> parseZeroPadding1DLayer(const SimpleMap::Record* config, const KerasHDF5::GetWeights2FuncType&, unsigned long)
  {
    const SimpleMap::Array* padding = getRecordEntry<SimpleMap::Array>(config, "padding");
    const std::string dataFormat = getLiteral<std::string>(getRecordEntry<SimpleMap::Literal>(config, "data_format"));

    if(dataFormat != "channels_last")
      FAIL("Data formats other than channels last are not supported.");
    ASSERT(padding->size() == 2);
    const unsigned int leftPadding = getLiteral<unsigned int>(getArrayEntry<SimpleMap::Literal>(padding, 0));
    const unsigned int rightPadding = getLiteral<unsigned int>(getArrayEntry<SimpleMap::Literal>(padding, 1));

    std::unique_ptr<ZeroPadding1DLayer> layer = std::make_unique<ZeroPadding1DLayer>();
    layer->padding[ZeroPadding1DLayer::LEFT] = leftPadding;
    layer->padding[ZeroPadding1DLayer::RIGHT] = rightPadding;
    return layer;
  }

  std::unique_ptr<Layer> parseZeroPadding2DLayer(const SimpleMap::Record* config, const KerasHDF5::GetWeights2FuncType&, unsigned long)
  {
    const SimpleMap::Array* padding = getRecordEntry<SimpleMap::Array>(config, "padding");
@@ -801,6 +818,7 @@ namespace NeuralNetwork
layerParsers.emplace("DepthwiseConv2D", &parseDepthwiseConv2DLayer);
layerParsers.emplace("Cropping2D", &parseCropping2DLayer);
layerParsers.emplace("UpSampling2D", &parseUpSampling2DLayer);
layerParsers.emplace("ZeroPadding1D", &parseZeroPadding1DLayer);
layerParsers.emplace("ZeroPadding2D", &parseZeroPadding2DLayer);
// Pooling layers
layerParsers.emplace("MaxPooling1D", &parseMaxPooling1DLayer);
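For reference, Keras writes the padding attribute of a ZeroPadding1D layer as a two-element list even when the layer was constructed with a single integer (the scalar is normalized to a pair), which is presumably what the ASSERT(padding->size() == 2) above relies on. A minimal, purely illustrative sketch of the layer the new parser would produce for a config of {"padding": [1, 2], "data_format": "channels_last"}; the values are made up for illustration:

#include <memory>
// plus the ZeroPadding1DLayer definition from Src/CompiledNN/Model.h

// Illustrative only: hand-building the layer that parseZeroPadding1DLayer
// would return for {"padding": [1, 2], "data_format": "channels_last"}.
std::unique_ptr<NeuralNetwork::Layer> makeExamplePadding()
{
  using NeuralNetwork::ZeroPadding1DLayer;
  auto layer = std::make_unique<ZeroPadding1DLayer>();
  layer->padding[ZeroPadding1DLayer::LEFT] = 1;  // zeros prepended along the steps axis
  layer->padding[ZeroPadding1DLayer::RIGHT] = 2; // zeros appended along the steps axis
  return layer;
}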
15 changes: 15 additions & 0 deletions Src/CompiledNN/Model.h
@@ -38,6 +38,7 @@ namespace NeuralNetwork
    flatten,
    cropping2D,
    upSampling2D,
    zeroPadding1D,
    zeroPadding2D,
    concatenate,
    average,
@@ -398,6 +399,20 @@ namespace NeuralNetwork
    void calcOutputDimensions(Node& node) const override;
  };

  struct ZeroPadding1DLayer : Layer
  {
    enum Side
    {
      LEFT,
      RIGHT,
    };
    std::array<unsigned int, 2> padding;

    ZeroPadding1DLayer() : Layer(LayerType::zeroPadding1D) {}

    void calcOutputDimensions(Node& node) const override;
  };

  struct ZeroPadding2DLayer : Layer
  {
    enum Side
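The definition of calcOutputDimensions for the new layer is not among the three files in this commit; by analogy with ZeroPadding2DLayer, it presumably grows the steps dimension by the total padding and leaves the channel dimension untouched. A standalone sketch of that shape rule, with a name and types that are illustrative rather than CompiledNN API:

#include <array>

// Illustrative shape rule for ZeroPadding1D, not CompiledNN's actual code:
// (steps, channels) -> (steps + left + right, channels).
std::array<unsigned int, 2> zeroPadding1DOutputDims(const std::array<unsigned int, 2>& in,
                                                    unsigned int left, unsigned int right)
{
  return {in[0] + left + right, in[1]};
}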
29 changes: 29 additions & 0 deletions Src/CompiledNN/SimpleNN.cpp
@@ -366,6 +366,30 @@ namespace NeuralNetwork
    }
  }

  void apply(const TensorXf& input, TensorXf& output, const ZeroPadding1DLayer& layer)
  {
    ASSERT(input.rank() == 2);
    ASSERT(output.rank() == 2);

    std::vector<unsigned int> i(2);

    // dims(0) is the steps axis, dims(1) the channel axis.
    for(i[0] = 0; i[0] < output.dims(0); i[0]++)
    {
      if(i[0] < layer.padding[ZeroPadding1DLayer::LEFT] || output.dims(0) - i[0] <= layer.padding[ZeroPadding1DLayer::RIGHT])
      {
        // Row lies in the left or right padding region: write zeros to all channels.
        for(i[1] = 0; i[1] < output.dims(1); i[1]++)
          output(i) = 0.f;
      }
      else
      {
        // Copy the corresponding input row, shifted by the left padding.
        std::vector<unsigned int> i_ = i;
        i_[0] = i[0] - layer.padding[ZeroPadding1DLayer::LEFT];
        for(i_[1] = i[1] = 0; i_[1] < output.dims(1); i_[1]++, i[1]++)
          output(i) = input(i_);
      }
    }
  }

  void apply(const TensorXf& input, TensorXf& output, const ZeroPadding2DLayer& layer)
  {
    ASSERT(input.rank() == 3);
@@ -813,6 +837,11 @@ namespace NeuralNetwork
        ASSERT(output.size() == 1);
        Impl::apply(*input[0], *output[0], *static_cast<const UpSampling2DLayer*>(node.layer));
        break;
      case LayerType::zeroPadding1D:
        ASSERT(input.size() == 1);
        ASSERT(output.size() == 1);
        Impl::apply(*input[0], *output[0], *static_cast<const ZeroPadding1DLayer*>(node.layer));
        break;
      case LayerType::zeroPadding2D:
        ASSERT(input.size() == 1);
        ASSERT(output.size() == 1);
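As a sanity check on the loop above: a self-contained sketch of the same semantics on a flat row-major (steps x channels) buffer, independent of CompiledNN's TensorXf. Rows in [0, left) and [steps + left, steps + left + right) stay zero; every other row is the corresponding input row shifted by the left padding. All names here are illustrative:

#include <cassert>
#include <vector>

// Reference semantics of ZeroPadding1D on a flat row-major buffer.
std::vector<float> zeroPad1D(const std::vector<float>& input, unsigned int steps,
                             unsigned int channels, unsigned int left, unsigned int right)
{
  assert(input.size() == static_cast<std::size_t>(steps) * channels);
  // All rows start as zeros; the padding rows simply stay that way.
  std::vector<float> output(static_cast<std::size_t>(steps + left + right) * channels, 0.f);
  for(unsigned int s = 0; s < steps; s++)
    for(unsigned int c = 0; c < channels; c++)
      output[(s + left) * channels + c] = input[s * channels + c];
  return output;
}

// E.g. steps = 3, channels = 2, left = 1, right = 2 gives 6 output rows:
// one zero row, the three input rows, then two zero rows.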
