Commit

Merge branch 'FixDeepNetGenerator' into 'master'
cmoineau committed Jul 29, 2022
2 parents fa1fdce + 9086ae4 commit fb61e27
Showing 1 changed file with 74 additions and 71 deletions.
145 changes: 74 additions & 71 deletions src/Generator/DeepNetGenerator.cpp
@@ -2523,11 +2523,12 @@ void N2D2::DeepNetGenerator::ONNX_processGraph(
 
             std::vector<int> paddingDimsBegin;
             std::vector<int> paddingDimsEnd;
-            if (node.input_size() >= 1) {
+
             Tensor<int64_t> pad;
             // See changelog opsetVersion 11 : https://github.com/onnx/onnx/blob/main/docs/Changelog.md#Pad-11
             // TLDR : pads changed from an attribute to an input.
             if (opsetVersion < 11) {
                 if ((itAttr = attribute.find("pads")) != attribute.end()){
                     for (int dim = 0; dim < (*itAttr).second->ints_size(); ++dim)
                         pad.push_back((*itAttr).second->ints(dim));
@@ -2538,85 +2539,87 @@ void N2D2::DeepNetGenerator::ONNX_processGraph(
                     throw std::runtime_error(msgStr.str());
                 }
-            }else{
-                if ((itInit = initializer.find(node.input(1))) != initializer.end())
-                    pad = ONNX_unpackTensor<int64_t>((*itInit).second);
-                else{
-                    std::stringstream msgStr;
-                    msgStr << "  No initializer for \"" << node.input(1)
-                        << "\"" << std::endl;
-                    throw std::runtime_error(msgStr.str());
-                }
+            }else{ // opsetVersion > 11
+                if(node.input_size() > 1){
+                    if ((itInit = initializer.find(node.input(1))) != initializer.end())
+                        pad.push_back(ONNX_unpackTensor<int64_t>((*itInit).second));
+                    else{
+                        std::stringstream msgStr;
+                        msgStr << "  No initializer for \"" << node.input(1)
+                            << "\"" << std::endl;
+                        throw std::runtime_error(msgStr.str());
+                    }
+                }else{
+                    std::cout << "  No initializer for Padding operation, it will be ignored" << std::endl;
+                    std::cout << Utils::cnotice << "  Ignore Padding operation"
+                        << Utils::cdef << std::endl;
+                    std::cout << "  " << node.output(0) << " -> "
+                        << redirectName(node.input(0)) << std::endl;
+                    redirect[node.output(0)] = redirectName(node.input(0));
+                    continue;
+                }
             }
 
             assert(pad.size() % 2 == 0);
             const int offset = pad.size() / 2;
 
             for (int dim = 0; dim < offset; ++dim) {
                 paddingDimsBegin.push_back(pad(dim));
                 paddingDimsEnd.push_back(pad(offset + dim));
             }
 
             std::reverse(paddingDimsBegin.begin(), paddingDimsBegin.end());
             std::reverse(paddingDimsEnd.begin(), paddingDimsEnd.end());
 
             const std::string inputX = redirectName(node.input(0));
             std::shared_ptr<Cell> inputXCell = getCell(inputX);
 
             std::map<std::string, std::vector<std::string> >
                 ::const_iterator itConcat;
             std::vector<std::shared_ptr<Cell> > parentCells;
 
             if (globTranspose) {
                 std::swap(paddingDimsBegin[0], paddingDimsBegin[1]);
                 std::swap(paddingDimsEnd[0], paddingDimsEnd[1]);
             }
             const unsigned int nbOutputs = (cell)
                 ? cell->getNbOutputs()
                 : sp->getNbChannels();
             std::shared_ptr<PaddingCell> paddingCell = Registrar
                 <PaddingCell>::create(model)(*deepNet,
                 node.output(0),
                 nbOutputs,
                 paddingDimsBegin[1],
                 paddingDimsEnd[1],
                 paddingDimsBegin[0],
                 paddingDimsEnd[0]);
 
             if ((itConcat = concat.find(inputX)) != concat.end()) {
                 for (unsigned int i = 0; i < (*itConcat).second.size(); ++i) {
                     const std::string input = (*itConcat).second[i];
                     std::shared_ptr<Cell> inputCell = getCell(input);
                     parentCells.push_back(inputCell);
 
                     paddingCell->addInput(inputCell.get());
                 }
             }
             else {
                 std::shared_ptr<Cell> inputXCell = getCell(inputX);
                 parentCells.push_back(inputXCell);
 
                 if (inputXCell)
                     paddingCell->addInput(inputXCell.get());
                 else {
                     paddingCell->addInput(*sp, 0, 0,
                         sp->getSizeX(), sp->getSizeY());
                 }
             }
 
             deepNet->addCell(paddingCell, parentCells);
             paddingCell->initialize();
             cell = paddingCell;
             continue;
             // }
-            }
-            std::cout << "  No initializer for Padding operation, it will be ignored" << std::endl;
-
-            std::cout << Utils::cnotice << "  Ignore Padding operation"
-                << Utils::cdef << std::endl;
-            std::cout << "  " << node.output(0) << " -> "
-                << redirectName(node.input(0)) << std::endl;
-            redirect[node.output(0)] = redirectName(node.input(0));
         }
         //Pow
         //QLinearConv
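The "TLDR" comment in the first hunk is the heart of the fix: before opset 11, a Pad node carries its padding amounts in a "pads" ints attribute, while from opset 11 onward they arrive as a second input tensor, normally backed by an initializer. The sketch below is illustrative only and not part of N2D2; it assumes the ONNX C++ protobuf bindings (onnx/onnx_pb.h) and the helper names are made up.

    // Illustrative only: the same padding attached to a Pad node before
    // and after ONNX opset 11. Assumes the ONNX protobuf C++ bindings
    // (onnx/onnx_pb.h); these helpers are not part of N2D2.
    #include <cstdint>
    #include <iostream>
    #include <onnx/onnx_pb.h>

    onnx::NodeProto makePadOpset2() {
        onnx::NodeProto node;
        node.set_op_type("Pad");
        node.add_input("x");                        // data is the only input
        onnx::AttributeProto* pads = node.add_attribute();
        pads->set_name("pads");
        pads->set_type(onnx::AttributeProto::INTS);
        for (int64_t v : {0, 0, 1, 2, 0, 0, 1, 2})  // pads as an attribute
            pads->add_ints(v);
        return node;
    }

    onnx::NodeProto makePadOpset11() {
        onnx::NodeProto node;
        node.set_op_type("Pad");
        node.add_input("x");
        node.add_input("pads");  // pads is now input #2; if it is omitted,
                                 // node.input_size() == 1
        return node;
    }

    int main() {
        std::cout << makePadOpset2().attribute_size() << " attribute(s) vs "
                  << makePadOpset11().input_size() << " input(s)\n";
        return 0;
    }

A node built the second way but serialized without its "pads" input has input_size() == 1, yet the deleted code looked up node.input(1) unconditionally; the added node.input_size() > 1 guard instead ignores the Pad operation and redirects its output to its input.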
Expand Down
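The unpacking step kept by the merge is also worth spelling out: ONNX lays pads out as [x1_begin, x2_begin, ..., x1_end, x2_end, ...], outermost dimension first, and the two std::reverse calls turn each half into innermost-first order before the values are handed to the PaddingCell. A minimal standalone sketch of that split-and-reverse step, using std::vector in place of N2D2's Tensor<int64_t> and made-up NCHW example values:

    // Standalone sketch of the pads split-and-reverse logic merged above.
    // std::vector stands in for N2D2's Tensor<int64_t>; the values are
    // made up for illustration.
    #include <algorithm>
    #include <cassert>
    #include <cstdint>
    #include <iostream>
    #include <vector>

    int main() {
        // Example ONNX "pads" for a 4-D NCHW tensor: 1 px on H, 2 px on W,
        // laid out as [x1_begin, x2_begin, ..., x1_end, x2_end, ...].
        const std::vector<int64_t> pad = {0, 0, 1, 2, 0, 0, 1, 2};

        assert(pad.size() % 2 == 0);
        const int offset = static_cast<int>(pad.size()) / 2;

        // First half of pads = begin values, second half = end values.
        std::vector<int> paddingDimsBegin;
        std::vector<int> paddingDimsEnd;
        for (int dim = 0; dim < offset; ++dim) {
            paddingDimsBegin.push_back(static_cast<int>(pad[dim]));
            paddingDimsEnd.push_back(static_cast<int>(pad[offset + dim]));
        }

        // ONNX orders dimensions outermost-first; reversing yields
        // innermost-first, so index [0] holds the width (X) padding and
        // index [1] the height (Y) padding for an NCHW input.
        std::reverse(paddingDimsBegin.begin(), paddingDimsBegin.end());
        std::reverse(paddingDimsEnd.begin(), paddingDimsEnd.end());

        std::cout << "pad left/right: " << paddingDimsBegin[0] << "/"
                  << paddingDimsEnd[0] << ", top/bottom: "
                  << paddingDimsBegin[1] << "/" << paddingDimsEnd[1]
                  << std::endl;
        return 0;
    }

For the example values this prints pad left/right: 2/2, top/bottom: 1/1, i.e. the pads of 1 on H and 2 on W end up innermost-first, matching the paddingDimsBegin[1]/[0] argument order used when the PaddingCell is created in the diff.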
