From 9086ae4d05b56b16329f79c7f0f779ba3efdb773 Mon Sep 17 00:00:00 2001
From: cmoineau
Date: Fri, 29 Jul 2022 11:46:01 +0000
Subject: [PATCH] [DeepNetGenerator] Fix issue when unpacking ONNX tensor to
 retrieve padding.

---
 src/Generator/DeepNetGenerator.cpp | 145 +++++++++++++++--------------
 1 file changed, 74 insertions(+), 71 deletions(-)

diff --git a/src/Generator/DeepNetGenerator.cpp b/src/Generator/DeepNetGenerator.cpp
index 8fb40bd8..b1ad9153 100755
--- a/src/Generator/DeepNetGenerator.cpp
+++ b/src/Generator/DeepNetGenerator.cpp
@@ -2523,11 +2523,12 @@ void N2D2::DeepNetGenerator::ONNX_processGraph(
 
             std::vector<int> paddingDimsBegin;
             std::vector<int> paddingDimsEnd;
-            if (node.input_size() >= 1) {
-                Tensor<int64_t> pad;
-                // See changelog opsetVersion 11 : https://github.com/onnx/onnx/blob/main/docs/Changelog.md#Pad-11
-                // TLDR : pads changed from an attribute to an input.
-                if (opsetVersion < 11) {
+
+            Tensor<int64_t> pad;
+            // See changelog opsetVersion 11 : https://github.com/onnx/onnx/blob/main/docs/Changelog.md#Pad-11
+            // TLDR : pads changed from an attribute to an input.
+            if (opsetVersion < 11) {
+                if(node.input_size() > 1){
                     if ((itAttr = attribute.find("pads")) != attribute.end()){
                         for (int dim = 0; dim < (*itAttr).second->ints_size(); ++dim)
                             pad.push_back((*itAttr).second->ints(dim));
@@ -2538,85 +2539,87 @@ void N2D2::DeepNetGenerator::ONNX_processGraph(
                             throw std::runtime_error(msgStr.str());
                         }
                     }else{
-                        if ((itInit = initializer.find(node.input(1))) != initializer.end())
-                            pad = ONNX_unpackTensor<int64_t>((*itInit).second);
-                        else{
-                            std::stringstream msgStr;
-                            msgStr << "  No initializer for \"" << node.input(1)
-                                << "\"" << std::endl;
-                            throw std::runtime_error(msgStr.str());
-                        }
+                        std::cout << "  No initializer for Padding operation, it will be ignored" << std::endl;
+                        std::cout << Utils::cnotice << "  Ignore Padding operation"
+                            << Utils::cdef << std::endl;
+                        std::cout << "  " << node.output(0) << " -> "
+                            << redirectName(node.input(0)) << std::endl;
+                        redirect[node.output(0)] = redirectName(node.input(0));
+                        continue;
                     }
-                assert(pad.size() % 2 == 0);
-                const int offset = pad.size() / 2;
-
-                for (int dim = 0; dim < offset; ++dim) {
-                    paddingDimsBegin.push_back(pad(dim));
-                    paddingDimsEnd.push_back(pad(offset + dim));
+            }else{ // opsetVersion > 11
+                if ((itInit = initializer.find(node.input(1))) != initializer.end())
+                    pad.push_back(ONNX_unpackTensor<int64_t>((*itInit).second));
+                else{
+                    std::stringstream msgStr;
+                    msgStr << "  No initializer for \"" << node.input(1)
+                        << "\"" << std::endl;
+                    throw std::runtime_error(msgStr.str());
                 }
+            }
 
-                std::reverse(paddingDimsBegin.begin(), paddingDimsBegin.end());
-                std::reverse(paddingDimsEnd.begin(), paddingDimsEnd.end());
+            assert(pad.size() % 2 == 0);
+            const int offset = pad.size() / 2;
 
-                const std::string inputX = redirectName(node.input(0));
-                std::shared_ptr<Cell> inputXCell = getCell(inputX);
+            for (int dim = 0; dim < offset; ++dim) {
+                paddingDimsBegin.push_back(pad(dim));
+                paddingDimsEnd.push_back(pad(offset + dim));
+            }
 
-                std::map<std::string, std::vector<std::string> >
-                    ::const_iterator itConcat;
-                std::vector<std::shared_ptr<Cell> > parentCells;
+            std::reverse(paddingDimsBegin.begin(), paddingDimsBegin.end());
+            std::reverse(paddingDimsEnd.begin(), paddingDimsEnd.end());
 
-                if (globTranspose) {
-                    std::swap(paddingDimsBegin[0], paddingDimsBegin[1]);
-                    std::swap(paddingDimsEnd[0], paddingDimsEnd[1]);
-                }
-                const unsigned int nbOutputs = (cell)
-                    ? cell->getNbOutputs()
-                    : sp->getNbChannels();
-                std::shared_ptr<Cell> paddingCell = Registrar<PaddingCell>
-                    ::create(model)(*deepNet,
-                        node.output(0),
-                        nbOutputs,
-                        paddingDimsBegin[1],
-                        paddingDimsEnd[1],
-                        paddingDimsBegin[0],
-                        paddingDimsEnd[0]);
+            const std::string inputX = redirectName(node.input(0));
+            std::shared_ptr<Cell> inputXCell = getCell(inputX);
 
-                if ((itConcat = concat.find(inputX)) != concat.end()) {
-                    for (unsigned int i = 0; i < (*itConcat).second.size(); ++i) {
-                        const std::string input = (*itConcat).second[i];
-                        std::shared_ptr<Cell> inputCell = getCell(input);
-                        parentCells.push_back(inputCell);
+            std::map<std::string, std::vector<std::string> >
+                ::const_iterator itConcat;
+            std::vector<std::shared_ptr<Cell> > parentCells;
 
-                        paddingCell->addInput(inputCell.get());
-                    }
-                }
-                else {
-                    std::shared_ptr<Cell> inputXCell = getCell(inputX);
-                    parentCells.push_back(inputXCell);
+            if (globTranspose) {
+                std::swap(paddingDimsBegin[0], paddingDimsBegin[1]);
+                std::swap(paddingDimsEnd[0], paddingDimsEnd[1]);
+            }
+            const unsigned int nbOutputs = (cell)
+                ? cell->getNbOutputs()
+                : sp->getNbChannels();
+            std::shared_ptr<Cell> paddingCell = Registrar<PaddingCell>
+                ::create(model)(*deepNet,
+                    node.output(0),
+                    nbOutputs,
+                    paddingDimsBegin[1],
+                    paddingDimsEnd[1],
+                    paddingDimsBegin[0],
+                    paddingDimsEnd[0]);
 
-                    if (inputXCell)
-                        paddingCell->addInput(inputXCell.get());
-                    else {
-                        paddingCell->addInput(*sp, 0, 0,
-                            sp->getSizeX(), sp->getSizeY());
-                    }
-                }
+            if ((itConcat = concat.find(inputX)) != concat.end()) {
+                for (unsigned int i = 0; i < (*itConcat).second.size(); ++i) {
+                    const std::string input = (*itConcat).second[i];
+                    std::shared_ptr<Cell> inputCell = getCell(input);
+                    parentCells.push_back(inputCell);
 
-                deepNet->addCell(paddingCell, parentCells);
-                paddingCell->initialize();
-                cell = paddingCell;
-                continue;
-                // }
-            }
-            std::cout << "  No initializer for Padding operation, it will be ignored" << std::endl;
+                    paddingCell->addInput(inputCell.get());
+                }
+            }
+            else {
+                std::shared_ptr<Cell> inputXCell = getCell(inputX);
+                parentCells.push_back(inputXCell);
 
-            std::cout << Utils::cnotice << "  Ignore Padding operation"
-                << Utils::cdef << std::endl;
+                if (inputXCell)
+                    paddingCell->addInput(inputXCell.get());
+                else {
+                    paddingCell->addInput(*sp, 0, 0,
+                        sp->getSizeX(), sp->getSizeY());
+                }
+            }
 
-            std::cout << "  " << node.output(0) << " -> "
-                << redirectName(node.input(0)) << std::endl;
-            redirect[node.output(0)] = redirectName(node.input(0));
+            deepNet->addCell(paddingCell, parentCells);
+            paddingCell->initialize();
+            cell = paddingCell;
             continue;
+            // }
+
+
         }
         //Pow
         //QLinearConv
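
Note on the pads layout this patch relies on: per the ONNX spec, "pads" is a
flattened list [x1_begin, x2_begin, ..., x1_end, x2_end, ...], which is why
the code asserts an even size, splits at pad.size() / 2, and then reverses
both halves to match N2D2's innermost-first dimension order. Below is a
minimal standalone sketch of that unpacking, outside the patch; the helper
name splitOnnxPads is illustrative, not N2D2 API.

#include <algorithm>
#include <cassert>
#include <cstdint>
#include <vector>

// Split a flattened ONNX "pads" vector into per-dimension begin/end
// amounts, then reverse both so that index 0 is the innermost dimension.
static void splitOnnxPads(const std::vector<std::int64_t>& pads,
                          std::vector<int>& paddingDimsBegin,
                          std::vector<int>& paddingDimsEnd)
{
    assert(pads.size() % 2 == 0);
    const std::size_t offset = pads.size() / 2;

    for (std::size_t dim = 0; dim < offset; ++dim) {
        paddingDimsBegin.push_back(static_cast<int>(pads[dim]));
        paddingDimsEnd.push_back(static_cast<int>(pads[offset + dim]));
    }

    std::reverse(paddingDimsBegin.begin(), paddingDimsBegin.end());
    std::reverse(paddingDimsEnd.begin(), paddingDimsEnd.end());
}

// Example: for an NCHW input, pads = {0, 0, 1, 2, 0, 0, 1, 2} yields
// paddingDimsBegin = {2, 1, 0, 0} and paddingDimsEnd = {2, 1, 0, 0}:
// after the reverse, index 0 is W (left/right = 2) and index 1 is H
// (top/bottom = 1), matching the [1] then [0] entries passed to the
// PaddingCell in the patch.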