TensorOrWeights.hpp
/*
* SPDX-License-Identifier: Apache-2.0
*/
#pragma once
#include "ShapedWeights.hpp"
#include <NvInfer.h>
#include <cassert>
namespace onnx2trt
{
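// Tagged union used throughout the ONNX-to-TensorRT importer: a value is either
// a TensorRT ITensor produced by the network (NODE_TENSOR) or constant
// initializer data held as ShapedWeights (NODE_WEIGHTS).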
class TensorOrWeights
{
    union
    {
        nvinfer1::ITensor* _tensor;
        ShapedWeights _weights;
    };
    enum
    {
        NODE_TENSOR,
        NODE_WEIGHTS
    } _variant;
public:
    // Default-constructed instances hold a null tensor.
    TensorOrWeights()
        : _tensor(nullptr)
        , _variant(NODE_TENSOR)
    {
    }
    TensorOrWeights(nvinfer1::ITensor* tensor)
        : _tensor(tensor)
        , _variant(NODE_TENSOR)
    {
    }
    TensorOrWeights(ShapedWeights const& weights)
        : _weights(weights)
        , _variant(NODE_WEIGHTS)
    {
    }
    bool is_tensor() const
    {
        return _variant == NODE_TENSOR;
    }
    bool is_weights() const
    {
        return _variant == NODE_WEIGHTS;
    }
    // True only for the tensor variant when the stored pointer is null.
    bool isNullTensor() const
    {
        return is_tensor() && _tensor == nullptr;
    }
    // tensor() asserts that this does not hold a null tensor; weights() asserts
    // that the weights variant is active.
    nvinfer1::ITensor& tensor()
    {
        assert(!isNullTensor());
        return *_tensor;
    }
    nvinfer1::ITensor const& tensor() const
    {
        assert(!isNullTensor());
        return *_tensor;
    }
    ShapedWeights& weights()
    {
        assert(is_weights());
        return _weights;
    }
    ShapedWeights const& weights() const
    {
        assert(is_weights());
        return _weights;
    }
    nvinfer1::Dims shape() const
    {
        return is_tensor() ? _tensor->getDimensions() : _weights.shape;
    }
    // True if this holds a non-null tensor or non-empty weights.
    explicit operator bool() const
    {
        return is_tensor() ? _tensor != nullptr : static_cast<bool>(_weights);
    }
    bool isInt32() const
    {
        return is_tensor() ? _tensor->getType() == nvinfer1::DataType::kINT32
                           : _weights.type == ::ONNX_NAMESPACE::TensorProto_DataType_INT32;
    }
    bool isBool() const
    {
        return is_tensor() ? _tensor->getType() == nvinfer1::DataType::kBOOL
                           : _weights.type == ::ONNX_NAMESPACE::TensorProto_DataType_BOOL;
    }
    std::string getName() const
    {
        return is_tensor() ? _tensor->getName() : _weights.getName();
    }
    // Human-readable type name; for weights, entries such as "DOUBLE -> FLOAT"
    // indicate the conversion applied during import.
    std::string getType() const
    {
        if (is_tensor())
        {
            switch (_tensor->getType())
            {
            case nvinfer1::DataType::kFLOAT: return "FLOAT";
            case nvinfer1::DataType::kHALF: return "HALF";
            case nvinfer1::DataType::kINT8: return "INT8";
            case nvinfer1::DataType::kINT32: return "INT32";
            case nvinfer1::DataType::kBOOL: return "BOOL";
            default: return "UNKNOWN TYPE";
            }
        }
        else
        {
            switch (_weights.type)
            {
            case ::ONNX_NAMESPACE::TensorProto::DOUBLE: return "DOUBLE -> FLOAT";
            case ::ONNX_NAMESPACE::TensorProto::FLOAT: return "FLOAT";
            case ::ONNX_NAMESPACE::TensorProto::INT8: return "INT8";
            case ::ONNX_NAMESPACE::TensorProto::FLOAT16: return "HALF";
            case ::ONNX_NAMESPACE::TensorProto::BOOL: return "BOOL";
            case ::ONNX_NAMESPACE::TensorProto::INT32: return "INT32";
            case ::ONNX_NAMESPACE::TensorProto::INT64: return "INT64 -> INT32";
            default: return "UNKNOWN TYPE";
            }
        }
    }
};
} // namespace onnx2trt
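
/*
 * Illustrative usage sketch (not part of the upstream header): how a caller
 * might branch on the active variant using only the accessors declared above.
 * The function name logInput is hypothetical.
 *
 *   #include <iostream>
 *
 *   inline void logInput(onnx2trt::TensorOrWeights const& input)
 *   {
 *       if (input.isNullTensor())
 *       {
 *           std::cout << "null tensor\n";
 *           return;
 *       }
 *       if (input.is_tensor())
 *       {
 *           // Tensor variant: name, type, and dimensions come from the ITensor.
 *           std::cout << "tensor " << input.getName() << ": " << input.getType()
 *                     << ", " << input.shape().nbDims << " dims\n";
 *       }
 *       else
 *       {
 *           // Weights variant: constant initializer data held in ShapedWeights.
 *           std::cout << "weights " << input.getName() << ": " << input.getType()
 *                     << ", " << input.shape().nbDims << " dims\n";
 *       }
 *   }
 */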