Skip to content

Commit b9136bb

Browse files
committed
[TRT] Fix Upsample TRT LayerBuilder plugin and 3D bugs
1 parent 3bf8a7b commit b9136bb

File tree

3 files changed

+52
-52
lines changed

3 files changed

+52
-52
lines changed

source/tnn/device/atlas/atlas_common_types.h

-4
Original file line number · Diff line number · Diff line change
@@ -25,10 +25,6 @@
2525
#include "tnn/core/blob.h"
2626
#include "tnn/core/macro.h"
2727

28-
///////////////////////
29-
#include <iostream>
30-
///////////////////////
31-
3228
namespace TNN_NS {
3329

3430
enum class AtlasOmModelDynamicMode {

source/tnn/device/atlas/atlas_mat_converter.cc

-16
Original file line number · Diff line number · Diff line change
@@ -105,12 +105,6 @@ Status AtlasMatConverterAcc::Copy(Mat& src, Mat& dst, void* command_queue) {
105105
return Status(TNNERR_NULL_PARAM, "init mat converter failed!");
106106
}
107107

108-
//auto atlas_cmd_queue = static_cast<AtlasCommandQueue*>(command_queue);
109-
//if (atlas_cmd_queue == nullptr) {
110-
// LOGE("get atlas command queue failed!\n");
111-
// return Status(TNNERR_NULL_PARAM, "get atlas command queue failed!");
112-
//}
113-
114108
aclrtMemcpyKind memcpy_type;
115109
if (DEVICE_ATLAS == src.GetDeviceType() && DEVICE_ATLAS == dst.GetDeviceType()) {
116110
memcpy_type = ACL_MEMCPY_DEVICE_TO_DEVICE;
@@ -156,11 +150,6 @@ Status AtlasMatConverterAcc::Resize(Mat& src, Mat& dst, ResizeParam param, void*
156150
return Status(TNNERR_NULL_PARAM, "init mat converter failed!");
157151
}
158152

159-
//auto atlas_cmd_queue = static_cast<AtlasCommandQueue*>(command_queue);
160-
//if (atlas_cmd_queue == nullptr) {
161-
// LOGE("get atlas command queue failed!\n");
162-
// return Status(TNNERR_NULL_PARAM, "get atlas command queue failed!");
163-
//}
164153
aclrtStream* stream_ptr = static_cast<aclrtStream*>(command_queue);
165154
if (stream_ptr == nullptr) {
166155
LOGE("get atlas command queue failed!\n");
@@ -230,11 +219,6 @@ Status AtlasMatConverterAcc::Crop(Mat& src, Mat& dst, CropParam param, void* com
230219
return Status(TNNERR_NULL_PARAM, "init mat converter failed!");
231220
}
232221

233-
//auto atlas_cmd_queue = static_cast<AtlasCommandQueue*>(command_queue);
234-
//if (atlas_cmd_queue == nullptr) {
235-
// LOGE("get atlas command queue failed!\n");
236-
// return Status(TNNERR_NULL_PARAM, "get atlas command queue failed!");
237-
//}
238222
aclrtStream* stream_ptr = static_cast<aclrtStream*>(command_queue);
239223
if (stream_ptr == nullptr) {
240224
LOGE("get atlas command queue failed!\n");

source/tnn/network/tensorrt/layer_builder/upsample_layer_builder.cc

+52-32
Original file line number · Diff line number · Diff line change
@@ -61,39 +61,46 @@ ILayer* UpsampleTRTPluginLayerBuilder::AddToNetwork(INetworkDefinition* network)
6161
out_shape_tensor = concat(network, nc, size);
6262
}
6363

64+
65+
// Dim Mode Special Case:
66+
// Cases When Both N,C and H+W are dynamic
67+
// In this case, We cannot turn to Scale mode.
68+
// Also layer->SetOutputDimensions() API does not accept -1 as dim
69+
// Have to use TNN Upsample Plugin.
70+
// e.g [-1,2,-1,-1]
71+
if (input_blobs_.size() == 1 && !paramlist->dims.empty()) {
72+
// In this case, network->addResize should not be called. GO Plugin
73+
auto trt_dim = input_tensor->getDimensions();
74+
if (trt_dim.d[0] <= 0 || trt_dim.d[1] <= 0) {
75+
LOGI("WARNING: Dynamic NCHW Upsample with fixed dims param is NOT SUPPORTED by TensorRT, use TNN Upsample Plugin instead.\n");
76+
return TensorRTPluginLayerBuilder::AddToNetwork(network);
77+
}
78+
}
79+
6480
IResizeLayer* layer = network->addResize(*input_tensor);
6581
if (layer != nullptr) {
6682
layer->setName(layer_name_.c_str());
6783
if (input_blobs_.size() == 1) {
6884
if (!paramlist->dims.empty()) {
6985
auto trt_dim = input_tensor->getDimensions();
7086
if (trt_dim.nbDims != 4) {
71-
LOGE("Upsample with 1 input only support 4d input.");
87+
LOGE("Upsample with 1 input only support 4d input.\n");
7288
return nullptr;
7389
}
7490

7591
// trt_dim may have one of the following values:
7692
// [-1,3,32,32], [-1,2,-1,-1], [1,16,256,256]
7793
if (trt_dim.d[0] <= 0 || trt_dim.d[1] <= 0) {
7894
// Cases When At least One of N, C be dynamic
79-
if (trt_dim.d[2] > 0 && trt_dim.d[3] > 0) {
80-
// Cases when H,W are fixed, turn to scale mode
81-
// e.g [-1,3,32,32]
82-
float scale[4];
83-
scale[0] = 1;
84-
scale[1] = 1;
85-
scale[2] = paramlist->dims[0] / float(trt_dim.d[2]);
86-
scale[3] = paramlist->dims[1] / float(trt_dim.d[3]);
87-
layer->setScales(scale, 4);
88-
} else {
89-
// Cases When Both N,C and H+W are dynamic
90-
// In this case, We cannot turn to Scale mode.
91-
// Also layer->SetOutputDimensions() API does not accept -1 as dim
92-
// Have to use TNN Upsample Plugin.
93-
// e.g [-1,2,-1,-1]
94-
LOGI("WARNING: Dynamic NCHW Upsample with fixed dims provided, NOT SUPPORTED by TensorRT, use TNN Upsample Plugin instead.");
95-
return TensorRTPluginLayerBuilder::AddToNetwork(network);
96-
}
95+
// and H,W are fixed, turn to scale mode
96+
// Here trt_dim.d[2] > 0 && trt_dim.d[3] > 0
97+
// e.g [-1,3,32,32]
98+
float scale[4];
99+
scale[0] = 1;
100+
scale[1] = 1;
101+
scale[2] = paramlist->dims[0] / float(trt_dim.d[2]);
102+
scale[3] = paramlist->dims[1] / float(trt_dim.d[3]);
103+
layer->setScales(scale, 4);
97104
} else {
98105
// Cases When Both N and C are fixed
99106
// e.g [1,16,256,256]
@@ -107,17 +114,30 @@ ILayer* UpsampleTRTPluginLayerBuilder::AddToNetwork(INetworkDefinition* network)
107114
paramlist->dims[0], paramlist->dims[1]);
108115
layer->setOutputDimensions(dims);
109116
} else {
110-
LOGE("Upsample with 1 input Fix N,C + Fixed dims does not have standard positive dim, Unsupported.");
117+
LOGE("Upsample with 1 input Fix N,C + Fixed dims does not have standard positive dim, Unsupported.\n");
111118
return nullptr;
112119
}
113120
}
114121
} else {
115-
float scale[4];
116-
scale[0] = 1;
117-
scale[1] = 1;
118-
scale[2] = paramlist->scales[1];
119-
scale[3] = paramlist->scales[0];
120-
layer->setScales(scale, 4);
122+
if (output_dims.size() == 4) {
123+
float scale[4];
124+
scale[0] = 1;
125+
scale[1] = 1;
126+
scale[2] = paramlist->scales[1];
127+
scale[3] = paramlist->scales[0];
128+
layer->setScales(scale, 4);
129+
} else if (output_dims.size() == 5) {
130+
float scale[5];
131+
scale[0] = 1;
132+
scale[1] = 1;
133+
scale[2] = paramlist->scales[2];
134+
scale[3] = paramlist->scales[1];
135+
scale[4] = paramlist->scales[0];
136+
layer->setScales(scale, 5);
137+
} else {
138+
LOGE("Upsample with 1 input and scale param only support 2d or 3d now.\n");
139+
return nullptr;
140+
}
121141
}
122142
} else if (input_blobs_.size() == 2) {
123143
// set resize layer input with shape tensor
@@ -127,12 +147,12 @@ ILayer* UpsampleTRTPluginLayerBuilder::AddToNetwork(INetworkDefinition* network)
127147
auto input_tensor2 = std::dynamic_pointer_cast<TensorRTTensor>(input_foreign_tensor2)->GetTensor();
128148
layer->setInput(1, *input_tensor2);
129149
} else {
130-
float scale[4];
131-
scale[0] = 1;
132-
scale[1] = 1;
133-
scale[2] = paramlist->scales[1];
134-
scale[3] = paramlist->scales[0];
135-
layer->setScales(scale, 4);
150+
float scale[4];
151+
scale[0] = 1;
152+
scale[1] = 1;
153+
scale[2] = paramlist->scales[1];
154+
scale[3] = paramlist->scales[0];
155+
layer->setScales(scale, 4);
136156
}
137157
layer->setResizeMode(paramlist->mode == 1 ? ResizeMode::kNEAREST : ResizeMode::kLINEAR);
138158
layer->setAlignCorners(paramlist->align_corners);

0 commit comments

Comments (0)