Skip to content

Commit

Permalink
Merge pull request #1 from lbq779660843/main
Browse files Browse the repository at this point in the history
CSDN
  • Loading branch information
RichardoMrMu authored Nov 15, 2021
2 parents cb30017 + b74472e commit 8367422
Show file tree
Hide file tree
Showing 3 changed files with 55 additions and 3 deletions.
52 changes: 52 additions & 0 deletions CMakeLists_deepsort-tensorrt_win10.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,52 @@
# Build script for the deepsort TensorRT library plus the onnx2engine and
# demo tools (Windows). Lines marked "EDIT n" must be adapted to the local
# CUDA / TensorRT / Eigen installation.
#
# 3.13 is required for target_link_directories(); CUDA as a first-class
# language needs >= 3.8, so the old "VERSION 2.6" minimum was already wrong.
cmake_minimum_required(VERSION 3.13)
project(deepsort LANGUAGES CXX CUDA)

# Portable replacement for appending "-std=c++11" to CMAKE_CXX_FLAGS
# (that raw GNU flag is meaningless to MSVC, the platform this file targets).
set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED ON)

# Default to an optimized build, but do not stomp a user-provided choice,
# and skip entirely under multi-config generators (Visual Studio, etc.)
# where CMAKE_BUILD_TYPE is unused.
get_property(_is_multi_config GLOBAL PROPERTY GENERATOR_IS_MULTI_CONFIG)
if(NOT _is_multi_config AND NOT CMAKE_BUILD_TYPE)
  set(CMAKE_BUILD_TYPE Release CACHE STRING "Build type" FORCE)
endif()

# EDIT 1: CUDA toolkit location. Quoted because the path contains a space —
# the original unquoted form split into two arguments at "Program Files".
set(CUDA_BIN_PATH "C:/Program Files/NVIDIA GPU Computing Toolkit/CUDA/v11.1")
# EDIT 2: TensorRT root directory.
set(TRT_DIR "D:/lbq/TensorRT-7.2.3.4")
# EDIT 3: TensorRT header directory (derived from the root by default).
set(TRT_INCLUDE_DIRS "${TRT_DIR}/include")
# EDIT 4: TensorRT library directory (derived from the root by default).
set(TRT_LIB_DIRS "${TRT_DIR}/lib")
# EDIT 5: Eigen location (header-only; used as an include path below).
set(Eigen3_PATH "D:/lbq/eigen")

# option() takes (name, description, default); the original omitted the
# description, so OFF was parsed as the help text rather than the default.
option(CUDA_USE_STATIC_CUDA_RUNTIME "Link the CUDA runtime statically" OFF)

find_package(OpenCV REQUIRED)
find_package(CUDA REQUIRED)  # supplies CUDA_INCLUDE_DIRS / CUDA_LIBRARIES

# NOTE(review): aux_source_directory shares file(GLOB)'s drawback — newly
# added sources are not seen until re-configure. Kept for compatibility;
# an explicit source list would be preferable.
aux_source_directory(${PROJECT_SOURCE_DIR}/src SRC_DIR)

# ===== deepsort =====
# Static library holding all of src/. Usage requirements (include paths,
# link directories, link libraries) are attached PUBLIC so the executables
# below inherit them simply by linking against this target — this replaces
# the original directory-scoped include_directories()/link_directories().
add_library(deepsort STATIC ${SRC_DIR})
target_include_directories(deepsort PUBLIC
  ${CUDA_INCLUDE_DIRS}
  ${OpenCV_INCLUDE_DIRS}
  ${TRT_INCLUDE_DIRS}
  ${Eigen3_PATH}
  ${PROJECT_SOURCE_DIR}/include
)
target_link_directories(deepsort PUBLIC
  ${TRT_LIB_DIRS}
  ${OpenCV_LIB_DIRS}
)
target_link_libraries(deepsort PUBLIC
  ${CUDA_LIBRARIES}
  ${OpenCV_LIBS}
  cudart
  nvinfer
  nvonnxparser
)

# ===== onnx2engine =====
# Converts an ONNX model to a serialized TensorRT engine. CUDA/TensorRT
# libraries come transitively through the PUBLIC requirements of deepsort.
add_executable(onnx2engine ${PROJECT_SOURCE_DIR}/onnx2engine.cpp)
target_link_libraries(onnx2engine PRIVATE deepsort)

# ===== demo =====
# End-to-end tracking demo; OpenCV and CUDA come transitively via deepsort.
add_executable(demo ${PROJECT_SOURCE_DIR}/demo.cpp)
target_link_libraries(demo PRIVATE deepsort)


4 changes: 2 additions & 2 deletions src/deepsortenginegenerator.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -49,7 +49,7 @@ void DeepSortEngineGenerator::createEngine(std::string onnxPath, std::string eng
std::ofstream serializeOutputStream;
serializeStr.resize(modelStream->size());
memcpy((void*)serializeStr.data(), modelStream->data(), modelStream->size());
serializeOutputStream.open(enginePath);
serializeOutputStream.open(enginePath, std::ios::binary);
serializeOutputStream << serializeStr;
serializeOutputStream.close();
}
}
2 changes: 1 addition & 1 deletion src/featuretensor.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ void FeatureTensor::loadEngine(std::string enginePath) {
// Deserialize model
runtime = createInferRuntime(*gLogger);
assert(runtime != nullptr);
std::ifstream engineStream(enginePath);
std::ifstream engineStream(enginePath, std::ios::binary);
std::string engineCache("");
while (engineStream.peek() != EOF) {
std::stringstream buffer;
Expand Down

0 comments on commit 8367422

Please sign in to comment.