Skip to content

Commit

Permalink
refactor: update TRT build log (#6117)
Browse files Browse the repository at this point in the history
* feat: add support of throttle logging

Signed-off-by: ktro2828 <[email protected]>

* refactor: replace custom `Logger` with `tensorrt_common::Logger`

Signed-off-by: ktro2828 <[email protected]>

* refactor: update to display standard output on screen

Signed-off-by: ktro2828 <[email protected]>

* refactor: update log message while building engine from onnx

Signed-off-by: ktro2828 <[email protected]>

* refactor: update logger in `lidar_centerpoint`

Signed-off-by: ktro2828 <[email protected]>

* refactor: update log message

Signed-off-by: ktro2828 <[email protected]>

* refactor: set default verbose=true for building engine from onnx

Signed-off-by: ktro2828 <[email protected]>

* refactor: remove including unused `rclcpp.hpp`

Signed-off-by: ktro2828 <[email protected]>

* refactor: update launcher to output logs on screen

Signed-off-by: ktro2828 <[email protected]>

---------

Signed-off-by: ktro2828 <[email protected]>
Co-authored-by: Shunsuke Miura <[email protected]>
  • Loading branch information
ktro2828 and miursh authored Jan 29, 2024
1 parent 1c4c3e3 commit 6835bc4
Show file tree
Hide file tree
Showing 13 changed files with 133 additions and 100 deletions.
63 changes: 56 additions & 7 deletions common/tensorrt_common/include/tensorrt_common/logger.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -19,13 +19,15 @@

#include "NvInferRuntimeCommon.h"

#include <atomic>
#include <cassert>
#include <chrono>
#include <ctime>
#include <iomanip>
#include <iostream>
#include <ostream>
#include <sstream>
#include <string>
#include <thread>

namespace tensorrt_common
{
Expand Down Expand Up @@ -200,7 +202,15 @@ class Logger : public nvinfer1::ILogger // NOLINT
public:
// Logger(Severity severity = Severity::kWARNING)
// Logger(Severity severity = Severity::kVERBOSE)
explicit Logger(Severity severity = Severity::kINFO) : mReportableSeverity(severity) {}
/// Construct a logger that reports messages at or above `severity`.
/// Verbose output is enabled, matching the historical default.
explicit Logger(Severity severity = Severity::kINFO) : Logger(true, severity) {}

/// Construct a logger with explicit control over whether any TRT message is
/// printed at all (`verbose == false` silences log() entirely).
explicit Logger(const bool verbose, Severity severity = Severity::kINFO)
: mReportableSeverity(severity), mVerbose(verbose), mThrottleStopFlag(false)
{
}

//!
//! \enum TestResult
Expand Down Expand Up @@ -234,7 +244,44 @@ class Logger : public nvinfer1::ILogger // NOLINT
//!
//!
//! \brief Forward a TensorRT message to the log stream consumer.
//!
//! Every message is prefixed with "[TRT] "; messages are dropped entirely
//! when the logger was constructed with verbose == false.
//!
void log(Severity severity, const char * msg) noexcept override
{
  if (!mVerbose) {
    return;
  }
  LogStreamConsumer(mReportableSeverity, severity) << "[TRT] " << std::string(msg) << std::endl;
}

/**
 * @brief Start a background thread that re-logs `msg` every `duration` seconds.
 *
 * @example
 * Logger logger;
 * auto log_thread = logger.log_throttle(nvinfer1::ILogger::Severity::kINFO, "SOME MSG", 1);
 * // some operation
 * logger.stop_throttle(log_thread);
 *
 * @param severity severity used for every repeated message
 * @param msg message to repeat; the pointed-to string must remain valid until
 *            stop_throttle() is called
 * @param duration interval between repeated messages, in seconds
 * @return std::thread the worker thread; must be handed to stop_throttle()
 *
 */
std::thread log_throttle(Severity severity, const char * msg, const int duration) noexcept
{
  mThrottleStopFlag.store(false);
  auto log_func = [this](Severity s, const char * m, const int d) {
    // Poll the stop flag in short slices instead of sleeping the whole
    // interval, so stop_throttle() does not block for up to `d` seconds
    // waiting for the worker to wake up and notice the flag.
    constexpr auto kPollInterval = std::chrono::milliseconds(100);
    auto next_log = std::chrono::steady_clock::now();
    while (!mThrottleStopFlag.load()) {
      const auto now = std::chrono::steady_clock::now();
      if (now >= next_log) {
        this->log(s, m);
        next_log = now + std::chrono::seconds(d);
      }
      std::this_thread::sleep_for(kPollInterval);
    }
  };

  std::thread log_thread(log_func, severity, msg, duration);
  return log_thread;
}

/**
 * @brief Stop the throttled logging thread started by log_throttle().
 *
 * Raises the stop flag so the worker loop exits, then joins the thread.
 *
 * @param log_thread the thread returned by log_throttle()
 */
void stop_throttle(std::thread & log_thread) noexcept
{
  mThrottleStopFlag.store(true);
  // join() on a non-joinable thread throws std::system_error, which inside
  // this noexcept function would call std::terminate(). Guard against an
  // already-joined or default-constructed thread being passed in.
  if (log_thread.joinable()) {
    log_thread.join();
  }
}

//!
Expand Down Expand Up @@ -430,6 +477,8 @@ class Logger : public nvinfer1::ILogger // NOLINT
}

Severity mReportableSeverity;
bool mVerbose;
std::atomic<bool> mThrottleStopFlag;
};

namespace
Expand All @@ -444,7 +493,7 @@ namespace
//!
inline LogStreamConsumer LOG_VERBOSE(const Logger & logger)
{
  // Stamp the "[TRT] " prefix onto the verbose-level stream before handing
  // it back to the caller for message content.
  const auto reportable = logger.getReportableSeverity();
  return LogStreamConsumer(reportable, Severity::kVERBOSE) << "[TRT] ";
}

//!
Expand All @@ -456,7 +505,7 @@ inline LogStreamConsumer LOG_VERBOSE(const Logger & logger)
//!
inline LogStreamConsumer LOG_INFO(const Logger & logger)
{
  // Stamp the "[TRT] " prefix onto the info-level stream before handing it
  // back to the caller for message content.
  const auto reportable = logger.getReportableSeverity();
  return LogStreamConsumer(reportable, Severity::kINFO) << "[TRT] ";
}

//!
Expand All @@ -468,7 +517,7 @@ inline LogStreamConsumer LOG_INFO(const Logger & logger)
//!
inline LogStreamConsumer LOG_WARN(const Logger & logger)
{
  // Stamp the "[TRT] " prefix onto the warning-level stream before handing
  // it back to the caller for message content.
  const auto reportable = logger.getReportableSeverity();
  return LogStreamConsumer(reportable, Severity::kWARNING) << "[TRT] ";
}

//!
Expand All @@ -480,7 +529,7 @@ inline LogStreamConsumer LOG_WARN(const Logger & logger)
//!
inline LogStreamConsumer LOG_ERROR(const Logger & logger)
{
  // Stamp the "[TRT] " prefix onto the error-level stream before handing it
  // back to the caller for message content.
  const auto reportable = logger.getReportableSeverity();
  return LogStreamConsumer(reportable, Severity::kERROR) << "[TRT] ";
}

//!
Expand All @@ -494,7 +543,7 @@ inline LogStreamConsumer LOG_ERROR(const Logger & logger)
//!
inline LogStreamConsumer LOG_FATAL(const Logger & logger)
{
  // Stamp the "[TRT] " prefix onto the internal-error stream before handing
  // it back to the caller for message content.
  const auto reportable = logger.getReportableSeverity();
  return LogStreamConsumer(reportable, Severity::kINTERNAL_ERROR) << "[TRT] ";
}

} // anonymous namespace
Expand Down
4 changes: 4 additions & 0 deletions common/tensorrt_common/src/tensorrt_common.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -186,7 +186,11 @@ void TrtCommon::setup()
} else {
std::cout << "Building... " << cache_engine_path << std::endl;
logger_.log(nvinfer1::ILogger::Severity::kINFO, "Start build engine");
auto log_thread = logger_.log_throttle(
nvinfer1::ILogger::Severity::kINFO,
"Applying optimizations and building TRT CUDA engine. Please wait for a few minutes...", 5);
buildEngineFromOnnx(model_file_path_, cache_engine_path);
logger_.stop_throttle(log_thread);
logger_.log(nvinfer1::ILogger::Severity::kINFO, "End build engine");
}
engine_path = cache_engine_path;
Expand Down
7 changes: 2 additions & 5 deletions perception/lidar_centerpoint/lib/network/network_trt.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@

#include "lidar_centerpoint/network/network_trt.hpp"

#include <rclcpp/rclcpp.hpp>

namespace centerpoint
{
bool VoxelEncoderTRT::setProfile(
Expand Down Expand Up @@ -65,9 +63,8 @@ bool HeadTRT::setProfile(
if (
out_name == std::string("heatmap") &&
network.getOutput(ci)->getDimensions().d[1] != static_cast<int32_t>(out_channel_sizes_[ci])) {
RCLCPP_ERROR(
rclcpp::get_logger("lidar_centerpoint"),
"Expected and actual number of classes do not match");
tensorrt_common::LOG_ERROR(logger_)
<< "Expected and actual number of classes do not match" << std::endl;
return false;
}
auto out_dims = nvinfer1::Dims4(
Expand Down
40 changes: 19 additions & 21 deletions perception/lidar_centerpoint/lib/network/tensorrt_wrapper.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -14,8 +14,6 @@

#include "lidar_centerpoint/network/tensorrt_wrapper.hpp"

#include <rclcpp/rclcpp.hpp>

#include <NvOnnxParser.h>

#include <fstream>
Expand All @@ -42,7 +40,7 @@ bool TensorRTWrapper::init(
runtime_ =
tensorrt_common::TrtUniquePtr<nvinfer1::IRuntime>(nvinfer1::createInferRuntime(logger_));
if (!runtime_) {
RCLCPP_ERROR(rclcpp::get_logger("lidar_centerpoint"), "Failed to create runtime");
tensorrt_common::LOG_ERROR(logger_) << "Failed to create runtime" << std::endl;
return false;
}

Expand All @@ -51,7 +49,11 @@ bool TensorRTWrapper::init(
if (engine_file.is_open()) {
success = loadEngine(engine_path);
} else {
auto log_thread = logger_.log_throttle(
nvinfer1::ILogger::Severity::kINFO,
"Applying optimizations and building TRT CUDA engine. Please wait a minutes...", 5);
success = parseONNX(onnx_path, engine_path, precision);
logger_.stop_throttle(log_thread);
}
success &= createContext();

Expand All @@ -61,15 +63,15 @@ bool TensorRTWrapper::init(
bool TensorRTWrapper::createContext()
{
if (!engine_) {
RCLCPP_ERROR(
rclcpp::get_logger("lidar_centerpoint"), "Failed to create context: Engine was not created");
tensorrt_common::LOG_ERROR(logger_)
<< "Failed to create context: Engine was not created" << std::endl;
return false;
}

context_ =
tensorrt_common::TrtUniquePtr<nvinfer1::IExecutionContext>(engine_->createExecutionContext());
if (!context_) {
RCLCPP_ERROR(rclcpp::get_logger("lidar_centerpoint"), "Failed to create context");
tensorrt_common::LOG_ERROR(logger_) << "Failed to create context" << std::endl;
return false;
}

Expand All @@ -83,14 +85,14 @@ bool TensorRTWrapper::parseONNX(
auto builder =
tensorrt_common::TrtUniquePtr<nvinfer1::IBuilder>(nvinfer1::createInferBuilder(logger_));
if (!builder) {
RCLCPP_ERROR(rclcpp::get_logger("lidar_centerpoint"), "Failed to create builder");
tensorrt_common::LOG_ERROR(logger_) << "Failed to create builder" << std::endl;
return false;
}

auto config =
tensorrt_common::TrtUniquePtr<nvinfer1::IBuilderConfig>(builder->createBuilderConfig());
if (!config) {
RCLCPP_ERROR(rclcpp::get_logger("lidar_centerpoint"), "Failed to create config");
tensorrt_common::LOG_ERROR(logger_) << "Failed to create config" << std::endl;
return false;
}
#if (NV_TENSORRT_MAJOR * 1000) + (NV_TENSORRT_MINOR * 100) + NV_TENSOR_PATCH >= 8400
Expand All @@ -100,12 +102,11 @@ bool TensorRTWrapper::parseONNX(
#endif
if (precision == "fp16") {
if (builder->platformHasFastFp16()) {
RCLCPP_INFO(rclcpp::get_logger("lidar_centerpoint"), "Using TensorRT FP16 Inference");
tensorrt_common::LOG_INFO(logger_) << "Using TensorRT FP16 Inference" << std::endl;
config->setFlag(nvinfer1::BuilderFlag::kFP16);
} else {
RCLCPP_INFO(
rclcpp::get_logger("lidar_centerpoint"),
"TensorRT FP16 Inference isn't supported in this environment");
tensorrt_common::LOG_INFO(logger_)
<< "TensorRT FP16 Inference isn't supported in this environment" << std::endl;
}
}

Expand All @@ -114,7 +115,7 @@ bool TensorRTWrapper::parseONNX(
auto network =
tensorrt_common::TrtUniquePtr<nvinfer1::INetworkDefinition>(builder->createNetworkV2(flag));
if (!network) {
RCLCPP_ERROR(rclcpp::get_logger("lidar_centerpoint"), "Failed to create network");
tensorrt_common::LOG_ERROR(logger_) << "Failed to create network" << std::endl;
return false;
}

Expand All @@ -123,23 +124,20 @@ bool TensorRTWrapper::parseONNX(
parser->parseFromFile(onnx_path.c_str(), static_cast<int>(nvinfer1::ILogger::Severity::kERROR));

if (!setProfile(*builder, *network, *config)) {
RCLCPP_ERROR(rclcpp::get_logger("lidar_centerpoint"), "Failed to set profile");
tensorrt_common::LOG_ERROR(logger_) << "Failed to set profile" << std::endl;
return false;
}

RCLCPP_INFO_STREAM(
rclcpp::get_logger("lidar_centerpoint"),
"Applying optimizations and building TRT CUDA engine (" << onnx_path << ") ...");
plan_ = tensorrt_common::TrtUniquePtr<nvinfer1::IHostMemory>(
builder->buildSerializedNetwork(*network, *config));
if (!plan_) {
RCLCPP_ERROR(rclcpp::get_logger("lidar_centerpoint"), "Failed to create serialized network");
tensorrt_common::LOG_ERROR(logger_) << "Failed to create serialized network" << std::endl;
return false;
}
engine_ = tensorrt_common::TrtUniquePtr<nvinfer1::ICudaEngine>(
runtime_->deserializeCudaEngine(plan_->data(), plan_->size()));
if (!engine_) {
RCLCPP_ERROR(rclcpp::get_logger("lidar_centerpoint"), "Failed to create engine");
tensorrt_common::LOG_ERROR(logger_) << "Failed to create engine" << std::endl;
return false;
}

Expand All @@ -148,7 +146,7 @@ bool TensorRTWrapper::parseONNX(

bool TensorRTWrapper::saveEngine(const std::string & engine_path)
{
RCLCPP_INFO_STREAM(rclcpp::get_logger("lidar_centerpoint"), "Writing to " << engine_path);
tensorrt_common::LOG_INFO(logger_) << "Writing to " << engine_path << std::endl;
std::ofstream file(engine_path, std::ios::out | std::ios::binary);
file.write(reinterpret_cast<const char *>(plan_->data()), plan_->size());
return true;
Expand All @@ -162,7 +160,7 @@ bool TensorRTWrapper::loadEngine(const std::string & engine_path)
std::string engine_str = engine_buffer.str();
engine_ = tensorrt_common::TrtUniquePtr<nvinfer1::ICudaEngine>(runtime_->deserializeCudaEngine(
reinterpret_cast<const void *>(engine_str.data()), engine_str.size()));
RCLCPP_INFO_STREAM(rclcpp::get_logger("lidar_centerpoint"), "Loaded engine from " << engine_path);
tensorrt_common::LOG_INFO(logger_) << "Loaded engine from " << engine_path << std::endl;
return true;
}

Expand Down
20 changes: 3 additions & 17 deletions perception/tensorrt_yolo/lib/include/trt_yolo.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,7 @@
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/opencv.hpp>
#include <tensorrt_common/logger.hpp>
#include <yolo_layer.hpp>

#include <NvInfer.h>
Expand All @@ -67,22 +68,6 @@ struct Deleter
template <typename T>
using unique_ptr = std::unique_ptr<T, Deleter>;

class Logger : public nvinfer1::ILogger
{
public:
explicit Logger(bool verbose) : verbose_(verbose) {}

void log(Severity severity, const char * msg) noexcept override
{
if (verbose_ || ((severity != Severity::kINFO) && (severity != Severity::kVERBOSE))) {
std::cout << msg << std::endl;
}
}

private:
bool verbose_{false};
};

struct Config
{
int num_anchors;
Expand All @@ -105,7 +90,7 @@ class Net
Net(
const std::string & onnx_file_path, const std::string & precision, const int max_batch_size,
const Config & yolo_config, const std::vector<std::string> & calibration_images,
const std::string & calibration_table, bool verbose = false,
const std::string & calibration_table, bool verbose = true,
size_t workspace_size = (1ULL << 30));

~Net();
Expand Down Expand Up @@ -138,6 +123,7 @@ class Net
cuda::unique_ptr<float[]> out_scores_d_ = nullptr;
cuda::unique_ptr<float[]> out_boxes_d_ = nullptr;
cuda::unique_ptr<float[]> out_classes_d_ = nullptr;
tensorrt_common::Logger logger_;

void load(const std::string & path);
bool prepare();
Expand Down
Loading

0 comments on commit 6835bc4

Please sign in to comment.