forked from pytorch/pytorch
-
Notifications
You must be signed in to change notification settings - Fork 0
/
jit_log.h
43 lines (36 loc) · 1.58 KB
/
jit_log.h
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
#pragma once
#include <iostream>
#include <string>
// To enable logging please set(export) PYTORCH_JIT_LOG_LEVEL to
// the ordinal value of one of the following logging levels: 1 for GRAPH_DUMP,
// 2 for GRAPH_UPDATE, 3 for GRAPH_DEBUG.
// * Use GRAPH_DUMP for dumping graphs after optimization passes
// * Use GRAPH_UPDATE for reporting graph transformations (i.e. node deletion,
// constant folding, CSE)
// * Use GRAPH_DEBUG to provide information useful for debugging
// the internals of a particular optimization pass or analysis
namespace torch {
namespace jit {

// Logging verbosity, in increasing order of detail. The numeric (ordinal)
// value of each enumerator is what PYTORCH_JIT_LOG_LEVEL is matched against:
// OFF = 0 (disabled), GRAPH_DUMP = 1, GRAPH_UPDATE = 2, GRAPH_DEBUG = 3.
// Enabling a level also enables every lower non-OFF level (see JIT_LOG).
enum class JitLoggingLevels {
  OFF,
  GRAPH_DUMP,
  GRAPH_UPDATE,
  GRAPH_DEBUG,
};

// Returns the currently-configured logging level.
// NOTE(review): presumably parsed from PYTORCH_JIT_LOG_LEVEL in the .cpp,
// defaulting to OFF when unset — confirm against the implementation.
JitLoggingLevels jit_log_level();

// Prefixes each line of `in_str` with a tag derived from `level`, so that
// multi-line payloads (e.g. whole graph dumps) remain attributable in logs.
std::string jit_log_prefix(JitLoggingLevels level, const std::string& in_str);

// Streams a human-readable name for `level`.
std::ostream& operator<<(std::ostream& out, JitLoggingLevels level);

// Core logging macro.
// - Wrapped in do { } while (0) so it acts as a single statement and is safe
//   inside unbraced if/else (the original bare `if { }` had a dangling-else
//   hazard).
// - The level is read into a local so jit_log_level() runs once per log site.
// - Names are fully qualified so expansion works outside torch::jit too.
#define JIT_LOG(level, ...)                                                \
  do {                                                                     \
    const auto jit_log_level_ = ::torch::jit::jit_log_level();             \
    if (jit_log_level_ != ::torch::jit::JitLoggingLevels::OFF &&           \
        jit_log_level_ >= (level)) {                                       \
      std::cerr << ::torch::jit::jit_log_prefix(                           \
          (level), ::c10::str(__VA_ARGS__));                               \
    }                                                                      \
  } while (0)

// use GRAPH_DUMP for dumping graphs after optimization passes
#define GRAPH_DUMP(MSG, G)     \
  JIT_LOG(                     \
      ::torch::jit::JitLoggingLevels::GRAPH_DUMP, MSG, "\n", (G)->toString())
// use GRAPH_UPDATE for reporting graph transformations (i.e. node deletion,
// constant folding, CSE)
#define GRAPH_UPDATE(...) \
  JIT_LOG(::torch::jit::JitLoggingLevels::GRAPH_UPDATE, __VA_ARGS__)
// use GRAPH_DEBUG to provide information useful for debugging a particular opt
// pass
#define GRAPH_DEBUG(...) \
  JIT_LOG(::torch::jit::JitLoggingLevels::GRAPH_DEBUG, __VA_ARGS__)

} // namespace jit
} // namespace torch