Remove ProfiledType (pytorch#42570)
Summary:
Pull Request resolved: pytorch#42570

ProfiledType doesn't do anything and is not used at the moment, so this diff removes it.
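
For context: each generated ProfiledType kernel was a pure pass-through. It looked up the op's schema and immediately redispatched past the Profiler key, recording nothing. Below is a minimal sketch of what the removed PROFILE_DISPATCH template expanded to for one op; the op name and signature are illustrative, not copied from the generated files.

    // Sketch: hypothetical expansion of the removed PROFILE_DISPATCH template.
    // Note there is no profiling logic at all; the kernel only redispatches,
    // skipping past DispatchKey::Profiler.
    Tensor add(const Tensor& self, const Tensor& other, Scalar alpha) {
      static auto op = c10::Dispatcher::singleton()
          .findSchemaOrThrow("aten::add", "Tensor")
          .typed<Tensor (const Tensor&, const Tensor&, Scalar)>();
      return c10::Dispatcher::singleton()
          .redispatch<Tensor, const Tensor&, const Tensor&, Scalar>(
              op, c10::DispatchKey::Profiler, self, other, alpha);
    }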

Test Plan: CI

Reviewed By: ezyang

Differential Revision: D22938664

Pulled By: ilia-cher

fbshipit-source-id: 037c512938028f44258b702bbcde3f8c144f4aa0
Ilia Cherniavskii authored and facebook-github-bot committed Aug 6, 2020
1 parent ccfce9d commit a53fdaa
Showing 10 changed files with 0 additions and 174 deletions.
6 changes: 0 additions & 6 deletions BUILD.bazel
@@ -186,12 +186,6 @@ libtorch_cpp_generated_sources = [
 "torch/csrc/autograd/generated/VariableType_3.cpp",
 "torch/csrc/autograd/generated/VariableType_4.cpp",
 # "torch/csrc/autograd/generated/VariableTypeEverything.cpp",
-"torch/csrc/autograd/generated/ProfiledType_0.cpp",
-"torch/csrc/autograd/generated/ProfiledType_1.cpp",
-"torch/csrc/autograd/generated/ProfiledType_2.cpp",
-"torch/csrc/autograd/generated/ProfiledType_3.cpp",
-"torch/csrc/autograd/generated/ProfiledType_4.cpp",
-# "torch/csrc/autograd/generated/ProfiledTypeEverything.cpp",
 "torch/csrc/autograd/generated/TraceType_0.cpp",
 "torch/csrc/autograd/generated/TraceType_1.cpp",
 "torch/csrc/autograd/generated/TraceType_2.cpp",
2 changes: 0 additions & 2 deletions c10/core/DispatchKey.cpp
@@ -50,8 +50,6 @@ const char* toString(DispatchKey t) {
 return "Autocast";
 case DispatchKey::TESTING_ONLY_GenericWrapper:
 return "TESTING_ONLY_GenericWrapper";
-case DispatchKey::Profiler:
-return "Profiler";
 case DispatchKey::Named:
 return "Named";
 case DispatchKey::Tracer:
2 changes: 0 additions & 2 deletions c10/core/DispatchKey.h
@@ -189,8 +189,6 @@ enum class DispatchKey : uint8_t {
 // the bulk of this logic.
 Autograd,

-Profiler,
-
 Tracer,

 // Pre-autograd dispatch keys allow backends to override the autograd behavior
6 changes: 0 additions & 6 deletions caffe2/CMakeLists.txt
@@ -309,11 +309,6 @@ if(NOT INTERN_BUILD_MOBILE OR NOT BUILD_CAFFE2_MOBILE)
 "${TORCH_SRC_DIR}/csrc/autograd/generated/VariableType_2.cpp"
 "${TORCH_SRC_DIR}/csrc/autograd/generated/VariableType_3.cpp"
 "${TORCH_SRC_DIR}/csrc/autograd/generated/VariableType_4.cpp"
-"${TORCH_SRC_DIR}/csrc/autograd/generated/ProfiledType_0.cpp"
-"${TORCH_SRC_DIR}/csrc/autograd/generated/ProfiledType_1.cpp"
-"${TORCH_SRC_DIR}/csrc/autograd/generated/ProfiledType_2.cpp"
-"${TORCH_SRC_DIR}/csrc/autograd/generated/ProfiledType_3.cpp"
-"${TORCH_SRC_DIR}/csrc/autograd/generated/ProfiledType_4.cpp"
 "${TORCH_SRC_DIR}/csrc/autograd/generated/TraceType_0.cpp"
 "${TORCH_SRC_DIR}/csrc/autograd/generated/TraceType_1.cpp"
 "${TORCH_SRC_DIR}/csrc/autograd/generated/TraceType_2.cpp"
@@ -371,7 +366,6 @@ if(NOT INTERN_BUILD_MOBILE OR NOT BUILD_CAFFE2_MOBILE)
 "${CMAKE_BINARY_DIR}/aten/src/ATen/Declarations.yaml"
 "${TOOLS_PATH}/autograd/templates/VariableType.h"
 "${TOOLS_PATH}/autograd/templates/VariableType.cpp"
-"${TOOLS_PATH}/autograd/templates/ProfiledType.cpp"
 "${TOOLS_PATH}/autograd/templates/TraceType.cpp"
 "${TOOLS_PATH}/autograd/templates/Functions.h"
 "${TOOLS_PATH}/autograd/templates/Functions.cpp"
7 changes: 0 additions & 7 deletions test/cpp/jit/test_misc.cpp
@@ -821,8 +821,6 @@ void checkScopeCallbacks() {
 }

 void testRecordFunction() {
-// enable observers
-c10::impl::IncludeDispatchKeyGuard observer_guard(c10::DispatchKey::Profiler);
 // disabling the inlining of method calls
 GraphOptimizerEnabledGuard opt_guard(false);

@@ -1016,8 +1014,6 @@ void testRecordFunction() {
 ids.clear();

 auto th = std::thread([&ids]() {
-c10::impl::IncludeDispatchKeyGuard observer_guard(
-c10::DispatchKey::Profiler);
 addThreadLocalCallback(RecordFunctionCallback(
 [&ids](const RecordFunction& fn) { ids.push_back(2); },
 [](const RecordFunction&) {}));

@@ -1128,9 +1124,6 @@ void checkDebugInfo(c10::DebugInfoKind kind, int model_id) {
 }

 void testThreadLocalDebugInfo() {
-// enable observers
-c10::impl::IncludeDispatchKeyGuard observer_guard(c10::DispatchKey::Profiler);
-
 TORCH_CHECK(
 c10::ThreadLocalDebugInfo::get(c10::DebugInfoKind::TEST_INFO) == nullptr);
 auto debug_info = std::make_shared<TestThreadLocalDebugInfo>();
90 changes: 0 additions & 90 deletions tools/autograd/gen_variable_type.py
@@ -364,23 +364,6 @@
 ${return_type} ${api_name}(${declaration_formals}); // {"schema": "${schema_string}", "compound": "${compound}"}
 """)

-# TODO(iliacher): remove Profile wrappers
-# ProfiledType templates
-# See NOTE[UnboxedOnly] in function_wrapper.py
-UNBOXED_PROFILE_DISPATCH = CodeTemplate("""\
-static auto op = c10::Dispatcher::singleton()
-.findSchemaOrThrow("aten::${operator_name}", "${overload_name}")
-.typed<${return_type} (${profiled_arg_types})>();
-return c10::Dispatcher::singleton().redispatch<${profiled_ret_and_arg_types}>(${profiled_dispatch_args});
-""")
-PROFILE_DISPATCH = CodeTemplate("""\
-static auto op = c10::Dispatcher::singleton()
-.findSchemaOrThrow("aten::${operator_name}", "${overload_name}")
-.typed<${return_type} (${profiled_arg_types})>();
-return c10::Dispatcher::singleton().redispatch<${profiled_ret_and_arg_types}>(${profiled_dispatch_args});
-""")
-
-
 # TraceType templates
 # TODO: change `redispatch` to `NoTracerDispatchMode` + regular `call`.
 # See NOTE[UnboxedOnly] in function_wrapper.py
@@ -678,14 +661,11 @@ def gen_variable_type(out, aten_declarations, template_path):
 def gen_variable_type_shard(out, aten_declarations, template_path, suffix, header):
 VARIABLE_TYPE_H = CodeTemplate.from_file(template_path + '/VariableType.h')
 VARIABLE_TYPE_CPP = CodeTemplate.from_file(template_path + '/VariableType.cpp')
-PROFILED_TYPE_CPP = CodeTemplate.from_file(template_path + '/ProfiledType.cpp')
 TRACE_TYPE_CPP = CodeTemplate.from_file(template_path + '/TraceType.cpp')

 type_declarations = []
 type_definitions = []
 wrapper_registrations = []
-profiled_method_definitions = []
-profiled_wrapper_registrations = []
 trace_method_definitions = []
 trace_wrapper_registrations = []

@@ -708,19 +688,6 @@ def gen_variable_type_shard(out, aten_declarations, template_path, suffix, header):
 # See Note [Manual catchAll kernels]
 assert (declaration['name'] in MANUAL_CATCHALL) == declaration['manual_kernel_registration']

-# Emit ProfiledType code
-profiled_body = emit_profiled_body(declaration)
-profiled_method_definitions.append(METHOD_DEFINITION.substitute(
-declaration, type_definition_body=profiled_body))
-
-if declaration['use_c10_dispatcher'] == 'full':
-profiled_wrapper_registrations.append(WRAPPER_REGISTRATION.substitute(
-declaration, class_type='ProfiledType'))
-else:
-assert declaration['use_c10_dispatcher'] == 'with_codegenerated_unboxing_wrapper'
-profiled_wrapper_registrations.append(UNBOXEDONLY_WRAPPER_REGISTRATION.substitute(
-declaration, class_type='ProfiledType'))
-
 # Emit TraceType code
 if declaration['name'] not in MANUAL_TRACER:
 trace_body = emit_trace_body(declaration)
@@ -738,73 +705,16 @@ def gen_variable_type_shard(out, aten_declarations, template_path, suffix, header):
 'type_derived_method_declarations': type_declarations,
 'type_derived_method_definitions': type_definitions,
 'wrapper_registrations': wrapper_registrations,
-'profiled_method_definitions': profiled_method_definitions,
-'profiled_wrapper_registrations': profiled_wrapper_registrations,
 'trace_method_definitions': trace_method_definitions,
 'trace_wrapper_registrations': trace_wrapper_registrations,
 }
 if header:
 write(out, 'VariableType.h', VARIABLE_TYPE_H, env)
 else:
 write(out, 'VariableType%s.cpp' % suffix, VARIABLE_TYPE_CPP, env)
-write(out, 'ProfiledType%s.cpp' % suffix, PROFILED_TYPE_CPP, env)
 write(out, 'TraceType%s.cpp' % suffix, TRACE_TYPE_CPP, env)


-def emit_profiled_body(declaration):
-arguments = declaration['arguments']
-returns = declaration['returns']
-func = declaration['derivative']
-name = declaration['name']
-inplace = declaration['inplace']
-is_out_fn = name.endswith('_out')
-modifies_arguments = inplace or is_out_fn
-returns_void = len(returns) == 0
-
-processed_args = []
-for a in arguments:
-processed_args.append('{}'.format(a['name']))
-
-arg_types = ', '.join([a['type'] for a in declaration['arguments']])
-ret_and_arg_types = ', '.join([declaration['return_type']] + [a['type'] for a in declaration['arguments']])
-schema_order_arg_types = ', '.join([a['type'] for a in declaration['schema_order_arguments']])
-schema_order_ret_and_arg_types = ', '.join(
-[declaration['return_type']] + [a['type'] for a in declaration['schema_order_arguments']])
-
-def check_record_function_input_type(simple_type):
-return simple_type in ['Tensor', 'Scalar']
-
-def record_function_input_names():
-return ', '.join([
-arg['name'] for arg in declaration['arguments']
-if check_record_function_input_type(arg['simple_type'])])
-
-profiled_dispatch_args = ['op', 'c10::DispatchKey::Profiler'] + declaration['args']
-schema_order_profiled_dispatch_args = ['op', 'c10::DispatchKey::Profiler'] + declaration['schema_order_args']
-
-if declaration['use_c10_dispatcher'] == 'full':
-profiled_arg_types = schema_order_arg_types
-profiled_ret_and_arg_types = schema_order_ret_and_arg_types
-profiled_dispatch_args = schema_order_profiled_dispatch_args
-else:
-assert declaration['use_c10_dispatcher'] == 'with_codegenerated_unboxing_wrapper'
-profiled_arg_types = arg_types
-profiled_ret_and_arg_types = ret_and_arg_types
-profiled_dispatch_args = profiled_dispatch_args
-
-call = PROFILE_DISPATCH.substitute(
-declaration,
-name=name,
-input_names=record_function_input_names(),
-return_type=declaration['return_type'],
-profiled_arg_types=profiled_arg_types,
-profiled_ret_and_arg_types=profiled_ret_and_arg_types,
-profiled_dispatch_args=profiled_dispatch_args,
-)
-
-return [call]
-
-
 def emit_trace_body(declaration):
 returns = declaration['returns']
 name = declaration['name']
36 changes: 0 additions & 36 deletions tools/autograd/templates/ProfiledType.cpp

This file was deleted.

10 changes: 0 additions & 10 deletions tools/build_variables.bzl
@@ -10,11 +10,6 @@ GENERATED_CPP = [
 "jit/generated/generated_unboxing_wrappers_0.cpp",
 "jit/generated/generated_unboxing_wrappers_1.cpp",
 "jit/generated/generated_unboxing_wrappers_2.cpp",
-"autograd/generated/ProfiledType_0.cpp",
-"autograd/generated/ProfiledType_1.cpp",
-"autograd/generated/ProfiledType_2.cpp",
-"autograd/generated/ProfiledType_3.cpp",
-"autograd/generated/ProfiledType_4.cpp",
 "autograd/generated/TraceType_0.cpp",
 "autograd/generated/TraceType_1.cpp",
 "autograd/generated/TraceType_2.cpp",
@@ -38,11 +33,6 @@ def libtorch_generated_sources(gencode_pattern):
 "autograd/generated/VariableType_2.cpp",
 "autograd/generated/VariableType_3.cpp",
 "autograd/generated/VariableType_4.cpp",
-"autograd/generated/ProfiledType_0.cpp",
-"autograd/generated/ProfiledType_1.cpp",
-"autograd/generated/ProfiledType_2.cpp",
-"autograd/generated/ProfiledType_3.cpp",
-"autograd/generated/ProfiledType_4.cpp",
 "autograd/generated/TraceType_0.cpp",
 "autograd/generated/TraceType_1.cpp",
 "autograd/generated/TraceType_2.cpp",
9 changes: 0 additions & 9 deletions torch/csrc/autograd/profiler.cpp
@@ -663,12 +663,3 @@ void RecordProfile::processEvents(const std::vector<Event*>& events) {
 }

 }}}
-
-void profile_wrapper(const c10::OperatorHandle& op, torch::jit::Stack* stack) {
-c10::impl::ExcludeDispatchKeyGuard key_guard(c10::DispatchKey::Profiler);
-op.callBoxed(stack);
-}
-
-TORCH_LIBRARY_IMPL(_, Profiler, m) {
-m.fallback(torch::CppFunction::makeFromBoxedFunction<&profile_wrapper>());
-}
6 changes: 0 additions & 6 deletions torch/csrc/jit/passes/onnx/unpack_quantized_weights.cpp
@@ -205,12 +205,6 @@ void unpackQuantizedWeightsHelper(
 .findSchemaOrThrow(unpack_fn.c_str(), "")
 .typed<std::tuple<at::Tensor, c10::optional<at::Tensor>>(
 at::Tensor)>();
-// Temporary hack: when the `Profiler` dispatch key is inserted, this call
-// will fail since the `unpack()` ops return multiple values, however the
-// boxing code currently does not support this. Instead, exclude the
-// Profiler dispatch key and go through unboxed dispatch, avoiding boxing
-// altogether
-c10::impl::ExcludeDispatchKeyGuard key_guard(c10::DispatchKey::Profiler);
 std::tie(unpacked_weight, bias) = op.call(packed_weight);
 }

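The deleted comment explains the workaround: boxed dispatch moves arguments and results through a torch::jit::Stack of IValues, and at the time the boxing machinery could not handle ops returning multiple values, so unpack() had to stay on the unboxed, statically typed path. A hedged sketch of the two calling conventions (op is the typed handle obtained above; the boxed variant is the one that would have failed):

    // Unboxed: the tuple return type is part of the handle's static type,
    // so multi-value returns work directly.
    std::tie(unpacked_weight, bias) = op.call(packed_weight);

    // Boxed: everything travels on a stack of IValues. At the time of this
    // commit, popping a multi-value result was unsupported, which is why the
    // Profiler key's boxed fallback had to be bypassed with the guard above.
    torch::jit::Stack stack;
    torch::jit::push(stack, packed_weight);
    op.callBoxed(&stack);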
