Skip to content

Commit

Permalink
cherry pick #3193: add req_full_compilation_arg from main to release/2.5 (#3213)
Browse files Browse the repository at this point in the history

Co-authored-by: apbose <[email protected]>
  • Loading branch information
lanluo-nvidia and apbose authored Oct 8, 2024
1 parent 2468a42 commit 348d21a
Show file tree
Hide file tree
Showing 2 changed files with 6 additions and 0 deletions.
2 changes: 2 additions & 0 deletions py/torch_tensorrt/dynamo/_compiler.py
Original file line number Diff line number Diff line change
Expand Up @@ -375,6 +375,7 @@ def contains_metadata(gm: torch.fx.GraphModule) -> bool:
verbose=settings.debug,
min_block_size=settings.min_block_size,
torch_executed_ops=settings.torch_executed_ops,
require_full_compilation=settings.require_full_compilation,
)
except torch.fx.passes.splitter_base.FxNetSplitterInternalError:
logger.error(
Expand All @@ -393,6 +394,7 @@ def contains_metadata(gm: torch.fx.GraphModule) -> bool:
verbose=settings.debug,
min_block_size=settings.min_block_size,
torch_executed_ops=settings.torch_executed_ops,
require_full_compilation=settings.require_full_compilation,
)

dryrun_tracker.unsupported_ops = supported_ops.unsupported_operators
Expand Down
4 changes: 4 additions & 0 deletions py/torch_tensorrt/dynamo/backend/backends.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,6 +107,10 @@ def _pretraced_backend(
torchtrt_inputs = prepare_inputs(
torch_inputs, disable_memory_format_check=True
)
if settings.require_full_compilation:
logger.warning(
"require_full_compilation arg is not applicable for torch.compile with backend='torch_tensorrt"
)
trt_compiled = compile_module(
gm,
torchtrt_inputs,
Expand Down

0 comments on commit 348d21a

Please sign in to comment.