Skip to content

Commit

Permalink
RuntimeError: Can't call main_program when full_graph=False. Use paddle.jit.to_static(full_graph=True) instead. (lyuwenyu#397)

Browse files Browse the repository at this point in the history

File "/git/RT-DETR/rtdetr_paddle/ppdet/engine/trainer.py", line 937, in _get_infer_cfg_and_input_spec
    input_spec, static_model.forward.main_program,
  File "/miniconda3/envs/rtdetrpaddle/lib/python3.10/site-packages/paddle/jit/dy2static/program_translator.py", line 757, in main_program
    raise_error_template("main_program")()
  File "/miniconda3/envs/rtdetrpaddle/lib/python3.10/site-packages/paddle/jit/dy2static/program_translator.py", line 691, in _raise_error
    raise RuntimeError(error_template.format(func=func_str))
RuntimeError: Can't call main_program when full_graph=False. Use paddle.jit.to_static(full_graph=True) instead.
  • Loading branch information
Pecako2001 authored Jul 30, 2024
1 parent 6b36128 commit 2c65ad5
Showing 1 changed file with 10 additions and 10 deletions.
20 changes: 10 additions & 10 deletions rtdetr_paddle/ppdet/engine/trainer.py
Original file line number Diff line number Diff line change
Expand Up @@ -883,24 +883,24 @@ def _get_infer_cfg_and_input_spec(self,
# set image_shape=[None, 3, -1, -1] as default
if image_shape is None:
image_shape = [None, 3, -1, -1]

if len(image_shape) == 3:
image_shape = [None] + image_shape
else:
im_shape = [image_shape[0], 2]
scale_factor = [image_shape[0], 2]

if hasattr(self.model, 'deploy'):
self.model.deploy = True

for layer in self.model.sublayers():
if hasattr(layer, 'convert_to_deploy'):
layer.convert_to_deploy()

if hasattr(self.cfg, 'export') and 'fuse_conv_bn' in self.cfg[
'export'] and self.cfg['export']['fuse_conv_bn']:
self.model = fuse_conv_bn(self.model)

export_post_process = self.cfg['export'].get(
'post_process', False) if hasattr(self.cfg, 'export') else True
export_nms = self.cfg['export'].get('nms', False) if hasattr(
Expand All @@ -913,12 +913,12 @@ def _get_infer_cfg_and_input_spec(self,
self.model.export_nms = export_nms if not export_benchmark else False
if export_post_process and not export_benchmark:
image_shape = [None] + image_shape[1:]

# Save infer cfg
_dump_infer_config(self.cfg,
os.path.join(save_dir, 'infer_cfg.yml'), image_shape,
self.model)

input_spec = [{
"image": InputSpec(
shape=image_shape, name='image'),
Expand All @@ -927,10 +927,10 @@ def _get_infer_cfg_and_input_spec(self,
"scale_factor": InputSpec(
shape=scale_factor, name='scale_factor')
}]

if prune_input:
static_model = paddle.jit.to_static(
self.model, input_spec=input_spec)
self.model, input_spec=input_spec, full_graph=True)
# NOTE: dy2st do not pruned program, but jit.save will prune program
# input spec, prune input spec here and save with pruned input spec
pruned_input_spec = _prune_input_spec(
Expand All @@ -939,7 +939,7 @@ def _get_infer_cfg_and_input_spec(self,
else:
static_model = None
pruned_input_spec = input_spec

return static_model, pruned_input_spec

def export(self, output_dir='output_inference'):
Expand Down

0 comments on commit 2c65ad5

Please sign in to comment.