diff --git a/intrinsic_compositing/albedo/pipeline.py b/intrinsic_compositing/albedo/pipeline.py
index 956ca2f..652aa3c 100644
--- a/intrinsic_compositing/albedo/pipeline.py
+++ b/intrinsic_compositing/albedo/pipeline.py
@@ -11,7 +11,7 @@
 from intrinsic_composite.albedo.model.editingnetwork_trainer import EditingNetworkTrainer
 
-PAPER_WEIGHTS_URL = ''
+PAPER_WEIGHTS_URL = 'https://github.com/compphoto/IntrinsicCompositing/releases/download/1.0.0/albedo_paper_weights.pth'
 
 CACHE_PATH = torch.hub.get_dir()
diff --git a/intrinsic_compositing/shading/pipeline.py b/intrinsic_compositing/shading/pipeline.py
index 283fbf5..0c9d412 100644
--- a/intrinsic_compositing/shading/pipeline.py
+++ b/intrinsic_compositing/shading/pipeline.py
@@ -12,7 +12,9 @@
 def load_reshading_model(path, device='cuda'):
 
     if path == 'paper_weights':
-        state_dict = torch.hub.load_state_dict_from_url('', map_location=device, progress=True)
+        state_dict = torch.hub.load_state_dict_from_url('https://github.com/compphoto/IntrinsicCompositing/releases/download/1.0.0/shading_paper_weights.pt', map_location=device, progress=True)
+    elif path == 'further_trained':
+        state_dict = torch.hub.load_state_dict_from_url('https://github.com/compphoto/IntrinsicCompositing/releases/download/1.0.0/further_trained.pt', map_location=device, progress=True)
     else:
         state_dict = torch.load(path)
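
For context, a minimal usage sketch of the patched loader. It assumes the module path and function signature shown in the diff (`intrinsic_compositing.shading.pipeline.load_reshading_model(path, device)`); the `'further_trained'` option is the new name added by this change, and anything else is still treated as a local checkpoint path.

```python
# Sketch only: assumes load_reshading_model returns a usable model object
# once the named release checkpoint has been downloaded via torch.hub.
import torch

from intrinsic_compositing.shading.pipeline import load_reshading_model

# Mirror the function's default of 'cuda', but fall back to CPU if needed.
device = 'cuda' if torch.cuda.is_available() else 'cpu'

# Either named checkpoint from the 1.0.0 release can now be fetched by keyword.
model = load_reshading_model('paper_weights', device=device)
# model = load_reshading_model('further_trained', device=device)

# Any other string is still interpreted as a local file path:
# model = load_reshading_model('/path/to/custom_weights.pt', device=device)
```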