diff --git a/dvc/repo/fetch.py b/dvc/repo/fetch.py
index d63bec83aa..52d6f97444 100644
--- a/dvc/repo/fetch.py
+++ b/dvc/repo/fetch.py
@@ -59,10 +59,9 @@ def _collect_indexes(  # noqa: PLR0913
                 types=types,
             )
 
-            data = idx.data["repo"]
-            data.onerror = _make_index_onerror(onerror, rev)
+            idx.data["repo"].onerror = _make_index_onerror(onerror, rev)
 
-            indexes[idx.data_tree.hash_info.value] = data
+            indexes[rev or "workspace"] = idx
         except Exception as exc:  # pylint: disable=broad-except
             if onerror:
                 onerror(rev, None, exc)
@@ -138,7 +137,10 @@ def fetch(  # noqa: C901, PLR0913
         onerror=onerror,
     )
 
-    cache_key = ("fetch", tokenize(sorted(indexes.keys())))
+    cache_key = (
+        "fetch",
+        tokenize(sorted(idx.data_tree.hash_info.value for idx in indexes.values())),
+    )
 
     with ui.progress(
         desc="Collecting",
@@ -146,7 +148,7 @@ def fetch(  # noqa: C901, PLR0913
         leave=True,
     ) as pb:
         data = collect(
-            indexes.values(),
+            [idx.data["repo"] for idx in indexes.values()],
             "remote",
             cache_index=self.data_index,
             cache_key=cache_key,
diff --git a/dvc/repo/push.py b/dvc/repo/push.py
index 1323be142b..a76c9d9e2b 100644
--- a/dvc/repo/push.py
+++ b/dvc/repo/push.py
@@ -55,14 +55,17 @@ def push(  # noqa: C901, PLR0913
         revs=revs,
     )
 
-    cache_key = ("fetch", tokenize(sorted(indexes.keys())))
+    cache_key = (
+        "fetch",
+        tokenize(sorted(idx.data_tree.hash_info.value for idx in indexes.values())),
+    )
 
     with Callback.as_tqdm_callback(
         desc="Collecting",
         unit="entry",
     ) as cb:
         data = collect(
-            indexes.values(),
+            [idx.data["repo"] for idx in indexes.values()],
             "remote",
             cache_index=self.data_index,
             cache_key=cache_key,
@@ -82,6 +85,25 @@ def push(  # noqa: C901, PLR0913
             callback=cb,
         )  # pylint: disable=assignment-from-no-return
     finally:
+        ws_idx = indexes.get("workspace")
+        if ws_idx is not None:
+            from dvc.repo.worktree import _merge_push_meta
+
+            stages = set()
+            for out in ws_idx.outs:
+                if (
+                    not ws_idx.data["repo"]
+                    .storage_map[out.index_key[1]]
+                    .remote.fs.version_aware
+                ):
+                    continue
+
+                _merge_push_meta(out, ws_idx.data["repo"])
+                stages.add(out.stage)
+
+            for stage in stages:
+                stage.dump(with_files=True, update_pipeline=False)
+
         for fs_index in data:
             fs_index.close()
 
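
A note on the new `cache_key`: it no longer depends on how `indexes` is keyed, since it is derived from the data-tree hashes themselves, sorted so that revision names and collection order cannot change the result. Below is a minimal, self-contained sketch of that property; `hashlib` stands in for DVC's `tokenize` helper, and `SimpleNamespace` stubs mimic the `idx.data_tree.hash_info.value` shape the diff reads from each collected index.

```python
# Illustrative only: hashlib replaces DVC's tokenize helper, and the stubs
# are hypothetical stand-ins for the collected index objects.
import hashlib
from types import SimpleNamespace


def make_cache_key(indexes: dict) -> tuple:
    # Sorting first makes the key independent of dict insertion order and of
    # the mapping keys ("workspace", rev names), matching the diff above.
    values = sorted(idx.data_tree.hash_info.value for idx in indexes.values())
    return ("fetch", hashlib.sha256("\n".join(values).encode()).hexdigest())


def _stub(value: str) -> SimpleNamespace:
    return SimpleNamespace(
        data_tree=SimpleNamespace(hash_info=SimpleNamespace(value=value))
    )


# The same trees under different keys and orderings yield one cache key.
assert make_cache_key(
    {"workspace": _stub("aaa"), "v1": _stub("bbb")}
) == make_cache_key({"v1": _stub("bbb"), "workspace": _stub("aaa")})
```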
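
Keying `indexes` by `rev or "workspace"` instead of by data-tree hash is what lets the new `finally` block in push.py look up the workspace index directly via `indexes.get("workspace")`. The block's control flow has one subtlety worth spelling out: several outputs can belong to one stage, so stages are gathered in a set and each stage file is dumped exactly once. A sketch of that shape, with `Out` and `Stage` as hypothetical stand-ins rather than DVC's real classes:

```python
# Hypothetical stand-ins for DVC's output/stage objects; only the
# dedup-then-dump control flow from the push.py hunk is reproduced here.
from dataclasses import dataclass


@dataclass(frozen=True)
class Stage:
    path: str

    def dump(self) -> None:
        print(f"dumping {self.path}")


@dataclass(frozen=True)
class Out:
    stage: Stage
    version_aware: bool  # in the diff this comes from the remote filesystem


def merge_and_dump(outs: list) -> None:
    stages = set()
    for out in outs:
        if not out.version_aware:
            continue  # the diff skips outputs whose remote is not version-aware
        # ... _merge_push_meta(...) would fold pushed version meta in here ...
        stages.add(out.stage)
    for stage in stages:
        stage.dump()  # one write per stage, even if it owns several outputs


shared = Stage("dvc.yaml")
merge_and_dump([Out(shared, True), Out(shared, True), Out(Stage("x.yaml"), False)])
# prints "dumping dvc.yaml" exactly once; the non-version-aware out is skipped
```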