Skip to content

Commit

Permalink
fetch: use index fetch
Browse files Browse the repository at this point in the history
  • Loading branch information
efiop committed May 8, 2023
1 parent 734ade3 commit 6fc5a5b
Show file tree
Hide file tree
Showing 2 changed files with 12 additions and 134 deletions.
144 changes: 11 additions & 133 deletions dvc/repo/fetch.py
Original file line number Diff line number Diff line change
Expand Up @@ -47,16 +47,11 @@ def fetch( # noqa: C901, PLR0913
"""
from dvc.repo.imports import save_imports
from dvc_data.hashfile.transfer import TransferResult
from dvc_data.index.fetch import fetch

if isinstance(targets, str):
targets = [targets]

worktree_remote: Optional["Remote"] = None
with suppress(NoRemoteError):
_remote = self.cloud.get_remote(name=remote)
if _remote.worktree or _remote.fs.version_aware:
worktree_remote = _remote

failed_count = 0
transferred_count = 0

Expand All @@ -66,133 +61,16 @@ def fetch( # noqa: C901, PLR0913
except DownloadError as exc:
failed_count += exc.amount

no_remote_msg: Optional[str] = None
result = TransferResult(set(), set())
try:
if worktree_remote is not None:
transferred_count += _fetch_worktree(
self,
worktree_remote,
revs=revs,
all_branches=all_branches,
all_tags=all_tags,
all_commits=all_commits,
targets=targets,
jobs=jobs,
with_deps=with_deps,
recursive=recursive,
)
else:
d, f = _fetch(
self,
def _indexes():
for _ in self.brancher(
revs=revs,
all_branches=all_branches,
all_tags=all_tags,
all_commits=all_commits,
):
yield self.index.targets_view(
targets,
all_branches=all_branches,
all_tags=all_tags,
all_commits=all_commits,
with_deps=with_deps,
force=True,
remote=remote,
jobs=jobs,
recursive=recursive,
revs=revs,
odb=odb,
)
result.transferred.update(d)
result.failed.update(f)
except NoRemoteError as exc:
no_remote_msg = str(exc)

for rev in self.brancher(
revs=revs,
all_branches=all_branches,
all_tags=all_tags,
all_commits=all_commits,
):
imported = save_imports(
self,
targets,
unpartial=not rev or rev == "workspace",
recursive=recursive,
)
result.transferred.update(imported)
result.failed.difference_update(imported)

failed_count += len(result.failed)

if failed_count:
if no_remote_msg:
logger.error(no_remote_msg)
raise DownloadError(failed_count)

transferred_count += len(result.transferred)
return transferred_count


def _fetch(
    repo: "Repo",
    targets: "TargetType",
    remote: Optional[str] = None,
    jobs: Optional[int] = None,
    odb: Optional["HashFileDB"] = None,
    **kwargs,
) -> "TransferResult":
    """Pull the objects used by *targets* from the remote.

    Returns a ``TransferResult`` whose two sets hold the ids of the
    objects that were transferred and those that failed, respectively.
    Extra keyword arguments are forwarded to ``repo.used_objs``.
    """
    from dvc_data.hashfile.transfer import TransferResult

    result = TransferResult(set(), set())
    used = repo.used_objs(targets, remote=remote, jobs=jobs, **kwargs)

    def _pull(obj_ids, dest_odb):
        # Pull one batch and fold its outcome into the shared result.
        transferred, failed = repo.cloud.pull(
            obj_ids,
            jobs=jobs,
            remote=remote,
            odb=dest_odb,
        )
        result.transferred.update(transferred)
        result.failed.update(failed)

    if odb:
        # Explicit destination odb: merge every object id into one set
        # and pull them in a single pass.
        all_ids = set()
        for obj_ids in used.values():
            all_ids.update(obj_ids)
        _pull(all_ids, odb)
    else:
        def _memory_last(item):
            # Sort key: False (non-memory) sorts before True (memory-backed),
            # so memory-filesystem odbs are pulled last.
            src = item[0]
            return src is not None and src.fs.protocol == Schemes.MEMORY

        for src_odb, obj_ids in sorted(used.items(), key=_memory_last):
            _pull(obj_ids, src_odb)
    return result


def _fetch_worktree(
repo: "Repo",
remote: "Remote",
revs: Optional[Sequence[str]] = None,
all_branches: bool = False,
all_tags: bool = False,
all_commits: bool = False,
targets: Optional["TargetType"] = None,
jobs: Optional[int] = None,
**kwargs,
) -> int:
from dvc.repo.worktree import fetch_worktree
).data["repo"]

downloaded = 0
for _ in repo.brancher(
revs=revs,
all_branches=all_branches,
all_tags=all_tags,
all_commits=all_commits,
):
downloaded += fetch_worktree(repo, remote, targets=targets, jobs=jobs, **kwargs)
return downloaded
fetch(_indexes())
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ dependencies = [
"configobj>=5.0.6",
"distro>=1.3",
"dpath<3,>=2.1.0",
"dvc-data>=0.47.1,<0.48",
"dvc-data>=0.48.0,<0.49",
"dvc-http>=2.29.0",
"dvc-render>=0.3.1,<1",
"dvc-studio-client>=0.8.0,<1",
Expand Down

0 comments on commit 6fc5a5b

Please sign in to comment.