# project.py
def _RemoteFetch(self, name=None,
current_branch_only=False,
initial=False,
quiet=False,
verbose=False,
output_redir=None,
alt_dir=None,
tags=True,
prune=False,
depth=None,
submodules=False,
ssh_proxy=None,
force_sync=False,
clone_filter=None,
retry_fetches=2,
retry_sleep_initial_sec=4.0,
retry_exp_factor=2.0):
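"""Fetch refs and objects for this project from the named remote.

Returns True if the fetch succeeded, or was skipped because the wanted
immutable revision (sha1 or tag) is already present locally; False otherwise.
"""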
is_sha1 = False
tag_name = None
# The depth should not be used when fetching to a mirror because
# it will result in a shallow repository that cannot be cloned or
# fetched from.
# The repo project should also never be synced with partial depth.
if self.manifest.IsMirror or self.relpath == '.repo/repo':
depth = None
if depth:
current_branch_only = True
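# A revisionExpr matching ID_RE pins the project to an immutable commit sha1.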
if ID_RE.match(self.revisionExpr) is not None:
is_sha1 = True
if current_branch_only:
if self.revisionExpr.startswith(R_TAGS):
# This is a tag and its commit id should never change.
tag_name = self.revisionExpr[len(R_TAGS):]
elif self.upstream and self.upstream.startswith(R_TAGS):
# This is a tag and its commit id should never change.
tag_name = self.upstream[len(R_TAGS):]
if is_sha1 or tag_name is not None:
if self._CheckForImmutableRevision():
if verbose:
print('Skipped fetching project %s (already have persistent ref)'
% self.name)
return True
if is_sha1 and not depth:
# When syncing a specific commit and --depth is not set:
# * if upstream is explicitly specified and is not a sha1, fetch only
# upstream, as users expect only upstream to be fetched.
# Note: the commit might not be in upstream, in which case the sync
# will fail.
# * otherwise, fetch all branches to make sure we end up with the
# specific commit.
if self.upstream:
current_branch_only = not ID_RE.match(self.upstream)
else:
current_branch_only = False
if not name:
name = self.remote.name
remote = self.GetRemote(name)
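# If we cannot pre-connect to the remote through the ssh proxy,
# drop the proxy and fetch directly.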
if not remote.PreConnectFetch(ssh_proxy):
ssh_proxy = None
if initial:
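# On the initial fetch, temporarily advertise the refs of the alternate
# object directory in our packed-refs so git can reuse its objects; the
# original packed-refs content is restored after the fetch loop below.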
if alt_dir and 'objects' == os.path.basename(alt_dir):
ref_dir = os.path.dirname(alt_dir)
packed_refs = os.path.join(self.gitdir, 'packed-refs')
all_refs = self.bare_ref.all
ids = set(all_refs.values())
tmp = set()
for r, ref_id in GitRefs(ref_dir).all.items():
if r not in all_refs:
if r.startswith(R_TAGS) or remote.WritesTo(r):
all_refs[r] = ref_id
ids.add(ref_id)
continue
if ref_id in ids:
continue
r = 'refs/_alt/%s' % ref_id
all_refs[r] = ref_id
ids.add(ref_id)
tmp.add(r)
tmp_packed_lines = []
old_packed_lines = []
for r in sorted(all_refs):
line = '%s %s\n' % (all_refs[r], r)
tmp_packed_lines.append(line)
if r not in tmp:
old_packed_lines.append(line)
tmp_packed = ''.join(tmp_packed_lines)
old_packed = ''.join(old_packed_lines)
_lwrite(packed_refs, tmp_packed)
else:
alt_dir = None
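# Build the git fetch command line.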
cmd = ['fetch']
if clone_filter:
git_require((2, 19, 0), fail=True, msg='partial clones')
cmd.append('--filter=%s' % clone_filter)
self.EnableRepositoryExtension('partialclone', self.remote.name)
if depth:
cmd.append('--depth=%s' % depth)
else:
# If this repo already has shallow objects, we don't know which refs the
# shallow objects belong to. Tell git to unshallow all fetched refs. Don't
# do this for projects without shallow objects, since it is less efficient.
if os.path.exists(os.path.join(self.gitdir, 'shallow')):
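# 2147483647 is INT_MAX, which git treats as an effectively unlimited depth.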
cmd.append('--depth=2147483647')
if not verbose:
cmd.append('--quiet')
if not quiet and sys.stdout.isatty():
cmd.append('--progress')
if not self.worktree:
cmd.append('--update-head-ok')
cmd.append(name)
if force_sync:
cmd.append('--force')
if prune:
cmd.append('--prune')
if submodules:
cmd.append('--recurse-submodules=on-demand')
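# Build the refspecs to pass to git fetch.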
spec = []
if not current_branch_only:
# Fetch whole repo
spec.append('+refs/heads/*:' + remote.ToLocal('refs/heads/*'))
elif tag_name is not None:
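# 'tag <name>' is git's shorthand refspec for refs/tags/<name>:refs/tags/<name>.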
spec.append('tag')
spec.append(tag_name)
if self.manifest.IsMirror and not current_branch_only:
branch = None
else:
branch = self.revisionExpr
if (not self.manifest.IsMirror and is_sha1 and depth
and git_require((1, 8, 3))):
# Shallow checkout of a specific commit: fetch from that commit rather than
# from the heads only, as the commit might be deeper in the history.
spec.append(branch)
if self.upstream:
spec.append(self.upstream)
else:
if is_sha1:
branch = self.upstream
if branch is not None and branch.strip():
if not branch.startswith('refs/'):
branch = R_HEADS + branch
spec.append(('+%s:' % branch) + remote.ToLocal(branch))
# If mirroring the repo and we cannot deduce the tag or branch to fetch,
# fetch the whole repo.
if self.manifest.IsMirror and not spec:
spec.append('+refs/heads/*:' + remote.ToLocal('refs/heads/*'))
# If using depth then we should not get all the tags since they may
# be outside of the depth.
if not tags or depth:
cmd.append('--no-tags')
else:
cmd.append('--tags')
spec.append('+refs/tags/*:' + remote.ToLocal('refs/tags/*'))
cmd.extend(spec)
# Always allow at least one retry so the fetch can be re-run after a
# 'git remote prune'.
retry_fetches = max(retry_fetches, 2)
retry_cur_sleep = retry_sleep_initial_sec
ok = prune_tried = False
for try_n in range(retry_fetches):
gitcmd = GitCommand(
self, cmd, bare=True, objdir=os.path.join(self.objdir, 'objects'),
ssh_proxy=ssh_proxy,
merge_output=True, capture_stdout=quiet or bool(output_redir))
if gitcmd.stdout and not quiet and output_redir:
output_redir.write(gitcmd.stdout)
ret = gitcmd.Wait()
if ret == 0:
ok = True
break
# Retry later due to HTTP 429 Too Many Requests.
elif (gitcmd.stdout and
'error:' in gitcmd.stdout and
'HTTP 429' in gitcmd.stdout):
# Fallthru to sleep+retry logic at the bottom.
pass
# Try to prune remote branches once in case there are conflicts.
# For example, if the remote had refs/heads/upstream, but deleted that and
# now has refs/heads/upstream/foo.
elif (gitcmd.stdout and
'error:' in gitcmd.stdout and
'git remote prune' in gitcmd.stdout and
not prune_tried):
prune_tried = True
prunecmd = GitCommand(self, ['remote', 'prune', name], bare=True,
ssh_proxy=ssh_proxy)
ret = prunecmd.Wait()
if ret:
break
print('retrying fetch after pruning remote branches', file=output_redir)
# Retry right away; after a successful prune there is no need to sleep.
continue
elif current_branch_only and is_sha1 and ret == 128:
# Exit code 128 means "couldn't find the ref you asked for"; if we're
# in sha1 mode, we just tried syncing from the upstream field and it
# doesn't exist, so abort the optimization attempt and do a full sync.
break
elif ret < 0:
# Git died with a signal; exit immediately.
break
# Figure out how long to sleep before the next attempt, if there is one.
if not verbose and gitcmd.stdout:
print('\n%s:\n%s' % (self.name, gitcmd.stdout), end='', file=output_redir)
if try_n < retry_fetches - 1:
print('%s: sleeping %s seconds before retrying' % (self.name, retry_cur_sleep),
file=output_redir)
time.sleep(retry_cur_sleep)
retry_cur_sleep = min(retry_exp_factor * retry_cur_sleep,
MAXIMUM_RETRY_SLEEP_SEC)
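# Apply +/- RETRY_JITTER_PERCENT of jitter so parallel fetches don't retry
# in lockstep.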
retry_cur_sleep *= (1 - random.uniform(-RETRY_JITTER_PERCENT,
RETRY_JITTER_PERCENT))
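# Restore the original packed-refs (or remove it if there was none) now that
# the temporary alternate refs are no longer needed, then repack all refs.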
if initial:
if alt_dir:
if old_packed != '':
_lwrite(packed_refs, old_packed)
else:
platform_utils.remove(packed_refs)
self.bare_git.pack_refs('--all', '--prune')
if is_sha1 and current_branch_only:
# We just synced the branch named by the upstream field; verify that we
# got the revision we wanted, else trigger a second run over all refs.
if not self._CheckForImmutableRevision():
# Sync the current branch only with depth set to None.
# We always pass depth=None down to avoid infinite recursion.
return self._RemoteFetch(
name=name, quiet=quiet, verbose=verbose, output_redir=output_redir,
current_branch_only=current_branch_only and depth,
initial=False, alt_dir=alt_dir,
depth=None, ssh_proxy=ssh_proxy, clone_filter=clone_filter)
return ok