blob: 726e6d079bae1d3e251dbab48a0cffcb66ceb8da [file] [log] [blame]
The Android Open Source Projectcf31fe92008-10-21 07:00:00 -07001# Copyright (C) 2008 The Android Open Source Project
2#
3# Licensed under the Apache License, Version 2.0 (the "License");
4# you may not use this file except in compliance with the License.
5# You may obtain a copy of the License at
6#
7# http://www.apache.org/licenses/LICENSE-2.0
8#
9# Unless required by applicable law or agreed to in writing, software
10# distributed under the License is distributed on an "AS IS" BASIS,
11# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12# See the License for the specific language governing permissions and
13# limitations under the License.
14
LaMont Jonesbdcba7d2022-04-11 22:50:11 +000015import collections
Gavin Mak7b6ffed2025-06-13 17:53:38 -070016import contextlib
Mike Frysingerebf04a42021-02-23 20:48:04 -050017import functools
Mike Frysingeracf63b22019-06-13 02:24:21 -040018import http.cookiejar as cookielib
Mike Frysinger7b586f22021-02-23 18:38:39 -050019import io
Anthony King85b24ac2014-05-06 15:57:48 +010020import json
Mike Frysingerebf04a42021-02-23 20:48:04 -050021import multiprocessing
David Pursehouse86d973d2012-08-24 10:21:02 +090022import netrc
Mike Frysinger06ddc8c2023-08-21 21:26:51 -040023import optparse
The Android Open Source Projectcf31fe92008-10-21 07:00:00 -070024import os
Josip Sokcevic55545722024-02-22 16:38:00 -080025from pathlib import Path
Jason Changdaf2ad32023-08-31 17:06:36 -070026import sys
Dan Willemsen0745bb22015-08-17 13:41:45 -070027import tempfile
Shawn O. Pearcef6906872009-04-18 10:49:00 -070028import time
Gavin Makdf3c4012025-06-17 19:40:06 -070029from typing import List, NamedTuple, Optional, Set, Tuple, Union
Mike Frysingeracf63b22019-06-13 02:24:21 -040030import urllib.error
31import urllib.parse
32import urllib.request
Mike Frysinger5951e302022-05-20 23:34:44 -040033import xml.parsers.expat
Mike Frysingeracf63b22019-06-13 02:24:21 -040034import xmlrpc.client
The Android Open Source Projectcf31fe92008-10-21 07:00:00 -070035
Mike Frysinger64477332023-08-21 21:20:32 -040036
# Prefer real threads; fall back to the no-op dummy module on platforms
# where threading is unavailable.
try:
    import threading as _threading
except ImportError:
    import dummy_threading as _threading

try:
    import resource

    def _rlimit_nofile():
        """Return the (soft, hard) limit on open file descriptors."""
        return resource.getrlimit(resource.RLIMIT_NOFILE)

except ImportError:
    # The resource module is unavailable (e.g. on Windows); fall back to a
    # conservative guess at the file descriptor limit.

    def _rlimit_nofile():
        """Return a conservative default (soft, hard) FD limit."""
        return (256, 256)

Shawn O. Pearce97d2b2f2011-09-22 17:23:41 -070053
Mike Frysinger64477332023-08-21 21:20:32 -040054from command import Command
55from command import DEFAULT_LOCAL_JOBS
56from command import MirrorSafeCommand
57from command import WORKER_BATCH_SIZE
58from error import GitError
59from error import RepoChangedException
Jason Changdaf2ad32023-08-31 17:06:36 -070060from error import RepoError
Mike Frysinger64477332023-08-21 21:20:32 -040061from error import RepoExitError
62from error import RepoUnhandledExceptionError
63from error import SyncError
64from error import UpdateManifestError
David Rileye0684ad2017-04-05 00:02:59 -070065import event_log
Mike Frysinger347f9ed2021-03-15 14:58:52 -040066from git_command import git_require
David Pursehouseba7bc732015-08-20 16:55:42 +090067from git_config import GetUrlCookieFile
Mike Frysinger64477332023-08-21 21:20:32 -040068from git_refs import HEAD
69from git_refs import R_HEADS
Raman Tenneti6a872c92021-01-14 19:17:50 -080070import git_superproject
Kenny Cheng82d500e2025-06-02 21:55:04 +080071from hooks import RepoHook
Mike Frysinger64477332023-08-21 21:20:32 -040072import platform_utils
73from progress import elapsed_str
74from progress import jobs_str
75from progress import Progress
76from project import DeleteWorktreeError
Jaikumar Ganesh4f2517f2009-06-01 21:10:33 -070077from project import Project
78from project import RemoteSpec
Mike Frysinger64477332023-08-21 21:20:32 -040079from project import SyncBuffer
Aravind Vasudevane914ec22023-08-31 20:57:31 +000080from repo_logging import RepoLogger
Joanna Wanga6c52f52022-11-03 16:51:19 -040081from repo_trace import Trace
Mike Frysinger19e409c2021-05-05 19:44:35 -040082import ssh
Conley Owens094cdbe2014-01-30 15:09:59 -080083from wrapper import Wrapper
The Android Open Source Projectcf31fe92008-10-21 07:00:00 -070084
Mike Frysinger64477332023-08-21 21:20:32 -040085
# Number of seconds in one day, used for time-based expiry checks.
_ONE_DAY_S = 24 * 60 * 60

# Environment override permitting shallow fetches; read once at import time.
_REPO_ALLOW_SHALLOW = os.environ.get("REPO_ALLOW_SHALLOW")

# Thresholds for flagging a bloated git object store.
# NOTE(review): their exact use is outside this view -- confirm with callers.
_BLOAT_PACK_COUNT_THRESHOLD = 10
_BLOAT_SIZE_PACK_THRESHOLD_KB = 10 * 1024 * 1024  # 10 GiB in KiB
_BLOAT_SIZE_GARBAGE_THRESHOLD_KB = 1 * 1024 * 1024  # 1 GiB in KiB

logger = RepoLogger(__file__)
95
David Pursehouse819827a2020-02-12 15:20:19 +090096
def _SafeCheckoutOrder(checkouts: List[Project]) -> List[List[Project]]:
    """Partition checkouts into levels that are safe to check out in order.

    The client should finish checking out everything in level n before
    starting level n+1. This only matters when the manifest contains nested
    projects: e.g. for paths foo, foo/bar and foo/bar/baz, foo must finish
    before foo/bar can proceed, and foo/bar before foo/bar/baz.
    """

    def _is_within(child, parent):
        """Return True if `child` lives under `parent`."""
        try:
            # Path.relative_to raises ValueError when the paths aren't
            # related. TODO(sokcevic): Switch to is_relative_to once the
            # minimum supported version is py3.9.
            child.relative_to(parent)
        except ValueError:
            return False
        return True

    levels = [[]]
    # Stack of ancestor paths of the checkout currently being placed.
    ancestors = []

    # Iterate in hierarchical order so each checkout immediately follows any
    # parent project. Sorting on the path components (not the raw string)
    # matters: for projects foo, foo/bar and foo-bar, plain lexicographic
    # order would yield foo, foo-bar, foo/bar, which breaks the invariant.
    for checkout in sorted(checkouts, key=lambda c: c.relpath.split("/")):
        path = Path(checkout.relpath)
        while ancestors:
            if _is_within(path, ancestors[-1]):
                # Nested one level deeper than anything seen so far.
                if len(ancestors) >= len(levels):
                    levels.append([])
                break
            ancestors.pop()

        levels[len(ancestors)].append(checkout)
        ancestors.append(path)

    return levels
138
139
def _chunksize(projects: int, jobs: int) -> int:
    """Calculate chunk size for the given number of projects and jobs."""
    per_job = projects // jobs
    # Clamp to [1, WORKER_BATCH_SIZE] so each worker gets a sane batch.
    return min(max(1, per_job), WORKER_BATCH_SIZE)
143
144
class _FetchOneResult(NamedTuple):
    """_FetchOne return value.

    Attributes:
        success (bool): True if successful.
        errors (List[Exception]): Exceptions raised during the fetch, if any.
        project_idx (int): The fetched project index.
        start (float): The starting time.time().
        finish (float): The ending time.time().
        remote_fetched (bool): True if the remote was actually queried.
    """

    success: bool
    errors: List[Exception]
    project_idx: int
    start: float
    finish: float
    remote_fetched: bool
LaMont Jones1eddca82022-09-01 15:15:04 +0000162
163
class _FetchResult(NamedTuple):
    """_Fetch return value.

    Attributes:
        success (bool): True if successful.
        projects (Set[str]): The names of the git directories of fetched
            projects.
    """

    success: bool
    projects: Set[str]
175
class _FetchMainResult(NamedTuple):
    """_FetchMain return value.

    Attributes:
        all_projects (List[Project]): The fetched projects.
    """

    all_projects: List[Project]
LaMont Jones1eddca82022-09-01 15:15:04 +0000184
185
class _CheckoutOneResult(NamedTuple):
    """_CheckoutOne return value.

    Attributes:
        success (bool): True if successful.
        errors (List[Exception]): Exceptions raised during checkout, if any.
        project_idx (int): The project index.
        start (float): The starting time.time().
        finish (float): The ending time.time().
    """

    success: bool
    errors: List[Exception]
    project_idx: int
    start: float
    finish: float
202
class _SyncResult(NamedTuple):
    """Individual project sync result for interleaved mode.

    Attributes:
        project_index (int): The index of the project in the shared list.
        relpath (str): The project's relative path from the repo client top.
        remote_fetched (bool): True if the remote was actually queried.
        fetch_success (bool): True if the fetch operation was successful.
        fetch_errors (List[Exception]): The Exceptions from a failed fetch.
        fetch_start (Optional[float]): The time.time() when fetch started.
        fetch_finish (Optional[float]): The time.time() when fetch finished.
        checkout_success (bool): True if the checkout operation was
            successful.
        checkout_errors (List[Exception]): The Exceptions from a failed
            checkout.
        checkout_start (Optional[float]): The time.time() when checkout
            started.
        checkout_finish (Optional[float]): The time.time() when checkout
            finished.
        stderr_text (str): The combined output from both fetch and checkout.
    """

    project_index: int
    relpath: str

    remote_fetched: bool
    fetch_success: bool
    fetch_errors: List[Exception]
    fetch_start: Optional[float]
    fetch_finish: Optional[float]

    checkout_success: bool
    checkout_errors: List[Exception]
    checkout_start: Optional[float]
    checkout_finish: Optional[float]

    stderr_text: str
240
Gavin Makb4b323a2025-06-17 10:54:41 -0700241
class _InterleavedSyncResult(NamedTuple):
    """Result of an interleaved sync.

    Attributes:
        results (List[_SyncResult]): A list of results, one for each project
            processed. Empty if the worker failed before creating results.
    """

    results: List[_SyncResult]
251
252
class SuperprojectError(SyncError):
    """Raised when syncing the superproject fails."""
255
256
class SyncFailFastError(SyncError):
    """Sync exit error raised when --fail-fast is set and a project fails."""
259
260
class SmartSyncError(SyncError):
    """Sync exit error raised when a smart sync (-s/-t) attempt fails."""
263
264
class ManifestInterruptError(RepoError):
    """Aggregate Error to be logged when a user interrupts a manifest update."""

    def __init__(self, output, **kwargs):
        super().__init__(output, **kwargs)
        # Stash the partial command output so callers/loggers can surface it.
        self.output = output

    def __str__(self):
        # Rendered as "<ClassName>:<output>" for log aggregation.
        return f"{type(self).__name__}:{self.output}"
275
276
class TeeStringIO(io.StringIO):
    """StringIO that mirrors every write to an optional secondary stream."""

    def __init__(
        self, io: Union[io.TextIOWrapper, None], *args, **kwargs
    ) -> None:
        super().__init__(*args, **kwargs)
        self.io = io

    def write(self, s: str) -> int:
        """Buffer `s` and also copy it to the secondary stream, if set."""
        count = super().write(s)
        dest = self.io
        if dest is not None:
            dest.write(s)
        # Report what the in-memory buffer accepted.
        return count
Jason Changdaf2ad32023-08-31 17:06:36 -0700292
293
class Sync(Command, MirrorSafeCommand):
    """Implements the `repo sync` command."""

    COMMON = True
    MULTI_MANIFEST_SUPPORT = True
    helpSummary = "Update working tree to the latest revision"
    helpUsage = """
%prog [<project>...]
"""
    helpDescription = """
The '%prog' command synchronizes local project directories
with the remote repositories specified in the manifest. If a local
project does not yet exist, it will clone a new local directory from
the remote repository and set up tracking branches as specified in
the manifest. If the local project already exists, '%prog'
will update the remote branches and rebase any new local changes
on top of the new remote changes.

'%prog' will synchronize all projects listed at the command
line. Projects can be specified either by name, or by a relative
or absolute path to the project's local directory. If no projects
are specified, '%prog' will synchronize all projects listed in
the manifest.

The -d/--detach option can be used to switch specified projects
back to the manifest revision. This option is especially helpful
if the project is currently on a topic branch, but the manifest
revision is temporarily needed.

The -s/--smart-sync option can be used to sync to a known good
build as specified by the manifest-server element in the current
manifest. The -t/--smart-tag option is similar and allows you to
specify a custom tag/label.

The -u/--manifest-server-username and -p/--manifest-server-password
options can be used to specify a username and password to authenticate
with the manifest server when using the -s or -t option.

If -u and -p are not specified when using the -s or -t option, '%prog'
will attempt to read authentication credentials for the manifest server
from the user's .netrc file.

'%prog' will not use authentication credentials from -u/-p or .netrc
if the manifest server specified in the manifest file already includes
credentials.

By default, all projects will be synced. The --fail-fast option can be used
to halt syncing as soon as possible when the first project fails to sync.

The --force-sync option can be used to overwrite existing git
directories if they have previously been linked to a different
object directory. WARNING: This may cause data to be lost since
refs may be removed when overwriting.

The --force-checkout option can be used to force git to switch revs even if the
index or the working tree differs from HEAD, and if there are untracked files.
WARNING: This may cause data to be lost since uncommitted changes may be
removed.

The --force-remove-dirty option can be used to remove previously used
projects with uncommitted changes. WARNING: This may cause data to be
lost since uncommitted changes may be removed with projects that no longer
exist in the manifest.

The --no-clone-bundle option disables any attempt to use
$URL/clone.bundle to bootstrap a new Git repository from a
resumeable bundle file on a content delivery network. This
may be necessary if there are problems with the local Python
HTTP client or proxy configuration, but the Git binary works.

The --fetch-submodules option enables fetching Git submodules
of a project from server.

The -c/--current-branch option can be used to only fetch objects that
are on the branch specified by a project's revision.

The --optimized-fetch option can be used to only fetch projects that
are fixed to a sha1 revision if the sha1 revision does not already
exist locally.

The --prune option can be used to remove any refs that no longer
exist on the remote.

The --auto-gc option can be used to trigger garbage collection on all
projects. By default, repo does not run garbage collection.

# SSH Connections

If at least one project remote URL uses an SSH connection (ssh://,
git+ssh://, or user@host:path syntax) repo will automatically
enable the SSH ControlMaster option when connecting to that host.
This feature permits other projects in the same '%prog' session to
reuse the same SSH tunnel, saving connection setup overheads.

To disable this behavior on UNIX platforms, set the GIT_SSH
environment variable to 'ssh'. For example:

  export GIT_SSH=ssh
  %prog

# Compatibility

This feature is automatically disabled on Windows, due to the lack
of UNIX domain socket support.

This feature is not compatible with url.insteadof rewrites in the
user's ~/.gitconfig. '%prog' is currently not able to perform the
rewrite early enough to establish the ControlMaster tunnel.

If the remote SSH daemon is Gerrit Code Review, version 2.0.10 or
later is required to fix a server side protocol bug.

"""
    # A value of 0 means we want parallel jobs, but we'll determine the default
    # value later on.
    PARALLEL_JOBS = 0

    # Job counts above this are treated as suspiciously large.
    # NOTE(review): the check using this lives outside this view -- confirm.
    _JOBS_WARN_THRESHOLD = 100
410
    def _Options(self, p, show_smart=True):
        """Register command line options for `repo sync`.

        Args:
            p: The optparse.OptionParser to add options to.
            show_smart: Whether to expose the -s/--smart-sync and
                -t/--smart-tag options.
        """
        # Parallelism knobs for the phased (non-interleaved) sync path.
        p.add_option(
            "--jobs-network",
            default=None,
            type=int,
            metavar="JOBS",
            help="number of network jobs to run in parallel (defaults to "
            "--jobs or 1). Ignored unless --no-interleaved is set",
        )
        p.add_option(
            "--jobs-checkout",
            default=None,
            type=int,
            metavar="JOBS",
            help=(
                "number of local checkout jobs to run in parallel (defaults "
                f"to --jobs or {DEFAULT_LOCAL_JOBS}). Ignored unless "
                "--no-interleaved is set"
            ),
        )

        p.add_option(
            "-f",
            "--force-broken",
            action="store_true",
            help="obsolete option (to be deleted in the future)",
        )
        p.add_option(
            "--fail-fast",
            action="store_true",
            help="stop syncing after first error is hit",
        )
        p.add_option(
            "--force-sync",
            action="store_true",
            help="overwrite an existing git directory if it needs to "
            "point to a different object directory. WARNING: this "
            "may cause loss of data",
        )
        p.add_option(
            "--force-checkout",
            action="store_true",
            help="force checkout even if it results in throwing away "
            "uncommitted modifications. "
            "WARNING: this may cause loss of data",
        )
        p.add_option(
            "--force-remove-dirty",
            action="store_true",
            help="force remove projects with uncommitted modifications if "
            "projects no longer exist in the manifest. "
            "WARNING: this may cause loss of data",
        )
        p.add_option(
            "--rebase",
            action="store_true",
            help="rebase local commits regardless of whether they are "
            "published",
        )
        p.add_option(
            "-l",
            "--local-only",
            action="store_true",
            help="only update working tree, don't fetch",
        )
        p.add_option(
            "--no-manifest-update",
            "--nmu",
            dest="mp_update",
            action="store_false",
            default="true",
            help="use the existing manifest checkout as-is. "
            "(do not update to the latest revision)",
        )
        # Interleaved mode is the default; --no-interleaved restores the
        # older two-phase (fetch everything, then checkout) behavior.
        p.add_option(
            "--interleaved",
            action="store_true",
            default=True,
            help="fetch and checkout projects in parallel (default)",
        )
        p.add_option(
            "--no-interleaved",
            dest="interleaved",
            action="store_false",
            help="fetch and checkout projects in phases",
        )
        p.add_option(
            "-n",
            "--network-only",
            action="store_true",
            help="fetch only, don't update working tree",
        )
        p.add_option(
            "-d",
            "--detach",
            dest="detach_head",
            action="store_true",
            help="detach projects back to manifest revision",
        )
        p.add_option(
            "-c",
            "--current-branch",
            dest="current_branch_only",
            action="store_true",
            help="fetch only current branch from server",
        )
        p.add_option(
            "--no-current-branch",
            dest="current_branch_only",
            action="store_false",
            help="fetch all branches from server",
        )
        p.add_option(
            "-m",
            "--manifest-name",
            help="temporary manifest to use for this sync",
            metavar="NAME.xml",
        )
        p.add_option(
            "--clone-bundle",
            action="store_true",
            help="enable use of /clone.bundle on HTTP/HTTPS",
        )
        p.add_option(
            "--no-clone-bundle",
            dest="clone_bundle",
            action="store_false",
            help="disable use of /clone.bundle on HTTP/HTTPS",
        )
        p.add_option(
            "-u",
            "--manifest-server-username",
            action="store",
            help="username to authenticate with the manifest server",
        )
        p.add_option(
            "-p",
            "--manifest-server-password",
            action="store",
            help="password to authenticate with the manifest server",
        )
        p.add_option(
            "--fetch-submodules",
            action="store_true",
            help="fetch submodules from server",
        )
        p.add_option(
            "--use-superproject",
            action="store_true",
            help="use the manifest superproject to sync projects; implies -c",
        )
        p.add_option(
            "--no-use-superproject",
            action="store_false",
            dest="use_superproject",
            help="disable use of manifest superprojects",
        )
        p.add_option("--tags", action="store_true", help="fetch tags")
        p.add_option(
            "--no-tags",
            dest="tags",
            action="store_false",
            help="don't fetch tags (default)",
        )
        p.add_option(
            "--optimized-fetch",
            action="store_true",
            help="only fetch projects fixed to sha1 if revision does not exist "
            "locally",
        )
        p.add_option(
            "--retry-fetches",
            default=0,
            action="store",
            type="int",
            help="number of times to retry fetches on transient errors",
        )
        p.add_option(
            "--prune",
            action="store_true",
            help="delete refs that no longer exist on the remote (default)",
        )
        p.add_option(
            "--no-prune",
            dest="prune",
            action="store_false",
            help="do not delete refs that no longer exist on the remote",
        )
        # default=None (not False) so later code can distinguish "unset"
        # from an explicit --no-auto-gc.
        p.add_option(
            "--auto-gc",
            action="store_true",
            default=None,
            help="run garbage collection on all synced projects",
        )
        p.add_option(
            "--no-auto-gc",
            dest="auto_gc",
            action="store_false",
            help="do not run garbage collection on any projects (default)",
        )
        if show_smart:
            p.add_option(
                "-s",
                "--smart-sync",
                action="store_true",
                help="smart sync using manifest from the latest known good "
                "build",
            )
            p.add_option(
                "-t",
                "--smart-tag",
                action="store",
                help="smart sync using manifest from a known tag",
            )

        g = p.add_option_group("repo Version options")
        g.add_option(
            "--no-repo-verify",
            dest="repo_verify",
            default=True,
            action="store_false",
            help="do not verify repo source code",
        )
        g.add_option(
            "--repo-upgraded",
            action="store_true",
            help=optparse.SUPPRESS_HELP,
        )
        RepoHook.AddOptionGroup(p, "post-sync")
The Android Open Source Projectcf31fe92008-10-21 07:00:00 -0700640
Gavin Makea2e3302023-03-11 06:46:20 +0000641 def _GetBranch(self, manifest_project):
642 """Returns the branch name for getting the approved smartsync manifest.
LaMont Jonesa46047a2022-04-07 21:57:06 +0000643
Gavin Makea2e3302023-03-11 06:46:20 +0000644 Args:
645 manifest_project: The manifestProject to query.
646 """
647 b = manifest_project.GetBranch(manifest_project.CurrentBranch)
648 branch = b.merge
649 if branch.startswith(R_HEADS):
650 branch = branch[len(R_HEADS) :]
651 return branch
Raman Tenneti8d43dea2021-02-07 16:30:27 -0800652
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800653 @classmethod
654 def _GetCurrentBranchOnly(cls, opt, manifest):
Gavin Makea2e3302023-03-11 06:46:20 +0000655 """Returns whether current-branch or use-superproject options are
656 enabled.
Daniel Anderssond52ca422022-04-01 12:55:38 +0200657
Gavin Makea2e3302023-03-11 06:46:20 +0000658 Args:
659 opt: Program options returned from optparse. See _Options().
660 manifest: The manifest to use.
LaMont Jonesa46047a2022-04-07 21:57:06 +0000661
Gavin Makea2e3302023-03-11 06:46:20 +0000662 Returns:
663 True if a superproject is requested, otherwise the value of the
664 current_branch option (True, False or None).
665 """
666 return (
667 git_superproject.UseSuperproject(opt.use_superproject, manifest)
668 or opt.current_branch_only
669 )
Raman Tenneti2ae44d72021-03-23 15:12:27 -0700670
    def _UpdateProjectsRevisionId(
        self, opt, args, superproject_logging_data, manifest
    ):
        """Update revisionId of projects with the commit from the superproject.

        This function updates each project's revisionId with the commit hash
        from the superproject. It writes the updated manifest into a file and
        reloads the manifest from it. When appropriate, sub manifests are also
        processed.

        Args:
            opt: Program options returned from optparse. See _Options().
            args: Arguments to pass to GetProjects. See the GetProjects
                docstring for details.
            superproject_logging_data: A dictionary of superproject data to log.
            manifest: The manifest to use.
        """
        # Nothing to do unless this manifest (or a sub manifest) declares a
        # superproject.
        have_superproject = manifest.superproject or any(
            m.superproject for m in manifest.all_children
        )
        if not have_superproject:
            return

        # With --local-only, reuse the previously generated superproject
        # manifest instead of regenerating it.
        if opt.local_only and manifest.superproject:
            manifest_path = manifest.superproject.manifest_path
            if manifest_path:
                self._ReloadManifest(manifest_path, manifest)
            return

        all_projects = self.GetProjects(
            args,
            missing_ok=True,
            submodules_ok=opt.fetch_submodules,
            manifest=manifest,
            all_manifests=not opt.this_manifest_only,
        )

        # Bucket projects by the (sub)manifest they belong to.
        per_manifest = collections.defaultdict(list)
        if opt.this_manifest_only:
            per_manifest[manifest.path_prefix] = all_projects
        else:
            for p in all_projects:
                per_manifest[p.manifest.path_prefix].append(p)

        # NOTE(review): this rebinding discards the dict the caller passed in
        # as `superproject_logging_data`; only the local dict is populated
        # below. Confirm whether the caller expects to see these keys.
        superproject_logging_data = {}
        need_unload = False
        for m in self.ManifestList(opt):
            if m.path_prefix not in per_manifest:
                continue
            use_super = git_superproject.UseSuperproject(
                opt.use_superproject, m
            )
            # A second (or later) iteration means multiple manifests are in
            # play; record that before overwriting per-manifest keys.
            if superproject_logging_data:
                superproject_logging_data["multimanifest"] = True
            superproject_logging_data.update(
                superproject=use_super,
                haslocalmanifests=bool(m.HasLocalManifests),
                hassuperprojecttag=bool(m.superproject),
            )
            if use_super and (m.IsMirror or m.IsArchive):
                # Don't use superproject, because we have no working tree.
                use_super = False
                superproject_logging_data["superproject"] = False
                superproject_logging_data["noworktree"] = True
                if opt.use_superproject is not False:
                    logger.warning(
                        "%s: not using superproject because there is no "
                        "working tree.",
                        m.path_prefix,
                    )

            if not use_super:
                continue
            m.superproject.SetQuiet(not opt.verbose)
            print_messages = git_superproject.PrintMessages(
                opt.use_superproject, m
            )
            m.superproject.SetPrintMessages(print_messages)
            update_result = m.superproject.UpdateProjectsRevisionId(
                per_manifest[m.path_prefix], git_event_log=self.git_event_log
            )
            manifest_path = update_result.manifest_path
            superproject_logging_data["updatedrevisionid"] = bool(manifest_path)
            if manifest_path:
                m.SetManifestOverride(manifest_path)
                need_unload = True
            else:
                if print_messages:
                    logger.warning(
                        "%s: warning: Update of revisionId from superproject "
                        "has failed, repo sync will not use superproject to "
                        "fetch the source. Please resync with the "
                        "--no-use-superproject option to avoid this repo "
                        "warning.",
                        m.path_prefix,
                    )
                # Only fatal when the user explicitly requested superproject.
                if update_result.fatal and opt.use_superproject is not None:
                    raise SuperprojectError()
        # `m` here is the last manifest from the loop above; unloading it
        # forces a reload that picks up the manifest overrides set above.
        if need_unload:
            m.outer_client.manifest.Unload()
Raman Tenneti1fd7bc22021-02-04 14:39:38 -0800771
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800772 @classmethod
773 def _FetchProjectList(cls, opt, projects):
Gavin Makea2e3302023-03-11 06:46:20 +0000774 """Main function of the fetch worker.
Mike Frysingerb2fa30a2021-02-24 00:15:32 -0500775
Gavin Makea2e3302023-03-11 06:46:20 +0000776 The projects we're given share the same underlying git object store, so
777 we have to fetch them in serial.
Roy Lee18afd7f2010-05-09 04:32:08 +0800778
Gavin Mak551285f2023-05-04 04:48:43 +0000779 Delegates most of the work to _FetchOne.
David James8d201162013-10-11 17:03:19 -0700780
Gavin Makea2e3302023-03-11 06:46:20 +0000781 Args:
782 opt: Program options returned from optparse. See _Options().
783 projects: Projects to fetch.
784 """
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800785 return [cls._FetchOne(opt, x) for x in projects]
David James8d201162013-10-11 17:03:19 -0700786
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800787 @classmethod
788 def _FetchOne(cls, opt, project_idx):
Gavin Makea2e3302023-03-11 06:46:20 +0000789 """Fetch git objects for a single project.
David James8d201162013-10-11 17:03:19 -0700790
Gavin Makea2e3302023-03-11 06:46:20 +0000791 Args:
792 opt: Program options returned from optparse. See _Options().
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800793 project_idx: Project index for the project to fetch.
David James8d201162013-10-11 17:03:19 -0700794
Gavin Makea2e3302023-03-11 06:46:20 +0000795 Returns:
796 Whether the fetch was successful.
797 """
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800798 project = cls.get_parallel_context()["projects"][project_idx]
Gavin Makea2e3302023-03-11 06:46:20 +0000799 start = time.time()
Gavin Mak551285f2023-05-04 04:48:43 +0000800 k = f"{project.name} @ {project.relpath}"
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800801 cls.get_parallel_context()["sync_dict"][k] = start
Gavin Makea2e3302023-03-11 06:46:20 +0000802 success = False
803 remote_fetched = False
Jason Chang32b59562023-07-14 16:45:35 -0700804 errors = []
Jason Changdaf2ad32023-08-31 17:06:36 -0700805 buf = TeeStringIO(sys.stdout if opt.verbose else None)
Gavin Makea2e3302023-03-11 06:46:20 +0000806 try:
807 sync_result = project.Sync_NetworkHalf(
808 quiet=opt.quiet,
809 verbose=opt.verbose,
810 output_redir=buf,
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800811 current_branch_only=cls._GetCurrentBranchOnly(
Gavin Makea2e3302023-03-11 06:46:20 +0000812 opt, project.manifest
813 ),
814 force_sync=opt.force_sync,
815 clone_bundle=opt.clone_bundle,
816 tags=opt.tags,
817 archive=project.manifest.IsArchive,
818 optimized_fetch=opt.optimized_fetch,
819 retry_fetches=opt.retry_fetches,
820 prune=opt.prune,
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800821 ssh_proxy=cls.get_parallel_context()["ssh_proxy"],
Gavin Makea2e3302023-03-11 06:46:20 +0000822 clone_filter=project.manifest.CloneFilter,
823 partial_clone_exclude=project.manifest.PartialCloneExclude,
Jason Chang17833322023-05-23 13:06:55 -0700824 clone_filter_for_depth=project.manifest.CloneFilterForDepth,
Gavin Makea2e3302023-03-11 06:46:20 +0000825 )
826 success = sync_result.success
827 remote_fetched = sync_result.remote_fetched
Jason Chang32b59562023-07-14 16:45:35 -0700828 if sync_result.error:
829 errors.append(sync_result.error)
Doug Andersonfc06ced2011-03-16 15:49:18 -0700830
Gavin Makea2e3302023-03-11 06:46:20 +0000831 output = buf.getvalue()
Jason Changdaf2ad32023-08-31 17:06:36 -0700832 if output and buf.io is None and not success:
Gavin Makea2e3302023-03-11 06:46:20 +0000833 print("\n" + output.rstrip())
Doug Andersonfc06ced2011-03-16 15:49:18 -0700834
Gavin Makea2e3302023-03-11 06:46:20 +0000835 if not success:
Aravind Vasudevane914ec22023-08-31 20:57:31 +0000836 logger.error(
837 "error: Cannot fetch %s from %s",
838 project.name,
839 project.remote.url,
Gavin Makea2e3302023-03-11 06:46:20 +0000840 )
841 except KeyboardInterrupt:
Aravind Vasudevane914ec22023-08-31 20:57:31 +0000842 logger.error("Keyboard interrupt while processing %s", project.name)
Gavin Makea2e3302023-03-11 06:46:20 +0000843 except GitError as e:
Aravind Vasudevane914ec22023-08-31 20:57:31 +0000844 logger.error("error.GitError: Cannot fetch %s", e)
Jason Chang32b59562023-07-14 16:45:35 -0700845 errors.append(e)
Gavin Makea2e3302023-03-11 06:46:20 +0000846 except Exception as e:
Aravind Vasudevane914ec22023-08-31 20:57:31 +0000847 logger.error(
848 "error: Cannot fetch %s (%s: %s)",
849 project.name,
850 type(e).__name__,
851 e,
Gavin Makea2e3302023-03-11 06:46:20 +0000852 )
Jason Chang32b59562023-07-14 16:45:35 -0700853 errors.append(e)
Gavin Makea2e3302023-03-11 06:46:20 +0000854 raise
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800855 finally:
856 del cls.get_parallel_context()["sync_dict"][k]
Mike Frysinger7b586f22021-02-23 18:38:39 -0500857
Gavin Makea2e3302023-03-11 06:46:20 +0000858 finish = time.time()
Jason Chang32b59562023-07-14 16:45:35 -0700859 return _FetchOneResult(
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800860 success, errors, project_idx, start, finish, remote_fetched
Jason Chang32b59562023-07-14 16:45:35 -0700861 )
David James8d201162013-10-11 17:03:19 -0700862
Gavin Mak04cba4a2023-05-24 21:28:28 +0000863 def _GetSyncProgressMessage(self):
Gavin Mak551285f2023-05-04 04:48:43 +0000864 earliest_time = float("inf")
865 earliest_proj = None
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800866 items = self.get_parallel_context()["sync_dict"].items()
Gavin Mak945c0062023-05-30 20:04:07 +0000867 for project, t in items:
Gavin Mak551285f2023-05-04 04:48:43 +0000868 if t < earliest_time:
869 earliest_time = t
870 earliest_proj = project
871
Josip Sokcevic71122f92023-05-26 02:44:37 +0000872 if not earliest_proj:
Gavin Mak945c0062023-05-30 20:04:07 +0000873 # This function is called when sync is still running but in some
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800874 # cases (by chance), sync_dict can contain no entries. Return some
Gavin Mak945c0062023-05-30 20:04:07 +0000875 # text to indicate that sync is still working.
876 return "..working.."
Josip Sokcevic71122f92023-05-26 02:44:37 +0000877
Gavin Mak551285f2023-05-04 04:48:43 +0000878 elapsed = time.time() - earliest_time
Gavin Mak945c0062023-05-30 20:04:07 +0000879 jobs = jobs_str(len(items))
Gavin Mak04cba4a2023-05-24 21:28:28 +0000880 return f"{jobs} | {elapsed_str(elapsed)} {earliest_proj}"
Gavin Mak551285f2023-05-04 04:48:43 +0000881
Kuang-che Wuab2d3212024-11-06 13:03:42 +0800882 @classmethod
883 def InitWorker(cls):
884 # Force connect to the manager server now.
885 # This is good because workers are initialized one by one. Without this,
886 # multiple workers may connect to the manager when handling the first
887 # job at the same time. Then the connection may fail if too many
888 # connections are pending and execeeded the socket listening backlog,
889 # especially on MacOS.
890 len(cls.get_parallel_context()["sync_dict"])
891
Jason Changdaf2ad32023-08-31 17:06:36 -0700892 def _Fetch(self, projects, opt, err_event, ssh_proxy, errors):
Gavin Makea2e3302023-03-11 06:46:20 +0000893 ret = True
Mike Frysingerb2fa30a2021-02-24 00:15:32 -0500894
Gavin Makea2e3302023-03-11 06:46:20 +0000895 fetched = set()
896 remote_fetched = set()
Gavin Makedcaa942023-04-27 05:58:57 +0000897 pm = Progress(
898 "Fetching",
899 len(projects),
900 delay=False,
901 quiet=opt.quiet,
902 show_elapsed=True,
Gavin Mak551285f2023-05-04 04:48:43 +0000903 elide=True,
Gavin Makedcaa942023-04-27 05:58:57 +0000904 )
Roy Lee18afd7f2010-05-09 04:32:08 +0800905
Gavin Mak551285f2023-05-04 04:48:43 +0000906 sync_event = _threading.Event()
Gavin Makb4b323a2025-06-17 10:54:41 -0700907 sync_progress_thread = self._CreateSyncProgressThread(pm, sync_event)
Gavin Mak551285f2023-05-04 04:48:43 +0000908
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800909 def _ProcessResults(pool, pm, results_sets):
Gavin Makea2e3302023-03-11 06:46:20 +0000910 ret = True
911 for results in results_sets:
912 for result in results:
913 success = result.success
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800914 project = projects[result.project_idx]
Gavin Makea2e3302023-03-11 06:46:20 +0000915 start = result.start
916 finish = result.finish
917 self._fetch_times.Set(project, finish - start)
Gavin Mak1d2e99d2023-07-22 02:56:44 +0000918 self._local_sync_state.SetFetchTime(project)
Gavin Makea2e3302023-03-11 06:46:20 +0000919 self.event_log.AddSync(
920 project,
921 event_log.TASK_SYNC_NETWORK,
922 start,
923 finish,
924 success,
925 )
Jason Chang32b59562023-07-14 16:45:35 -0700926 if result.errors:
927 errors.extend(result.errors)
Gavin Makea2e3302023-03-11 06:46:20 +0000928 if result.remote_fetched:
929 remote_fetched.add(project)
930 # Check for any errors before running any more tasks.
931 # ...we'll let existing jobs finish, though.
932 if not success:
933 ret = False
934 else:
935 fetched.add(project.gitdir)
Gavin Mak551285f2023-05-04 04:48:43 +0000936 pm.update()
Gavin Makea2e3302023-03-11 06:46:20 +0000937 if not ret and opt.fail_fast:
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800938 if pool:
939 pool.close()
Gavin Makea2e3302023-03-11 06:46:20 +0000940 break
Mike Frysingerb5d075d2021-03-01 00:56:38 -0500941 return ret
Xin Li745be2e2019-06-03 11:24:30 -0700942
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800943 with self.ParallelContext():
944 self.get_parallel_context()["projects"] = projects
945 self.get_parallel_context()[
946 "sync_dict"
947 ] = multiprocessing.Manager().dict()
Mike Frysingerebf04a42021-02-23 20:48:04 -0500948
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800949 objdir_project_map = dict()
950 for index, project in enumerate(projects):
951 objdir_project_map.setdefault(project.objdir, []).append(index)
952 projects_list = list(objdir_project_map.values())
953
Peter Kjellerstedt616e3142024-11-20 21:10:29 +0100954 jobs = max(1, min(opt.jobs_network, len(projects_list)))
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800955
956 # We pass the ssh proxy settings via the class. This allows
957 # multiprocessing to pickle it up when spawning children. We can't
958 # pass it as an argument to _FetchProjectList below as
959 # multiprocessing is unable to pickle those.
960 self.get_parallel_context()["ssh_proxy"] = ssh_proxy
961
962 sync_progress_thread.start()
Josip Sokcevic454fdaf2024-10-07 17:33:38 +0000963 if not opt.quiet:
Gavin Makea2e3302023-03-11 06:46:20 +0000964 pm.update(inc=0, msg="warming up")
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800965 try:
966 ret = self.ExecuteInParallel(
967 jobs,
Gavin Makea2e3302023-03-11 06:46:20 +0000968 functools.partial(self._FetchProjectList, opt),
969 projects_list,
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800970 callback=_ProcessResults,
971 output=pm,
972 # Use chunksize=1 to avoid the chance that some workers are
973 # idle while other workers still have more than one job in
974 # their chunk queue.
975 chunksize=1,
Kuang-che Wuab2d3212024-11-06 13:03:42 +0800976 initializer=self.InitWorker,
Gavin Makea2e3302023-03-11 06:46:20 +0000977 )
Kuang-che Wu39ffd992024-10-18 23:32:08 +0800978 finally:
979 sync_event.set()
980 sync_progress_thread.join()
LaMont Jonesfa8d9392022-11-02 22:01:29 +0000981
Gavin Makea2e3302023-03-11 06:46:20 +0000982 if not self.outer_client.manifest.IsArchive:
983 self._GCProjects(projects, opt, err_event)
Mike Frysinger65af2602021-04-08 22:47:44 -0400984
Jason Changdaf2ad32023-08-31 17:06:36 -0700985 return _FetchResult(ret, fetched)
LaMont Jonesfa8d9392022-11-02 22:01:29 +0000986
Gavin Makea2e3302023-03-11 06:46:20 +0000987 def _FetchMain(
Jason Changdaf2ad32023-08-31 17:06:36 -0700988 self, opt, args, all_projects, err_event, ssh_proxy, manifest, errors
Gavin Makea2e3302023-03-11 06:46:20 +0000989 ):
990 """The main network fetch loop.
Mike Frysinger65af2602021-04-08 22:47:44 -0400991
Gavin Makea2e3302023-03-11 06:46:20 +0000992 Args:
993 opt: Program options returned from optparse. See _Options().
994 args: Command line args used to filter out projects.
995 all_projects: List of all projects that should be fetched.
996 err_event: Whether an error was hit while processing.
997 ssh_proxy: SSH manager for clients & masters.
998 manifest: The manifest to use.
Dave Borowitz18857212012-10-23 17:02:59 -0700999
Gavin Makea2e3302023-03-11 06:46:20 +00001000 Returns:
1001 List of all projects that should be checked out.
1002 """
Gavin Makea2e3302023-03-11 06:46:20 +00001003 to_fetch = []
Gavin Makea2e3302023-03-11 06:46:20 +00001004 to_fetch.extend(all_projects)
1005 to_fetch.sort(key=self._fetch_times.Get, reverse=True)
Dave Borowitz18857212012-10-23 17:02:59 -07001006
Gavin Mak1afe96a2025-10-20 11:13:09 -07001007 try:
1008 result = self._Fetch(to_fetch, opt, err_event, ssh_proxy, errors)
Gavin Makea2e3302023-03-11 06:46:20 +00001009 success = result.success
Gavin Mak1afe96a2025-10-20 11:13:09 -07001010 fetched = result.projects
Gavin Makea2e3302023-03-11 06:46:20 +00001011 if not success:
1012 err_event.set()
Gavin Mak1afe96a2025-10-20 11:13:09 -07001013
1014 if opt.network_only:
1015 # Bail out now; the rest touches the working tree.
1016 if err_event.is_set():
1017 e = SyncError(
1018 "error: Exited sync due to fetch errors.",
1019 aggregate_errors=errors,
1020 )
1021
1022 logger.error(e)
1023 raise e
1024 return _FetchMainResult([])
1025
1026 # Iteratively fetch missing and/or nested unregistered submodules.
1027 previously_missing_set = set()
1028 while True:
1029 self._ReloadManifest(None, manifest)
1030 all_projects = self.GetProjects(
1031 args,
1032 missing_ok=True,
1033 submodules_ok=opt.fetch_submodules,
1034 manifest=manifest,
1035 all_manifests=not opt.this_manifest_only,
1036 )
1037 missing = []
1038 for project in all_projects:
1039 if project.gitdir not in fetched:
1040 missing.append(project)
1041 if not missing:
1042 break
1043 # Stop us from non-stopped fetching actually-missing repos: If
1044 # set of missing repos has not been changed from last fetch, we
1045 # break.
1046 missing_set = {p.name for p in missing}
1047 if previously_missing_set == missing_set:
1048 break
1049 previously_missing_set = missing_set
1050 result = self._Fetch(missing, opt, err_event, ssh_proxy, errors)
1051 success = result.success
1052 new_fetched = result.projects
1053 if not success:
1054 err_event.set()
1055 fetched.update(new_fetched)
1056 finally:
1057 self._fetch_times.Save()
1058 self._local_sync_state.Save()
Jaikumar Ganesh4f2517f2009-06-01 21:10:33 -07001059
Jason Changdaf2ad32023-08-31 17:06:36 -07001060 return _FetchMainResult(all_projects)
Jaikumar Ganesh4f2517f2009-06-01 21:10:33 -07001061
Kuang-che Wu39ffd992024-10-18 23:32:08 +08001062 @classmethod
Josip Sokcevicedadb252024-02-29 09:48:37 -08001063 def _CheckoutOne(
Kuang-che Wu39ffd992024-10-18 23:32:08 +08001064 cls,
Jeroen Dhollanderc44ad092024-08-20 10:28:41 +02001065 detach_head,
1066 force_sync,
1067 force_checkout,
1068 force_rebase,
1069 verbose,
Kuang-che Wu39ffd992024-10-18 23:32:08 +08001070 project_idx,
Josip Sokcevicedadb252024-02-29 09:48:37 -08001071 ):
Gavin Makea2e3302023-03-11 06:46:20 +00001072 """Checkout work tree for one project
jiajia tanga590e642021-04-25 20:02:02 +08001073
Gavin Makea2e3302023-03-11 06:46:20 +00001074 Args:
1075 detach_head: Whether to leave a detached HEAD.
Josip Sokcevicedadb252024-02-29 09:48:37 -08001076 force_sync: Force checking out of .git directory (e.g. overwrite
1077 existing git directory that was previously linked to a different
1078 object directory).
1079 force_checkout: Force checking out of the repo content.
Jeroen Dhollanderc44ad092024-08-20 10:28:41 +02001080 force_rebase: Force rebase.
Tomasz Wasilczyk4c809212023-12-08 13:42:17 -08001081 verbose: Whether to show verbose messages.
Kuang-che Wu39ffd992024-10-18 23:32:08 +08001082 project_idx: Project index for the project to checkout.
jiajia tanga590e642021-04-25 20:02:02 +08001083
Gavin Makea2e3302023-03-11 06:46:20 +00001084 Returns:
1085 Whether the fetch was successful.
1086 """
Kuang-che Wu39ffd992024-10-18 23:32:08 +08001087 project = cls.get_parallel_context()["projects"][project_idx]
Gavin Makea2e3302023-03-11 06:46:20 +00001088 start = time.time()
1089 syncbuf = SyncBuffer(
1090 project.manifest.manifestProject.config, detach_head=detach_head
LaMont Jonesbdcba7d2022-04-11 22:50:11 +00001091 )
Gavin Makea2e3302023-03-11 06:46:20 +00001092 success = False
Jason Chang32b59562023-07-14 16:45:35 -07001093 errors = []
David Pursehouse59b41742015-05-07 14:36:09 +09001094 try:
Jason Chang32b59562023-07-14 16:45:35 -07001095 project.Sync_LocalHalf(
Josip Sokcevicedadb252024-02-29 09:48:37 -08001096 syncbuf,
1097 force_sync=force_sync,
1098 force_checkout=force_checkout,
Jeroen Dhollanderc44ad092024-08-20 10:28:41 +02001099 force_rebase=force_rebase,
Josip Sokcevicedadb252024-02-29 09:48:37 -08001100 verbose=verbose,
Jason Chang32b59562023-07-14 16:45:35 -07001101 )
Gavin Makea2e3302023-03-11 06:46:20 +00001102 success = syncbuf.Finish()
Gavin Maka64149a2025-08-13 22:48:36 -07001103 errors.extend(syncbuf.errors)
Josip Sokcevicd93fe602025-01-08 18:31:46 +00001104 except KeyboardInterrupt:
1105 logger.error("Keyboard interrupt while processing %s", project.name)
Gavin Makea2e3302023-03-11 06:46:20 +00001106 except GitError as e:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001107 logger.error(
1108 "error.GitError: Cannot checkout %s: %s", project.name, e
Gavin Makea2e3302023-03-11 06:46:20 +00001109 )
Jason Chang32b59562023-07-14 16:45:35 -07001110 errors.append(e)
Gavin Makea2e3302023-03-11 06:46:20 +00001111 except Exception as e:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001112 logger.error(
1113 "error: Cannot checkout %s: %s: %s",
1114 project.name,
1115 type(e).__name__,
1116 e,
Gavin Makea2e3302023-03-11 06:46:20 +00001117 )
1118 raise
Nico Sallembiena1bfd2c2010-04-06 10:40:01 -07001119
Gavin Makea2e3302023-03-11 06:46:20 +00001120 if not success:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001121 logger.error("error: Cannot checkout %s", project.name)
Gavin Makea2e3302023-03-11 06:46:20 +00001122 finish = time.time()
Kuang-che Wu39ffd992024-10-18 23:32:08 +08001123 return _CheckoutOneResult(success, errors, project_idx, start, finish)
Mike Frysinger5a033082019-09-23 19:21:20 -04001124
Jason Chang32b59562023-07-14 16:45:35 -07001125 def _Checkout(self, all_projects, opt, err_results, checkout_errors):
Gavin Makea2e3302023-03-11 06:46:20 +00001126 """Checkout projects listed in all_projects
The Android Open Source Projectcf31fe92008-10-21 07:00:00 -07001127
Gavin Makea2e3302023-03-11 06:46:20 +00001128 Args:
1129 all_projects: List of all projects that should be checked out.
1130 opt: Program options returned from optparse. See _Options().
1131 err_results: A list of strings, paths to git repos where checkout
1132 failed.
1133 """
1134 # Only checkout projects with worktrees.
1135 all_projects = [x for x in all_projects if x.worktree]
The Android Open Source Projectcf31fe92008-10-21 07:00:00 -07001136
Gavin Makea2e3302023-03-11 06:46:20 +00001137 def _ProcessResults(pool, pm, results):
1138 ret = True
1139 for result in results:
1140 success = result.success
Kuang-che Wu39ffd992024-10-18 23:32:08 +08001141 project = self.get_parallel_context()["projects"][
1142 result.project_idx
1143 ]
Gavin Makea2e3302023-03-11 06:46:20 +00001144 start = result.start
1145 finish = result.finish
1146 self.event_log.AddSync(
1147 project, event_log.TASK_SYNC_LOCAL, start, finish, success
1148 )
Jason Chang32b59562023-07-14 16:45:35 -07001149
1150 if result.errors:
1151 checkout_errors.extend(result.errors)
1152
Gavin Makea2e3302023-03-11 06:46:20 +00001153 # Check for any errors before running any more tasks.
1154 # ...we'll let existing jobs finish, though.
Gavin Mak1d2e99d2023-07-22 02:56:44 +00001155 if success:
1156 self._local_sync_state.SetCheckoutTime(project)
1157 else:
Gavin Makea2e3302023-03-11 06:46:20 +00001158 ret = False
1159 err_results.append(
1160 project.RelPath(local=opt.this_manifest_only)
1161 )
1162 if opt.fail_fast:
1163 if pool:
1164 pool.close()
1165 return ret
1166 pm.update(msg=project.name)
1167 return ret
Shawn O. Pearcec9ef7442008-11-03 10:32:09 -08001168
Josip Sokcevic55545722024-02-22 16:38:00 -08001169 for projects in _SafeCheckoutOrder(all_projects):
Kuang-che Wu39ffd992024-10-18 23:32:08 +08001170 with self.ParallelContext():
1171 self.get_parallel_context()["projects"] = projects
1172 proc_res = self.ExecuteInParallel(
1173 opt.jobs_checkout,
1174 functools.partial(
1175 self._CheckoutOne,
1176 opt.detach_head,
1177 opt.force_sync,
1178 opt.force_checkout,
1179 opt.rebase,
1180 opt.verbose,
1181 ),
1182 range(len(projects)),
1183 callback=_ProcessResults,
1184 output=Progress(
1185 "Checking out", len(all_projects), quiet=opt.quiet
1186 ),
1187 # Use chunksize=1 to avoid the chance that some workers are
1188 # idle while other workers still have more than one job in
1189 # their chunk queue.
1190 chunksize=1,
1191 )
Simran Basib9a1b732015-08-20 12:19:28 -07001192
Gavin Mak1d2e99d2023-07-22 02:56:44 +00001193 self._local_sync_state.Save()
1194 return proc_res and not err_results
1195
Gavin Mak7b6ffed2025-06-13 17:53:38 -07001196 def _PrintManifestNotices(self, opt):
1197 """Print all manifest notices, but only once."""
1198 printed_notices = set()
1199 # Print all manifest notices, but only once.
1200 # Sort by path_prefix to ensure consistent ordering.
1201 for m in sorted(self.ManifestList(opt), key=lambda x: x.path_prefix):
1202 if m.notice and m.notice not in printed_notices:
1203 print(m.notice)
1204 printed_notices.add(m.notice)
1205
Gavin Makea2e3302023-03-11 06:46:20 +00001206 @staticmethod
1207 def _GetPreciousObjectsState(project: Project, opt):
1208 """Get the preciousObjects state for the project.
Mike Frysinger355f4392022-07-20 17:15:29 -04001209
Gavin Makea2e3302023-03-11 06:46:20 +00001210 Args:
1211 project (Project): the project to examine, and possibly correct.
1212 opt (optparse.Values): options given to sync.
Raman Tenneti1fd7bc22021-02-04 14:39:38 -08001213
Gavin Makea2e3302023-03-11 06:46:20 +00001214 Returns:
1215 Expected state of extensions.preciousObjects:
1216 False: Should be disabled. (not present)
1217 True: Should be enabled.
1218 """
1219 if project.use_git_worktrees:
1220 return False
1221 projects = project.manifest.GetProjectsWithName(
1222 project.name, all_manifests=True
1223 )
1224 if len(projects) == 1:
1225 return False
1226 if len(projects) > 1:
1227 # Objects are potentially shared with another project.
1228 # See the logic in Project.Sync_NetworkHalf regarding UseAlternates.
1229 # - When False, shared projects share (via symlink)
1230 # .repo/project-objects/{PROJECT_NAME}.git as the one-and-only
1231 # objects directory. All objects are precious, since there is no
1232 # project with a complete set of refs.
1233 # - When True, shared projects share (via info/alternates)
1234 # .repo/project-objects/{PROJECT_NAME}.git as an alternate object
1235 # store, which is written only on the first clone of the project,
1236 # and is not written subsequently. (When Sync_NetworkHalf sees
1237 # that it exists, it makes sure that the alternates file points
1238 # there, and uses a project-local .git/objects directory for all
1239 # syncs going forward.
1240 # We do not support switching between the options. The environment
1241 # variable is present for testing and migration only.
1242 return not project.UseAlternates
Simran Basib9a1b732015-08-20 12:19:28 -07001243
Gavin Makea2e3302023-03-11 06:46:20 +00001244 return False
Dan Willemsen5ea32d12015-09-08 13:27:20 -07001245
Gavin Makea2e3302023-03-11 06:46:20 +00001246 def _SetPreciousObjectsState(self, project: Project, opt):
1247 """Correct the preciousObjects state for the project.
1248
1249 Args:
1250 project: the project to examine, and possibly correct.
1251 opt: options given to sync.
1252 """
1253 expected = self._GetPreciousObjectsState(project, opt)
1254 actual = (
1255 project.config.GetBoolean("extensions.preciousObjects") or False
1256 )
1257 relpath = project.RelPath(local=opt.this_manifest_only)
1258
1259 if expected != actual:
1260 # If this is unexpected, log it and repair.
1261 Trace(
1262 f"{relpath} expected preciousObjects={expected}, got {actual}"
1263 )
1264 if expected:
1265 if not opt.quiet:
Aravind Vasudevan83c66ec2023-09-28 19:06:59 +00001266 print(
1267 "\r%s: Shared project %s found, disabling pruning."
1268 % (relpath, project.name)
Gavin Makea2e3302023-03-11 06:46:20 +00001269 )
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001270
Gavin Makea2e3302023-03-11 06:46:20 +00001271 if git_require((2, 7, 0)):
1272 project.EnableRepositoryExtension("preciousObjects")
1273 else:
1274 # This isn't perfect, but it's the best we can do with old
1275 # git.
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001276 logger.warning(
1277 "%s: WARNING: shared projects are unreliable when "
Gavin Makea2e3302023-03-11 06:46:20 +00001278 "using old versions of git; please upgrade to "
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001279 "git-2.7.0+.",
1280 relpath,
Gavin Makea2e3302023-03-11 06:46:20 +00001281 )
1282 project.config.SetString("gc.pruneExpire", "never")
1283 else:
Gavin Makea2e3302023-03-11 06:46:20 +00001284 project.config.SetString("extensions.preciousObjects", None)
1285 project.config.SetString("gc.pruneExpire", None)
1286
    def _GCProjects(self, projects, opt, err_event):
        """Perform garbage collection.

        If we are skipping garbage collection (opt.auto_gc not set), we still
        want to potentially mark objects precious, so that `git gc` does not
        discard shared objects.

        Args:
            projects: Projects whose object stores should be tidied.
            opt: Program options returned from optparse. See _Options().
            err_event: Event set when a gc/pack-refs invocation fails.
        """
        if not opt.auto_gc:
            # Just repair preciousObjects state, and return.
            for project in projects:
                self._SetPreciousObjectsState(project, opt)
            return

        pm = Progress(
            "Garbage collecting", len(projects), delay=False, quiet=opt.quiet
        )
        pm.update(inc=0, msg="prescan")

        tidy_dirs = {}
        for project in projects:
            self._SetPreciousObjectsState(project, opt)

            # Run gc in the foreground so we can track its completion.
            project.config.SetString("gc.autoDetach", "false")
            # Only call git gc once per objdir, but call pack-refs for the
            # remainder.
            if project.objdir not in tidy_dirs:
                tidy_dirs[project.objdir] = (
                    True,  # Run a full gc.
                    project.bare_git,
                )
            elif project.gitdir not in tidy_dirs:
                tidy_dirs[project.gitdir] = (
                    False,  # Do not run a full gc; just run pack-refs.
                    project.bare_git,
                )

        jobs = opt.jobs

        if jobs < 2:
            # Single-job mode: run everything serially on this thread.
            for run_gc, bare_git in tidy_dirs.values():
                pm.update(msg=bare_git._project.name)

                if run_gc:
                    bare_git.gc("--auto")
                else:
                    bare_git.pack_refs()
            pm.end()
            return

        # Split available CPU threads among concurrent gc invocations so the
        # combined pack.threads usage doesn't oversubscribe the machine.
        cpu_count = os.cpu_count()
        config = {"pack.threads": cpu_count // jobs if cpu_count > jobs else 1}

        threads = set()
        sem = _threading.Semaphore(jobs)

        def tidy_up(run_gc, bare_git):
            # Worker body: run gc or pack-refs, flag failures via err_event.
            pm.start(bare_git._project.name)
            try:
                try:
                    if run_gc:
                        bare_git.gc("--auto", config=config)
                    else:
                        bare_git.pack_refs(config=config)
                except GitError:
                    err_event.set()
                except Exception:
                    err_event.set()
                    raise
            finally:
                # Always release the job slot and close the progress entry.
                pm.finish(bare_git._project.name)
                sem.release()

        for run_gc, bare_git in tidy_dirs.values():
            if err_event.is_set() and opt.fail_fast:
                break
            # Semaphore bounds concurrency to |jobs| threads at once.
            sem.acquire()
            t = _threading.Thread(
                target=tidy_up,
                args=(
                    run_gc,
                    bare_git,
                ),
            )
            t.daemon = True
            threads.add(t)
            t.start()

        for t in threads:
            t.join()
        pm.end()
1377
Gavin Makb5991d72025-12-09 22:29:43 +00001378 @classmethod
1379 def _CheckOneBloatedProject(cls, project_index: int) -> Optional[str]:
1380 """Checks if a single project is bloated.
1381
1382 Args:
1383 project_index: The index of the project in the parallel context.
1384
1385 Returns:
1386 The name of the project if it is bloated, else None.
1387 """
1388 project = cls.get_parallel_context()["projects"][project_index]
1389
1390 if not project.Exists or not project.worktree:
1391 return None
1392
1393 # Only check dirty or locally modified projects. These can't be
1394 # freshly cloned and will accumulate garbage.
1395 try:
1396 is_dirty = project.IsDirty(consider_untracked=True)
1397
1398 manifest_rev = project.GetRevisionId(project.bare_ref.all)
1399 head_rev = project.work_git.rev_parse(HEAD)
1400 has_local_commits = manifest_rev != head_rev
1401
1402 if not (is_dirty or has_local_commits):
1403 return None
1404
1405 output = project.bare_git.count_objects("-v")
1406 except Exception:
1407 return None
1408
1409 stats = {}
1410 for line in output.splitlines():
1411 try:
1412 key, value = line.split(": ", 1)
1413 stats[key.strip()] = int(value.strip())
1414 except ValueError:
1415 pass
1416
1417 pack_count = stats.get("packs", 0)
1418 size_pack_kb = stats.get("size-pack", 0)
1419 size_garbage_kb = stats.get("size-garbage", 0)
1420
1421 is_fragmented = (
1422 pack_count > _BLOAT_PACK_COUNT_THRESHOLD
1423 and size_pack_kb > _BLOAT_SIZE_PACK_THRESHOLD_KB
1424 )
1425 has_excessive_garbage = (
1426 size_garbage_kb > _BLOAT_SIZE_GARBAGE_THRESHOLD_KB
1427 )
1428
1429 if is_fragmented or has_excessive_garbage:
1430 return project.name
1431 return None
1432
    def _CheckForBloatedProjects(self, projects, opt):
        """Check for shallow projects that are accumulating unoptimized data.

        For projects with a clone depth set that are dirty (have local
        changes), run 'git count-objects -v' and warn if the repository is
        accumulating excessive pack files or garbage.

        Args:
            projects: Candidate projects; only those with clone_depth set are
                actually examined.
            opt: Program options returned from optparse. See _Options().
        """
        # We only care about bloated projects if we have a git version that
        # supports --no-auto-gc (2.23.0+) since that is what we use to disable
        # auto-gc in Project._RemoteFetch.
        if not git_require((2, 23, 0)):
            return

        projects = [p for p in projects if p.clone_depth]
        if not projects:
            return

        bloated_projects = []
        pm = Progress(
            "Checking for bloat", len(projects), delay=False, quiet=opt.quiet
        )

        def _ProcessResults(pool, pm, results):
            # Collect the names of projects flagged as bloated (non-None).
            for result in results:
                if result:
                    bloated_projects.append(result)
                pm.update(msg="")

        with self.ParallelContext():
            self.get_parallel_context()["projects"] = projects
            self.ExecuteInParallel(
                opt.jobs,
                self._CheckOneBloatedProject,
                range(len(projects)),
                callback=_ProcessResults,
                output=pm,
                chunksize=1,
            )
        pm.end()

        # Surface one warning per bloated project, both to the event log and
        # the console.
        for project_name in bloated_projects:
            warn_msg = (
                f'warning: Project "{project_name}" is accumulating '
                'unoptimized data. Please run "repo sync --auto-gc" or '
                '"repo gc --repack" to clean up.'
            )
            self.git_event_log.ErrorEvent(warn_msg)
            logger.warning(warn_msg)
1481
Gavin Makf7a3f992025-06-23 09:04:26 -07001482 def _UpdateRepoProject(self, opt, manifest, errors):
1483 """Fetch the repo project and check for updates."""
1484 if opt.local_only:
1485 return
1486
1487 rp = manifest.repoProject
1488 now = time.time()
1489 # If we've fetched in the last day, don't bother fetching again.
1490 if (now - rp.LastFetch) < _ONE_DAY_S:
1491 return
1492
1493 with multiprocessing.Manager() as manager:
1494 with ssh.ProxyManager(manager) as ssh_proxy:
1495 ssh_proxy.sock()
1496 start = time.time()
1497 buf = TeeStringIO(sys.stdout if opt.verbose else None)
1498 sync_result = rp.Sync_NetworkHalf(
1499 quiet=opt.quiet,
1500 verbose=opt.verbose,
1501 output_redir=buf,
1502 current_branch_only=self._GetCurrentBranchOnly(
1503 opt, manifest
1504 ),
1505 force_sync=opt.force_sync,
1506 clone_bundle=opt.clone_bundle,
1507 tags=opt.tags,
1508 archive=manifest.IsArchive,
1509 optimized_fetch=opt.optimized_fetch,
1510 retry_fetches=opt.retry_fetches,
1511 prune=opt.prune,
1512 ssh_proxy=ssh_proxy,
1513 clone_filter=manifest.CloneFilter,
1514 partial_clone_exclude=manifest.PartialCloneExclude,
1515 clone_filter_for_depth=manifest.CloneFilterForDepth,
1516 )
1517 if sync_result.error:
1518 errors.append(sync_result.error)
1519
1520 finish = time.time()
1521 self.event_log.AddSync(
1522 rp,
1523 event_log.TASK_SYNC_NETWORK,
1524 start,
1525 finish,
1526 sync_result.success,
1527 )
1528 if not sync_result.success:
1529 logger.error("error: Cannot fetch repo tool %s", rp.name)
1530 return
1531
1532 # After fetching, check if a new version of repo is available and
1533 # restart. This is only done if the user hasn't explicitly disabled it.
1534 if os.environ.get("REPO_SKIP_SELF_UPDATE", "0") == "0":
1535 _PostRepoFetch(rp, opt.repo_verify)
1536
Gavin Makea2e3302023-03-11 06:46:20 +00001537 def _ReloadManifest(self, manifest_name, manifest):
1538 """Reload the manfiest from the file specified by the |manifest_name|.
1539
1540 It unloads the manifest if |manifest_name| is None.
1541
1542 Args:
1543 manifest_name: Manifest file to be reloaded.
1544 manifest: The manifest to use.
1545 """
Dan Willemsen5ea32d12015-09-08 13:27:20 -07001546 if manifest_name:
Gavin Makea2e3302023-03-11 06:46:20 +00001547 # Override calls Unload already.
1548 manifest.Override(manifest_name)
Dan Willemsen5ea32d12015-09-08 13:27:20 -07001549 else:
Gavin Makea2e3302023-03-11 06:46:20 +00001550 manifest.Unload()
Simran Basib9a1b732015-08-20 12:19:28 -07001551
Gavin Makea2e3302023-03-11 06:46:20 +00001552 def UpdateProjectList(self, opt, manifest):
1553 """Update the cached projects list for |manifest|
LaMont Jonesbdcba7d2022-04-11 22:50:11 +00001554
Gavin Makea2e3302023-03-11 06:46:20 +00001555 In a multi-manifest checkout, each manifest has its own project.list.
The Android Open Source Projectcf31fe92008-10-21 07:00:00 -07001556
Gavin Makea2e3302023-03-11 06:46:20 +00001557 Args:
1558 opt: Program options returned from optparse. See _Options().
1559 manifest: The manifest to use.
Mike Frysinger5a033082019-09-23 19:21:20 -04001560
Gavin Makea2e3302023-03-11 06:46:20 +00001561 Returns:
1562 0: success
1563 1: failure
1564 """
1565 new_project_paths = []
1566 for project in self.GetProjects(
1567 None, missing_ok=True, manifest=manifest, all_manifests=False
1568 ):
1569 if project.relpath:
1570 new_project_paths.append(project.relpath)
1571 file_name = "project.list"
1572 file_path = os.path.join(manifest.subdir, file_name)
1573 old_project_paths = []
Mike Frysinger339f2df2021-05-06 00:44:42 -04001574
Gavin Makea2e3302023-03-11 06:46:20 +00001575 if os.path.exists(file_path):
Jason R. Coombs034950b2023-10-20 23:32:02 +05451576 with open(file_path) as fd:
Gavin Makea2e3302023-03-11 06:46:20 +00001577 old_project_paths = fd.read().split("\n")
1578 # In reversed order, so subfolders are deleted before parent folder.
1579 for path in sorted(old_project_paths, reverse=True):
1580 if not path:
1581 continue
1582 if path not in new_project_paths:
1583 # If the path has already been deleted, we don't need to do
1584 # it.
1585 gitdir = os.path.join(manifest.topdir, path, ".git")
1586 if os.path.exists(gitdir):
1587 project = Project(
1588 manifest=manifest,
1589 name=path,
1590 remote=RemoteSpec("origin"),
1591 gitdir=gitdir,
1592 objdir=gitdir,
1593 use_git_worktrees=os.path.isfile(gitdir),
1594 worktree=os.path.join(manifest.topdir, path),
1595 relpath=path,
1596 revisionExpr="HEAD",
1597 revisionId=None,
1598 groups=None,
1599 )
Jason Chang32b59562023-07-14 16:45:35 -07001600 project.DeleteWorktree(
Tomasz Wasilczyk4c809212023-12-08 13:42:17 -08001601 verbose=opt.verbose, force=opt.force_remove_dirty
Jason Chang32b59562023-07-14 16:45:35 -07001602 )
Mike Frysinger5a033082019-09-23 19:21:20 -04001603
Gavin Makea2e3302023-03-11 06:46:20 +00001604 new_project_paths.sort()
1605 with open(file_path, "w") as fd:
1606 fd.write("\n".join(new_project_paths))
1607 fd.write("\n")
1608 return 0
Che-Liang Chioub2bd91c2012-01-11 11:28:42 +08001609
Gavin Makea2e3302023-03-11 06:46:20 +00001610 def UpdateCopyLinkfileList(self, manifest):
1611 """Save all dests of copyfile and linkfile, and update them if needed.
Shawn O. Pearcecd1d7ff2009-06-04 16:15:53 -07001612
Gavin Makea2e3302023-03-11 06:46:20 +00001613 Returns:
1614 Whether update was successful.
1615 """
1616 new_paths = {}
1617 new_linkfile_paths = []
1618 new_copyfile_paths = []
1619 for project in self.GetProjects(
1620 None, missing_ok=True, manifest=manifest, all_manifests=False
1621 ):
1622 new_linkfile_paths.extend(x.dest for x in project.linkfiles)
1623 new_copyfile_paths.extend(x.dest for x in project.copyfiles)
Jaikumar Ganesh4f2517f2009-06-01 21:10:33 -07001624
Gavin Makea2e3302023-03-11 06:46:20 +00001625 new_paths = {
1626 "linkfile": new_linkfile_paths,
1627 "copyfile": new_copyfile_paths,
1628 }
jiajia tanga590e642021-04-25 20:02:02 +08001629
Gavin Makea2e3302023-03-11 06:46:20 +00001630 copylinkfile_name = "copy-link-files.json"
1631 copylinkfile_path = os.path.join(manifest.subdir, copylinkfile_name)
1632 old_copylinkfile_paths = {}
The Android Open Source Projectcf31fe92008-10-21 07:00:00 -07001633
Gavin Makea2e3302023-03-11 06:46:20 +00001634 if os.path.exists(copylinkfile_path):
1635 with open(copylinkfile_path, "rb") as fp:
1636 try:
1637 old_copylinkfile_paths = json.load(fp)
1638 except Exception:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001639 logger.error(
1640 "error: %s is not a json formatted file.",
1641 copylinkfile_path,
Gavin Makea2e3302023-03-11 06:46:20 +00001642 )
1643 platform_utils.remove(copylinkfile_path)
Jason Chang32b59562023-07-14 16:45:35 -07001644 raise
Doug Anderson2b8db3c2010-11-01 15:08:06 -07001645
Gavin Makea2e3302023-03-11 06:46:20 +00001646 need_remove_files = []
1647 need_remove_files.extend(
1648 set(old_copylinkfile_paths.get("linkfile", []))
1649 - set(new_linkfile_paths)
1650 )
1651 need_remove_files.extend(
1652 set(old_copylinkfile_paths.get("copyfile", []))
1653 - set(new_copyfile_paths)
1654 )
Mike Frysinger5a033082019-09-23 19:21:20 -04001655
Gavin Makea2e3302023-03-11 06:46:20 +00001656 for need_remove_file in need_remove_files:
1657 # Try to remove the updated copyfile or linkfile.
1658 # So, if the file is not exist, nothing need to do.
Josip Sokcevic9500aca2024-12-13 18:24:20 +00001659 platform_utils.remove(
1660 os.path.join(self.client.topdir, need_remove_file),
1661 missing_ok=True,
1662 )
Raman Tenneti7954de12021-07-28 14:36:49 -07001663
Gavin Makea2e3302023-03-11 06:46:20 +00001664 # Create copy-link-files.json, save dest path of "copyfile" and
1665 # "linkfile".
1666 with open(copylinkfile_path, "w", encoding="utf-8") as fp:
1667 json.dump(new_paths, fp)
1668 return True
Raman Tenneti7954de12021-07-28 14:36:49 -07001669
Gavin Makea2e3302023-03-11 06:46:20 +00001670 def _SmartSyncSetup(self, opt, smart_sync_manifest_path, manifest):
1671 if not manifest.manifest_server:
Jason Chang32b59562023-07-14 16:45:35 -07001672 raise SmartSyncError(
Gavin Makea2e3302023-03-11 06:46:20 +00001673 "error: cannot smart sync: no manifest server defined in "
Jason Chang32b59562023-07-14 16:45:35 -07001674 "manifest"
Gavin Makea2e3302023-03-11 06:46:20 +00001675 )
Gavin Makea2e3302023-03-11 06:46:20 +00001676
1677 manifest_server = manifest.manifest_server
1678 if not opt.quiet:
Aravind Vasudevan83c66ec2023-09-28 19:06:59 +00001679 print("Using manifest server %s" % manifest_server)
Gavin Makea2e3302023-03-11 06:46:20 +00001680
1681 if "@" not in manifest_server:
1682 username = None
1683 password = None
1684 if opt.manifest_server_username and opt.manifest_server_password:
1685 username = opt.manifest_server_username
1686 password = opt.manifest_server_password
1687 else:
1688 try:
1689 info = netrc.netrc()
Jason R. Coombsae824fb2023-10-20 23:32:40 +05451690 except OSError:
Gavin Makea2e3302023-03-11 06:46:20 +00001691 # .netrc file does not exist or could not be opened.
1692 pass
1693 else:
1694 try:
1695 parse_result = urllib.parse.urlparse(manifest_server)
1696 if parse_result.hostname:
1697 auth = info.authenticators(parse_result.hostname)
1698 if auth:
1699 username, _account, password = auth
1700 else:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001701 logger.error(
1702 "No credentials found for %s in .netrc",
1703 parse_result.hostname,
Gavin Makea2e3302023-03-11 06:46:20 +00001704 )
1705 except netrc.NetrcParseError as e:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001706 logger.error("Error parsing .netrc file: %s", e)
Gavin Makea2e3302023-03-11 06:46:20 +00001707
1708 if username and password:
1709 manifest_server = manifest_server.replace(
Jason R. Coombsb32ccbb2023-09-29 11:04:49 -04001710 "://", f"://{username}:{password}@", 1
Gavin Makea2e3302023-03-11 06:46:20 +00001711 )
1712
1713 transport = PersistentTransport(manifest_server)
1714 if manifest_server.startswith("persistent-"):
1715 manifest_server = manifest_server[len("persistent-") :]
1716
Mike Frysingerdfdf5772025-01-30 19:11:36 -05001717 # Changes in behavior should update docs/smart-sync.md accordingly.
Gavin Makea2e3302023-03-11 06:46:20 +00001718 try:
1719 server = xmlrpc.client.Server(manifest_server, transport=transport)
1720 if opt.smart_sync:
1721 branch = self._GetBranch(manifest.manifestProject)
1722
1723 if "SYNC_TARGET" in os.environ:
1724 target = os.environ["SYNC_TARGET"]
1725 [success, manifest_str] = server.GetApprovedManifest(
1726 branch, target
1727 )
1728 elif (
1729 "TARGET_PRODUCT" in os.environ
1730 and "TARGET_BUILD_VARIANT" in os.environ
Navil1e19f7d2024-09-11 16:49:49 +00001731 and "TARGET_RELEASE" in os.environ
1732 ):
1733 target = "%s-%s-%s" % (
1734 os.environ["TARGET_PRODUCT"],
1735 os.environ["TARGET_RELEASE"],
1736 os.environ["TARGET_BUILD_VARIANT"],
1737 )
1738 [success, manifest_str] = server.GetApprovedManifest(
1739 branch, target
1740 )
1741 elif (
1742 "TARGET_PRODUCT" in os.environ
1743 and "TARGET_BUILD_VARIANT" in os.environ
Gavin Makea2e3302023-03-11 06:46:20 +00001744 ):
1745 target = "%s-%s" % (
1746 os.environ["TARGET_PRODUCT"],
1747 os.environ["TARGET_BUILD_VARIANT"],
1748 )
1749 [success, manifest_str] = server.GetApprovedManifest(
1750 branch, target
1751 )
1752 else:
1753 [success, manifest_str] = server.GetApprovedManifest(branch)
1754 else:
1755 assert opt.smart_tag
1756 [success, manifest_str] = server.GetManifest(opt.smart_tag)
1757
1758 if success:
1759 manifest_name = os.path.basename(smart_sync_manifest_path)
1760 try:
1761 with open(smart_sync_manifest_path, "w") as f:
1762 f.write(manifest_str)
Jason R. Coombsae824fb2023-10-20 23:32:40 +05451763 except OSError as e:
Jason Chang32b59562023-07-14 16:45:35 -07001764 raise SmartSyncError(
Gavin Makea2e3302023-03-11 06:46:20 +00001765 "error: cannot write manifest to %s:\n%s"
1766 % (smart_sync_manifest_path, e),
Jason Chang32b59562023-07-14 16:45:35 -07001767 aggregate_errors=[e],
Gavin Makea2e3302023-03-11 06:46:20 +00001768 )
Gavin Makea2e3302023-03-11 06:46:20 +00001769 self._ReloadManifest(manifest_name, manifest)
1770 else:
Jason Chang32b59562023-07-14 16:45:35 -07001771 raise SmartSyncError(
1772 "error: manifest server RPC call failed: %s" % manifest_str
Gavin Makea2e3302023-03-11 06:46:20 +00001773 )
Jason R. Coombsae824fb2023-10-20 23:32:40 +05451774 except (OSError, xmlrpc.client.Fault) as e:
Jason Chang32b59562023-07-14 16:45:35 -07001775 raise SmartSyncError(
Gavin Makea2e3302023-03-11 06:46:20 +00001776 "error: cannot connect to manifest server %s:\n%s"
1777 % (manifest.manifest_server, e),
Jason Chang32b59562023-07-14 16:45:35 -07001778 aggregate_errors=[e],
Gavin Makea2e3302023-03-11 06:46:20 +00001779 )
Gavin Makea2e3302023-03-11 06:46:20 +00001780 except xmlrpc.client.ProtocolError as e:
Jason Chang32b59562023-07-14 16:45:35 -07001781 raise SmartSyncError(
Gavin Makea2e3302023-03-11 06:46:20 +00001782 "error: cannot connect to manifest server %s:\n%d %s"
1783 % (manifest.manifest_server, e.errcode, e.errmsg),
Jason Chang32b59562023-07-14 16:45:35 -07001784 aggregate_errors=[e],
Gavin Makea2e3302023-03-11 06:46:20 +00001785 )
Gavin Makea2e3302023-03-11 06:46:20 +00001786
1787 return manifest_name
1788
Jason Changdaf2ad32023-08-31 17:06:36 -07001789 def _UpdateAllManifestProjects(self, opt, mp, manifest_name, errors):
Gavin Makea2e3302023-03-11 06:46:20 +00001790 """Fetch & update the local manifest project.
1791
1792 After syncing the manifest project, if the manifest has any sub
1793 manifests, those are recursively processed.
1794
1795 Args:
1796 opt: Program options returned from optparse. See _Options().
1797 mp: the manifestProject to query.
1798 manifest_name: Manifest file to be reloaded.
1799 """
1800 if not mp.standalone_manifest_url:
Jason Changdaf2ad32023-08-31 17:06:36 -07001801 self._UpdateManifestProject(opt, mp, manifest_name, errors)
Gavin Makea2e3302023-03-11 06:46:20 +00001802
1803 if mp.manifest.submanifests:
1804 for submanifest in mp.manifest.submanifests.values():
1805 child = submanifest.repo_client.manifest
1806 child.manifestProject.SyncWithPossibleInit(
1807 submanifest,
1808 current_branch_only=self._GetCurrentBranchOnly(opt, child),
1809 verbose=opt.verbose,
1810 tags=opt.tags,
1811 git_event_log=self.git_event_log,
1812 )
1813 self._UpdateAllManifestProjects(
Jason Changdaf2ad32023-08-31 17:06:36 -07001814 opt, child.manifestProject, None, errors
Gavin Makea2e3302023-03-11 06:46:20 +00001815 )
1816
Jason Changdaf2ad32023-08-31 17:06:36 -07001817 def _UpdateManifestProject(self, opt, mp, manifest_name, errors):
Gavin Makea2e3302023-03-11 06:46:20 +00001818 """Fetch & update the local manifest project.
1819
1820 Args:
1821 opt: Program options returned from optparse. See _Options().
1822 mp: the manifestProject to query.
1823 manifest_name: Manifest file to be reloaded.
1824 """
1825 if not opt.local_only:
1826 start = time.time()
Jason Changdaf2ad32023-08-31 17:06:36 -07001827 buf = TeeStringIO(sys.stdout)
1828 try:
1829 result = mp.Sync_NetworkHalf(
Tomasz Wasilczyk208f3442024-01-05 12:23:10 -08001830 quiet=not opt.verbose,
Jason Changdaf2ad32023-08-31 17:06:36 -07001831 output_redir=buf,
1832 verbose=opt.verbose,
1833 current_branch_only=self._GetCurrentBranchOnly(
1834 opt, mp.manifest
1835 ),
1836 force_sync=opt.force_sync,
1837 tags=opt.tags,
1838 optimized_fetch=opt.optimized_fetch,
1839 retry_fetches=opt.retry_fetches,
1840 submodules=mp.manifest.HasSubmodules,
1841 clone_filter=mp.manifest.CloneFilter,
1842 partial_clone_exclude=mp.manifest.PartialCloneExclude,
1843 clone_filter_for_depth=mp.manifest.CloneFilterForDepth,
1844 )
1845 if result.error:
1846 errors.append(result.error)
1847 except KeyboardInterrupt:
1848 errors.append(
1849 ManifestInterruptError(buf.getvalue(), project=mp.name)
1850 )
1851 raise
1852
Gavin Makea2e3302023-03-11 06:46:20 +00001853 finish = time.time()
1854 self.event_log.AddSync(
Jason Chang32b59562023-07-14 16:45:35 -07001855 mp, event_log.TASK_SYNC_NETWORK, start, finish, result.success
Gavin Makea2e3302023-03-11 06:46:20 +00001856 )
1857
1858 if mp.HasChanges:
Jason Chang32b59562023-07-14 16:45:35 -07001859 errors = []
Gavin Makea2e3302023-03-11 06:46:20 +00001860 syncbuf = SyncBuffer(mp.config)
1861 start = time.time()
Jason Chang32b59562023-07-14 16:45:35 -07001862 mp.Sync_LocalHalf(
Tomasz Wasilczyk4c809212023-12-08 13:42:17 -08001863 syncbuf,
1864 submodules=mp.manifest.HasSubmodules,
Tomasz Wasilczyk4c809212023-12-08 13:42:17 -08001865 verbose=opt.verbose,
Jason Chang32b59562023-07-14 16:45:35 -07001866 )
Gavin Makea2e3302023-03-11 06:46:20 +00001867 clean = syncbuf.Finish()
Gavin Maka64149a2025-08-13 22:48:36 -07001868 errors.extend(syncbuf.errors)
Gavin Makea2e3302023-03-11 06:46:20 +00001869 self.event_log.AddSync(
1870 mp, event_log.TASK_SYNC_LOCAL, start, time.time(), clean
1871 )
1872 if not clean:
Yiwei Zhangd379e772023-12-20 20:39:59 +00001873 raise UpdateManifestError(aggregate_errors=errors)
Gavin Makea2e3302023-03-11 06:46:20 +00001874 self._ReloadManifest(manifest_name, mp.manifest)
1875
1876 def ValidateOptions(self, opt, args):
1877 if opt.force_broken:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001878 logger.warning(
Gavin Makea2e3302023-03-11 06:46:20 +00001879 "warning: -f/--force-broken is now the default behavior, and "
Aravind Vasudevane914ec22023-08-31 20:57:31 +00001880 "the options are deprecated"
Gavin Makea2e3302023-03-11 06:46:20 +00001881 )
1882 if opt.network_only and opt.detach_head:
1883 self.OptionParser.error("cannot combine -n and -d")
1884 if opt.network_only and opt.local_only:
1885 self.OptionParser.error("cannot combine -n and -l")
1886 if opt.manifest_name and opt.smart_sync:
1887 self.OptionParser.error("cannot combine -m and -s")
1888 if opt.manifest_name and opt.smart_tag:
1889 self.OptionParser.error("cannot combine -m and -t")
1890 if opt.manifest_server_username or opt.manifest_server_password:
1891 if not (opt.smart_sync or opt.smart_tag):
1892 self.OptionParser.error(
1893 "-u and -p may only be combined with -s or -t"
1894 )
1895 if None in [
1896 opt.manifest_server_username,
1897 opt.manifest_server_password,
1898 ]:
1899 self.OptionParser.error("both -u and -p must be given")
1900
1901 if opt.prune is None:
1902 opt.prune = True
1903
Gavin Makea2e3302023-03-11 06:46:20 +00001904 def _ValidateOptionsWithManifest(self, opt, mp):
1905 """Like ValidateOptions, but after we've updated the manifest.
1906
1907 Needed to handle sync-xxx option defaults in the manifest.
1908
1909 Args:
1910 opt: The options to process.
1911 mp: The manifest project to pull defaults from.
1912 """
1913 if not opt.jobs:
1914 # If the user hasn't made a choice, use the manifest value.
1915 opt.jobs = mp.manifest.default.sync_j
1916 if opt.jobs:
1917 # If --jobs has a non-default value, propagate it as the default for
1918 # --jobs-xxx flags too.
1919 if not opt.jobs_network:
1920 opt.jobs_network = opt.jobs
1921 if not opt.jobs_checkout:
1922 opt.jobs_checkout = opt.jobs
1923 else:
1924 # Neither user nor manifest have made a choice, so setup defaults.
1925 if not opt.jobs_network:
1926 opt.jobs_network = 1
1927 if not opt.jobs_checkout:
1928 opt.jobs_checkout = DEFAULT_LOCAL_JOBS
1929 opt.jobs = os.cpu_count()
1930
1931 # Try to stay under user rlimit settings.
1932 #
1933 # Since each worker requires at 3 file descriptors to run `git fetch`,
1934 # use that to scale down the number of jobs. Unfortunately there isn't
1935 # an easy way to determine this reliably as systems change, but it was
1936 # last measured by hand in 2011.
1937 soft_limit, _ = _rlimit_nofile()
1938 jobs_soft_limit = max(1, (soft_limit - 5) // 3)
1939 opt.jobs = min(opt.jobs, jobs_soft_limit)
1940 opt.jobs_network = min(opt.jobs_network, jobs_soft_limit)
1941 opt.jobs_checkout = min(opt.jobs_checkout, jobs_soft_limit)
1942
Gavin Makdaebd6c2025-04-09 13:59:27 -07001943 # Warn once if effective job counts seem excessively high.
1944 # Prioritize --jobs, then --jobs-network, then --jobs-checkout.
1945 job_options_to_check = (
1946 ("--jobs", opt.jobs),
1947 ("--jobs-network", opt.jobs_network),
1948 ("--jobs-checkout", opt.jobs_checkout),
1949 )
1950 for name, value in job_options_to_check:
1951 if value > self._JOBS_WARN_THRESHOLD:
1952 logger.warning(
1953 "High job count (%d > %d) specified for %s; this may "
1954 "lead to excessive resource usage or diminishing returns.",
1955 value,
1956 self._JOBS_WARN_THRESHOLD,
1957 name,
1958 )
1959 break
1960
Gavin Makea2e3302023-03-11 06:46:20 +00001961 def Execute(self, opt, args):
Jason Chang32b59562023-07-14 16:45:35 -07001962 errors = []
1963 try:
1964 self._ExecuteHelper(opt, args, errors)
Jason Chang26fa3182024-02-05 15:15:20 -08001965 except (RepoExitError, RepoChangedException):
Jason Chang32b59562023-07-14 16:45:35 -07001966 raise
1967 except (KeyboardInterrupt, Exception) as e:
1968 raise RepoUnhandledExceptionError(e, aggregate_errors=errors)
1969
Kenny Cheng82d500e2025-06-02 21:55:04 +08001970 # Run post-sync hook only after successful sync
1971 self._RunPostSyncHook(opt)
1972
1973 def _RunPostSyncHook(self, opt):
1974 """Run post-sync hook if configured in manifest <repo-hooks>."""
1975 hook = RepoHook.FromSubcmd(
1976 hook_type="post-sync",
1977 manifest=self.manifest,
1978 opt=opt,
1979 abort_if_user_denies=False,
1980 )
1981 success = hook.Run(repo_topdir=self.client.topdir)
1982 if not success:
1983 print("Warning: post-sync hook reported failure.")
1984
Jason Chang32b59562023-07-14 16:45:35 -07001985 def _ExecuteHelper(self, opt, args, errors):
Gavin Makea2e3302023-03-11 06:46:20 +00001986 manifest = self.outer_manifest
1987 if not opt.outer_manifest:
1988 manifest = self.manifest
1989
1990 if opt.manifest_name:
1991 manifest.Override(opt.manifest_name)
1992
1993 manifest_name = opt.manifest_name
1994 smart_sync_manifest_path = os.path.join(
1995 manifest.manifestProject.worktree, "smart_sync_override.xml"
1996 )
1997
1998 if opt.clone_bundle is None:
1999 opt.clone_bundle = manifest.CloneBundle
2000
2001 if opt.smart_sync or opt.smart_tag:
2002 manifest_name = self._SmartSyncSetup(
2003 opt, smart_sync_manifest_path, manifest
2004 )
2005 else:
2006 if os.path.isfile(smart_sync_manifest_path):
2007 try:
2008 platform_utils.remove(smart_sync_manifest_path)
2009 except OSError as e:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00002010 logger.error(
Gavin Makea2e3302023-03-11 06:46:20 +00002011 "error: failed to remove existing smart sync override "
Aravind Vasudevane914ec22023-08-31 20:57:31 +00002012 "manifest: %s",
2013 e,
Gavin Makea2e3302023-03-11 06:46:20 +00002014 )
2015
Gavin Makea2e3302023-03-11 06:46:20 +00002016 rp = manifest.repoProject
2017 rp.PreSync()
2018 cb = rp.CurrentBranch
2019 if cb:
2020 base = rp.GetBranch(cb).merge
2021 if not base or not base.startswith("refs/heads/"):
Aravind Vasudevane914ec22023-08-31 20:57:31 +00002022 logger.warning(
Gavin Makea2e3302023-03-11 06:46:20 +00002023 "warning: repo is not tracking a remote branch, so it will "
2024 "not receive updates; run `repo init --repo-rev=stable` to "
Aravind Vasudevane914ec22023-08-31 20:57:31 +00002025 "fix."
Gavin Makea2e3302023-03-11 06:46:20 +00002026 )
2027
2028 for m in self.ManifestList(opt):
2029 if not m.manifestProject.standalone_manifest_url:
2030 m.manifestProject.PreSync()
2031
2032 if opt.repo_upgraded:
2033 _PostRepoUpgrade(manifest, quiet=opt.quiet)
2034
2035 mp = manifest.manifestProject
Jason Chang17833322023-05-23 13:06:55 -07002036
2037 if _REPO_ALLOW_SHALLOW is not None:
2038 if _REPO_ALLOW_SHALLOW == "1":
2039 mp.ConfigureCloneFilterForDepth(None)
2040 elif (
2041 _REPO_ALLOW_SHALLOW == "0" and mp.clone_filter_for_depth is None
2042 ):
2043 mp.ConfigureCloneFilterForDepth("blob:none")
2044
Gavin Makea2e3302023-03-11 06:46:20 +00002045 if opt.mp_update:
Jason Changdaf2ad32023-08-31 17:06:36 -07002046 self._UpdateAllManifestProjects(opt, mp, manifest_name, errors)
Gavin Makea2e3302023-03-11 06:46:20 +00002047 else:
Aravind Vasudevan83c66ec2023-09-28 19:06:59 +00002048 print("Skipping update of local manifest project.")
Gavin Makea2e3302023-03-11 06:46:20 +00002049
2050 # Now that the manifests are up-to-date, setup options whose defaults
2051 # might be in the manifest.
2052 self._ValidateOptionsWithManifest(opt, mp)
2053
Gavin Makf7a3f992025-06-23 09:04:26 -07002054 # Update the repo project and check for new versions of repo.
2055 self._UpdateRepoProject(opt, manifest, errors)
2056
Gavin Makea2e3302023-03-11 06:46:20 +00002057 superproject_logging_data = {}
2058 self._UpdateProjectsRevisionId(
2059 opt, args, superproject_logging_data, manifest
2060 )
2061
Gavin Makea2e3302023-03-11 06:46:20 +00002062 all_projects = self.GetProjects(
2063 args,
2064 missing_ok=True,
2065 submodules_ok=opt.fetch_submodules,
2066 manifest=manifest,
2067 all_manifests=not opt.this_manifest_only,
2068 )
2069
Gavin Makb4b323a2025-06-17 10:54:41 -07002070 # Log the repo projects by existing and new.
2071 existing = [x for x in all_projects if x.Exists]
2072 mp.config.SetString("repo.existingprojectcount", str(len(existing)))
2073 mp.config.SetString(
2074 "repo.newprojectcount", str(len(all_projects) - len(existing))
2075 )
2076
2077 self._fetch_times = _FetchTimes(manifest)
2078 self._local_sync_state = LocalSyncState(manifest)
2079
Gavin Mak85352822025-06-11 00:13:52 +00002080 if opt.interleaved:
2081 sync_method = self._SyncInterleaved
2082 else:
2083 sync_method = self._SyncPhased
2084
2085 sync_method(
2086 opt,
2087 args,
2088 errors,
2089 manifest,
2090 mp,
2091 all_projects,
2092 superproject_logging_data,
2093 )
2094
2095 # Log the previous sync analysis state from the config.
2096 self.git_event_log.LogDataConfigEvents(
2097 mp.config.GetSyncAnalysisStateData(), "previous_sync_state"
2098 )
2099
2100 # Update and log with the new sync analysis state.
2101 mp.config.UpdateSyncAnalysisState(opt, superproject_logging_data)
2102 self.git_event_log.LogDataConfigEvents(
2103 mp.config.GetSyncAnalysisStateData(), "current_sync_state"
2104 )
2105
2106 self._local_sync_state.PruneRemovedProjects()
2107 if self._local_sync_state.IsPartiallySynced():
2108 logger.warning(
2109 "warning: Partial syncs are not supported. For the best "
2110 "experience, sync the entire tree."
2111 )
2112
Gavin Mak871e4c72025-12-15 20:08:59 +00002113 if existing:
2114 self._CheckForBloatedProjects(all_projects, opt)
Gavin Makb5991d72025-12-09 22:29:43 +00002115
Gavin Mak85352822025-06-11 00:13:52 +00002116 if not opt.quiet:
2117 print("repo sync has finished successfully.")
2118
Gavin Makb4b323a2025-06-17 10:54:41 -07002119 def _CreateSyncProgressThread(
2120 self, pm: Progress, stop_event: _threading.Event
2121 ) -> _threading.Thread:
2122 """Creates and returns a daemon thread to update a Progress object.
2123
2124 The returned thread is not yet started. The thread will periodically
2125 update the progress bar with information from _GetSyncProgressMessage
2126 until the stop_event is set.
2127
2128 Args:
2129 pm: The Progress object to update.
2130 stop_event: The threading.Event to signal the monitor to stop.
2131
2132 Returns:
2133 The configured _threading.Thread object.
2134 """
2135
2136 def _monitor_loop():
2137 """The target function for the monitor thread."""
2138 while True:
2139 # Update the progress bar with the current status message.
2140 pm.update(inc=0, msg=self._GetSyncProgressMessage())
2141 # Wait for 1 second or until the stop_event is set.
2142 if stop_event.wait(timeout=1):
2143 return
2144
2145 return _threading.Thread(target=_monitor_loop, daemon=True)
2146
Gavin Makdf3c4012025-06-17 19:40:06 -07002147 def _UpdateManifestLists(
2148 self,
2149 opt: optparse.Values,
2150 err_event: multiprocessing.Event,
2151 errors: List[Exception],
2152 ) -> Tuple[bool, bool]:
2153 """Updates project lists and copy/link files for all manifests.
2154
2155 Args:
2156 opt: Program options from optparse.
2157 err_event: An event to set if any error occurs.
2158 errors: A list to append any encountered exceptions to.
2159
2160 Returns:
2161 A tuple (err_update_projects, err_update_linkfiles) indicating
2162 an error for each task.
2163 """
2164 err_update_projects = False
2165 err_update_linkfiles = False
2166 for m in self.ManifestList(opt):
2167 if m.IsMirror or m.IsArchive:
2168 continue
2169
2170 try:
2171 self.UpdateProjectList(opt, m)
2172 except Exception as e:
2173 err_event.set()
2174 err_update_projects = True
2175 errors.append(e)
2176 if isinstance(e, DeleteWorktreeError):
2177 errors.extend(e.aggregate_errors)
2178 if opt.fail_fast:
2179 logger.error("error: Local checkouts *not* updated.")
2180 raise SyncFailFastError(aggregate_errors=errors)
2181
2182 try:
2183 self.UpdateCopyLinkfileList(m)
2184 except Exception as e:
2185 err_event.set()
2186 err_update_linkfiles = True
2187 errors.append(e)
2188 if opt.fail_fast:
2189 logger.error(
2190 "error: Local update copyfile or linkfile failed."
2191 )
2192 raise SyncFailFastError(aggregate_errors=errors)
2193 return err_update_projects, err_update_linkfiles
2194
Gavin Mak99b5a172025-06-17 20:15:50 -07002195 def _ReportErrors(
2196 self,
2197 errors,
2198 err_network_sync=False,
2199 failing_network_repos=None,
2200 err_checkout=False,
2201 failing_checkout_repos=None,
2202 err_update_projects=False,
2203 err_update_linkfiles=False,
2204 ):
2205 """Logs detailed error messages and raises a SyncError."""
2206
2207 def print_and_log(err_msg):
2208 self.git_event_log.ErrorEvent(err_msg)
2209 logger.error("%s", err_msg)
2210
2211 print_and_log("error: Unable to fully sync the tree")
2212 if err_network_sync:
2213 print_and_log("error: Downloading network changes failed.")
2214 if failing_network_repos:
2215 logger.error(
2216 "Failing repos (network):\n%s",
2217 "\n".join(sorted(failing_network_repos)),
2218 )
2219 if err_update_projects:
2220 print_and_log("error: Updating local project lists failed.")
2221 if err_update_linkfiles:
2222 print_and_log("error: Updating copyfiles or linkfiles failed.")
2223 if err_checkout:
2224 print_and_log("error: Checking out local projects failed.")
2225 if failing_checkout_repos:
2226 logger.error(
2227 "Failing repos (checkout):\n%s",
2228 "\n".join(sorted(failing_checkout_repos)),
2229 )
2230 logger.error(
2231 'Try re-running with "-j1 --fail-fast" to exit at the first error.'
2232 )
2233 raise SyncError(aggregate_errors=errors)
2234
Gavin Mak85352822025-06-11 00:13:52 +00002235 def _SyncPhased(
2236 self,
2237 opt,
2238 args,
2239 errors,
2240 manifest,
2241 mp,
2242 all_projects,
2243 superproject_logging_data,
2244 ):
2245 """Sync projects by separating network and local operations.
2246
2247 This method performs sync in two distinct, sequential phases:
2248 1. Network Phase: Fetches updates for all projects from their remotes.
2249 2. Local Phase: Checks out the updated revisions into the local
2250 worktrees for all projects.
2251
2252 This approach ensures that the local work-tree is not modified until
2253 all network operations are complete, providing a transactional-like
2254 safety net for the checkout state.
2255 """
2256 err_event = multiprocessing.Event()
Gavin Makea2e3302023-03-11 06:46:20 +00002257 err_network_sync = False
2258 err_update_projects = False
2259 err_update_linkfiles = False
2260
Josip Sokcevic5ae82922025-01-31 12:00:52 -08002261 if not opt.local_only:
Gavin Makea2e3302023-03-11 06:46:20 +00002262 with multiprocessing.Manager() as manager:
2263 with ssh.ProxyManager(manager) as ssh_proxy:
2264 # Initialize the socket dir once in the parent.
2265 ssh_proxy.sock()
2266 result = self._FetchMain(
Jason Changdaf2ad32023-08-31 17:06:36 -07002267 opt,
2268 args,
2269 all_projects,
2270 err_event,
2271 ssh_proxy,
2272 manifest,
2273 errors,
Gavin Makea2e3302023-03-11 06:46:20 +00002274 )
2275 all_projects = result.all_projects
2276
2277 if opt.network_only:
2278 return
2279
2280 # If we saw an error, exit with code 1 so that other scripts can
2281 # check.
2282 if err_event.is_set():
2283 err_network_sync = True
2284 if opt.fail_fast:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00002285 logger.error(
2286 "error: Exited sync due to fetch errors.\n"
Gavin Makea2e3302023-03-11 06:46:20 +00002287 "Local checkouts *not* updated. Resolve network issues "
2288 "& retry.\n"
Aravind Vasudevane914ec22023-08-31 20:57:31 +00002289 "`repo sync -l` will update some local checkouts."
Gavin Makea2e3302023-03-11 06:46:20 +00002290 )
Jason Chang32b59562023-07-14 16:45:35 -07002291 raise SyncFailFastError(aggregate_errors=errors)
Gavin Makea2e3302023-03-11 06:46:20 +00002292
Gavin Makdf3c4012025-06-17 19:40:06 -07002293 err_update_projects, err_update_linkfiles = self._UpdateManifestLists(
2294 opt,
2295 err_event,
2296 errors,
2297 )
Gavin Makea2e3302023-03-11 06:46:20 +00002298
2299 err_results = []
2300 # NB: We don't exit here because this is the last step.
Jason Chang32b59562023-07-14 16:45:35 -07002301 err_checkout = not self._Checkout(
2302 all_projects, opt, err_results, errors
2303 )
Gavin Makea2e3302023-03-11 06:46:20 +00002304 if err_checkout:
2305 err_event.set()
2306
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002307 self._PrintManifestNotices(opt)
Gavin Makea2e3302023-03-11 06:46:20 +00002308
2309 # If we saw an error, exit with code 1 so that other scripts can check.
2310 if err_event.is_set():
Gavin Mak99b5a172025-06-17 20:15:50 -07002311 self._ReportErrors(
2312 errors,
2313 err_network_sync=err_network_sync,
2314 err_checkout=err_checkout,
2315 failing_checkout_repos=err_results,
2316 err_update_projects=err_update_projects,
2317 err_update_linkfiles=err_update_linkfiles,
Gavin Makea2e3302023-03-11 06:46:20 +00002318 )
Gavin Makea2e3302023-03-11 06:46:20 +00002319
Gavin Makb4b323a2025-06-17 10:54:41 -07002320 @classmethod
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002321 def _SyncOneProject(cls, opt, project_index, project) -> _SyncResult:
2322 """Syncs a single project for interleaved sync."""
2323 fetch_success = False
2324 remote_fetched = False
Gavin Makd534a552025-08-13 23:42:00 -07002325 fetch_errors = []
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002326 fetch_start = None
2327 fetch_finish = None
2328 network_output = ""
2329
2330 if opt.local_only:
2331 fetch_success = True
2332 else:
2333 fetch_start = time.time()
2334 network_output_capture = io.StringIO()
2335 try:
2336 ssh_proxy = cls.get_parallel_context().get("ssh_proxy")
2337 sync_result = project.Sync_NetworkHalf(
2338 quiet=opt.quiet,
2339 verbose=opt.verbose,
2340 output_redir=network_output_capture,
2341 current_branch_only=cls._GetCurrentBranchOnly(
2342 opt, project.manifest
2343 ),
2344 force_sync=opt.force_sync,
2345 clone_bundle=opt.clone_bundle,
2346 tags=opt.tags,
2347 archive=project.manifest.IsArchive,
2348 optimized_fetch=opt.optimized_fetch,
2349 retry_fetches=opt.retry_fetches,
2350 prune=opt.prune,
2351 ssh_proxy=ssh_proxy,
2352 clone_filter=project.manifest.CloneFilter,
2353 partial_clone_exclude=project.manifest.PartialCloneExclude,
2354 clone_filter_for_depth=project.manifest.CloneFilterForDepth,
2355 )
2356 fetch_success = sync_result.success
2357 remote_fetched = sync_result.remote_fetched
Gavin Makd534a552025-08-13 23:42:00 -07002358 if sync_result.error:
2359 fetch_errors.append(sync_result.error)
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002360 except KeyboardInterrupt:
2361 logger.error(
2362 "Keyboard interrupt while processing %s", project.name
2363 )
2364 except GitError as e:
Gavin Makd534a552025-08-13 23:42:00 -07002365 fetch_errors.append(e)
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002366 logger.error("error.GitError: Cannot fetch %s", e)
2367 except Exception as e:
Gavin Makd534a552025-08-13 23:42:00 -07002368 fetch_errors.append(e)
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002369 logger.error(
2370 "error: Cannot fetch %s (%s: %s)",
2371 project.name,
2372 type(e).__name__,
2373 e,
2374 )
2375 finally:
2376 fetch_finish = time.time()
2377 network_output = network_output_capture.getvalue()
2378
2379 checkout_success = False
Gavin Makd534a552025-08-13 23:42:00 -07002380 checkout_errors = []
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002381 checkout_start = None
2382 checkout_finish = None
2383 checkout_stderr = ""
2384
Gavin Mak720bd1e2025-07-23 15:23:10 -07002385 if fetch_success:
2386 # We skip checkout if it's network-only or if the project has no
2387 # working tree (e.g., a mirror).
2388 if opt.network_only or not project.worktree:
2389 checkout_success = True
2390 else:
2391 # This is a normal project that needs a checkout.
2392 checkout_start = time.time()
2393 stderr_capture = io.StringIO()
2394 try:
2395 with contextlib.redirect_stderr(stderr_capture):
2396 syncbuf = SyncBuffer(
2397 project.manifest.manifestProject.config,
2398 detach_head=opt.detach_head,
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002399 )
Gavin Mak720bd1e2025-07-23 15:23:10 -07002400 project.Sync_LocalHalf(
2401 syncbuf,
2402 force_sync=opt.force_sync,
2403 force_checkout=opt.force_checkout,
2404 force_rebase=opt.rebase,
Gavin Mak720bd1e2025-07-23 15:23:10 -07002405 verbose=opt.verbose,
2406 )
2407 checkout_success = syncbuf.Finish()
Gavin Maka64149a2025-08-13 22:48:36 -07002408 if syncbuf.errors:
Gavin Makd534a552025-08-13 23:42:00 -07002409 checkout_errors.extend(syncbuf.errors)
Gavin Mak720bd1e2025-07-23 15:23:10 -07002410 except KeyboardInterrupt:
2411 logger.error(
2412 "Keyboard interrupt while processing %s", project.name
2413 )
2414 except GitError as e:
Gavin Makd534a552025-08-13 23:42:00 -07002415 checkout_errors.append(e)
Gavin Mak720bd1e2025-07-23 15:23:10 -07002416 logger.error(
2417 "error.GitError: Cannot checkout %s: %s",
2418 project.name,
2419 e,
2420 )
2421 except Exception as e:
Gavin Makd534a552025-08-13 23:42:00 -07002422 checkout_errors.append(e)
Gavin Mak720bd1e2025-07-23 15:23:10 -07002423 logger.error(
2424 "error: Cannot checkout %s: %s: %s",
2425 project.name,
2426 type(e).__name__,
2427 e,
2428 )
2429 finally:
2430 checkout_finish = time.time()
2431 checkout_stderr = stderr_capture.getvalue()
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002432
2433 # Consolidate all captured output.
2434 captured_parts = []
2435 if network_output:
2436 captured_parts.append(network_output)
2437 if checkout_stderr:
2438 captured_parts.append(checkout_stderr)
2439 stderr_text = "\n".join(captured_parts)
2440
2441 return _SyncResult(
2442 project_index=project_index,
2443 relpath=project.relpath,
2444 fetch_success=fetch_success,
2445 remote_fetched=remote_fetched,
2446 checkout_success=checkout_success,
Gavin Makd534a552025-08-13 23:42:00 -07002447 fetch_errors=fetch_errors,
2448 checkout_errors=checkout_errors,
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002449 stderr_text=stderr_text.strip(),
2450 fetch_start=fetch_start,
2451 fetch_finish=fetch_finish,
2452 checkout_start=checkout_start,
2453 checkout_finish=checkout_finish,
2454 )
2455
2456 @classmethod
Gavin Makb4b323a2025-06-17 10:54:41 -07002457 def _SyncProjectList(cls, opt, project_indices) -> _InterleavedSyncResult:
2458 """Worker for interleaved sync.
2459
2460 This function is responsible for syncing a group of projects that share
2461 a git object directory.
2462
2463 Args:
2464 opt: Program options returned from optparse. See _Options().
2465 project_indices: A list of indices into the projects list stored in
2466 the parallel context.
2467
2468 Returns:
2469 An `_InterleavedSyncResult` containing the results for each project.
2470 """
2471 results = []
2472 context = cls.get_parallel_context()
2473 projects = context["projects"]
2474 sync_dict = context["sync_dict"]
2475
2476 assert project_indices, "_SyncProjectList called with no indices."
2477
2478 # Use the first project as the representative for the progress bar.
2479 first_project = projects[project_indices[0]]
2480 key = f"{first_project.name} @ {first_project.relpath}"
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002481 sync_dict[key] = time.time()
Gavin Makb4b323a2025-06-17 10:54:41 -07002482
2483 try:
2484 for idx in project_indices:
2485 project = projects[idx]
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002486 results.append(cls._SyncOneProject(opt, idx, project))
Gavin Makb4b323a2025-06-17 10:54:41 -07002487 finally:
2488 del sync_dict[key]
2489
2490 return _InterleavedSyncResult(results=results)
2491
    def _ProcessSyncInterleavedResults(
        self,
        finished_relpaths: Set[str],
        err_event: _threading.Event,
        errors: List[Exception],
        opt: optparse.Values,
        pool: Optional[multiprocessing.Pool],
        pm: Progress,
        results_sets: List[_InterleavedSyncResult],
    ):
        """Callback to process results from interleaved sync workers.

        Updates progress and timing bookkeeping for every finished project,
        records failures into the shared error state, and optionally shuts
        down the pool when failing fast.

        Args:
            finished_relpaths: Set updated in place with relpaths of projects
                that completed (successfully or not).
            err_event: Event set when any project fails.
            errors: List extended in place with worker exceptions.
            opt: Program options returned from optparse. See _Options().
            pool: Worker pool closed when failing fast; may be None.
            pm: Progress bar updated once per project result.
            results_sets: Batches of _InterleavedSyncResult from workers.

        Returns:
            True if every processed project succeeded, False otherwise.
        """
        ret = True
        projects = self.get_parallel_context()["projects"]
        for result_group in results_sets:
            for result in result_group.results:
                pm.update()
                project = projects[result.project_index]

                success = result.fetch_success and result.checkout_success
                if result.stderr_text and (opt.verbose or not success):
                    pm.display_message(result.stderr_text)

                # fetch_start is None when the fetch phase was skipped
                # (e.g. --local-only).
                if result.fetch_start:
                    self._fetch_times.Set(
                        project,
                        result.fetch_finish - result.fetch_start,
                    )
                    self._local_sync_state.SetFetchTime(project)
                    self.event_log.AddSync(
                        project,
                        event_log.TASK_SYNC_NETWORK,
                        result.fetch_start,
                        result.fetch_finish,
                        result.fetch_success,
                    )
                if result.checkout_start:
                    if result.checkout_success:
                        self._local_sync_state.SetCheckoutTime(project)
                    self.event_log.AddSync(
                        project,
                        event_log.TASK_SYNC_LOCAL,
                        result.checkout_start,
                        result.checkout_finish,
                        result.checkout_success,
                    )

                finished_relpaths.add(result.relpath)

                if not success:
                    ret = False
                    err_event.set()
                    if result.fetch_errors:
                        errors.extend(result.fetch_errors)
                        self._interleaved_err_network = True
                        self._interleaved_err_network_results.append(
                            result.relpath
                        )
                    if result.checkout_errors:
                        errors.extend(result.checkout_errors)
                        self._interleaved_err_checkout = True
                        self._interleaved_err_checkout_results.append(
                            result.relpath
                        )

            # Stop consuming further batches once a failure is seen.
            if not ret and opt.fail_fast:
                if pool:
                    pool.close()
                break
        return ret
2561
Gavin Mak85352822025-06-11 00:13:52 +00002562 def _SyncInterleaved(
2563 self,
2564 opt,
2565 args,
2566 errors,
2567 manifest,
2568 mp,
2569 all_projects,
2570 superproject_logging_data,
2571 ):
2572 """Sync projects by performing network and local operations in parallel.
Gavin Makea2e3302023-03-11 06:46:20 +00002573
Gavin Mak85352822025-06-11 00:13:52 +00002574 This method processes each project (or groups of projects that share git
2575 objects) independently. For each project, it performs the fetch and
2576 checkout operations back-to-back. These independent tasks are run in
2577 parallel.
Gavin Makea2e3302023-03-11 06:46:20 +00002578
Gavin Mak85352822025-06-11 00:13:52 +00002579 It respects two constraints for correctness:
2580 1. Projects in nested directories (e.g. 'foo' and 'foo/bar') are
2581 processed in hierarchical order.
2582 2. Projects that share git objects are processed serially to prevent
2583 race conditions.
2584 """
Gavin Mak99b5a172025-06-17 20:15:50 -07002585 # Temporary state for tracking errors in interleaved mode.
2586 self._interleaved_err_network = False
2587 self._interleaved_err_network_results = []
2588 self._interleaved_err_checkout = False
2589 self._interleaved_err_checkout_results = []
2590
Gavin Makb4b323a2025-06-17 10:54:41 -07002591 err_event = multiprocessing.Event()
Gavin Maka6e1a592025-08-13 01:51:59 +00002592 finished_relpaths = set()
Gavin Makb4b323a2025-06-17 10:54:41 -07002593 project_list = list(all_projects)
2594 pm = Progress(
2595 "Syncing",
2596 len(project_list),
2597 delay=False,
2598 quiet=opt.quiet,
2599 show_elapsed=True,
2600 elide=True,
2601 )
2602 previously_pending_relpaths = set()
2603
2604 sync_event = _threading.Event()
2605 sync_progress_thread = self._CreateSyncProgressThread(pm, sync_event)
2606
Gavin Mak1afe96a2025-10-20 11:13:09 -07002607 try:
2608 with multiprocessing.Manager() as manager, ssh.ProxyManager(
2609 manager
2610 ) as ssh_proxy:
2611 ssh_proxy.sock()
2612 with self.ParallelContext():
2613 self.get_parallel_context()["ssh_proxy"] = ssh_proxy
2614 # TODO(gavinmak): Use multprocessing.Queue instead of dict.
2615 self.get_parallel_context()[
2616 "sync_dict"
2617 ] = multiprocessing.Manager().dict()
2618 sync_progress_thread.start()
Gavin Makb4b323a2025-06-17 10:54:41 -07002619
Gavin Mak1afe96a2025-10-20 11:13:09 -07002620 try:
2621 # Outer loop for dynamic project discovery. This
2622 # continues until no unsynced projects remain.
2623 while True:
2624 projects_to_sync = [
2625 p
2626 for p in project_list
2627 if p.relpath not in finished_relpaths
2628 ]
2629 if not projects_to_sync:
2630 break
Gavin Makb4b323a2025-06-17 10:54:41 -07002631
Gavin Mak1afe96a2025-10-20 11:13:09 -07002632 pending_relpaths = {
2633 p.relpath for p in projects_to_sync
2634 }
2635 if previously_pending_relpaths == pending_relpaths:
2636 stalled_projects_str = "\n".join(
2637 f" - {path}"
2638 for path in sorted(list(pending_relpaths))
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002639 )
Gavin Mak1afe96a2025-10-20 11:13:09 -07002640 logger.error(
2641 "The following projects failed and could "
2642 "not be synced:\n%s",
2643 stalled_projects_str,
2644 )
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002645 err_event.set()
Gavin Mak1afe96a2025-10-20 11:13:09 -07002646 break
2647 previously_pending_relpaths = pending_relpaths
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002648
Gavin Mak1afe96a2025-10-20 11:13:09 -07002649 self.get_parallel_context()[
2650 "projects"
2651 ] = projects_to_sync
2652 project_index_map = {
2653 p: i for i, p in enumerate(projects_to_sync)
2654 }
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002655
Gavin Mak1afe96a2025-10-20 11:13:09 -07002656 # Inner loop to process projects in a hierarchical
2657 # order. This iterates through levels of project
2658 # dependencies (e.g. 'foo' then 'foo/bar'). All
2659 # projects in one level can be processed in
2660 # parallel, but we must wait for a level to complete
2661 # before starting the next.
2662 for level_projects in _SafeCheckoutOrder(
2663 projects_to_sync
2664 ):
2665 if not level_projects:
2666 continue
2667
2668 objdir_project_map = collections.defaultdict(
2669 list
2670 )
2671 for p in level_projects:
2672 objdir_project_map[p.objdir].append(
2673 project_index_map[p]
2674 )
2675
2676 work_items = list(objdir_project_map.values())
2677 if not work_items:
2678 continue
2679
2680 jobs = max(1, min(opt.jobs, len(work_items)))
2681 callback = functools.partial(
2682 self._ProcessSyncInterleavedResults,
2683 finished_relpaths,
2684 err_event,
2685 errors,
2686 opt,
2687 )
2688 if not self.ExecuteInParallel(
2689 jobs,
2690 functools.partial(
2691 self._SyncProjectList, opt
2692 ),
2693 work_items,
2694 callback=callback,
2695 output=pm,
2696 chunksize=1,
2697 initializer=self.InitWorker,
2698 ):
2699 err_event.set()
2700
2701 if err_event.is_set() and opt.fail_fast:
2702 raise SyncFailFastError(
2703 aggregate_errors=errors
2704 )
2705
2706 self._ReloadManifest(None, manifest)
2707 project_list = self.GetProjects(
2708 args,
2709 missing_ok=True,
2710 submodules_ok=opt.fetch_submodules,
2711 manifest=manifest,
2712 all_manifests=not opt.this_manifest_only,
2713 )
2714 pm.update_total(len(project_list))
2715 finally:
2716 sync_event.set()
2717 sync_progress_thread.join()
2718 finally:
2719 self._fetch_times.Save()
2720 self._local_sync_state.Save()
Gavin Makb4b323a2025-06-17 10:54:41 -07002721
2722 pm.end()
2723
Gavin Mak99b5a172025-06-17 20:15:50 -07002724 err_update_projects, err_update_linkfiles = self._UpdateManifestLists(
2725 opt, err_event, errors
2726 )
Gavin Mak7b6ffed2025-06-13 17:53:38 -07002727 if not self.outer_client.manifest.IsArchive:
2728 self._GCProjects(project_list, opt, err_event)
2729
2730 self._PrintManifestNotices(opt)
Gavin Makb4b323a2025-06-17 10:54:41 -07002731 if err_event.is_set():
Gavin Mak99b5a172025-06-17 20:15:50 -07002732 self._ReportErrors(
2733 errors,
2734 err_network_sync=self._interleaved_err_network,
2735 failing_network_repos=self._interleaved_err_network_results,
2736 err_checkout=self._interleaved_err_checkout,
2737 failing_checkout_repos=self._interleaved_err_checkout_results,
2738 err_update_projects=err_update_projects,
2739 err_update_linkfiles=err_update_linkfiles,
Gavin Makb4b323a2025-06-17 10:54:41 -07002740 )
Mike Frysingere19d9e12020-02-12 11:23:32 -05002741
David Pursehouse819827a2020-02-12 15:20:19 +09002742
Shawn O. Pearce80d2ceb2012-10-26 12:23:05 -07002743def _PostRepoUpgrade(manifest, quiet=False):
Gavin Makea2e3302023-03-11 06:46:20 +00002744 # Link the docs for the internal .repo/ layout for people.
2745 link = os.path.join(manifest.repodir, "internal-fs-layout.md")
2746 if not platform_utils.islink(link):
2747 target = os.path.join("repo", "docs", "internal-fs-layout.md")
2748 try:
2749 platform_utils.symlink(target, link)
2750 except Exception:
2751 pass
Mike Frysingerfdeb20f2021-11-14 03:53:04 -05002752
Gavin Makea2e3302023-03-11 06:46:20 +00002753 wrapper = Wrapper()
2754 if wrapper.NeedSetupGnuPG():
2755 wrapper.SetupGnuPG(quiet)
2756 for project in manifest.projects:
2757 if project.Exists:
2758 project.PostRepoUpgrade()
Shawn O. Pearcee756c412009-04-13 11:51:15 -07002759
David Pursehouse819827a2020-02-12 15:20:19 +09002760
Mike Frysingerc58ec4d2020-02-17 14:36:08 -05002761def _PostRepoFetch(rp, repo_verify=True, verbose=False):
Gavin Makea2e3302023-03-11 06:46:20 +00002762 if rp.HasChanges:
Aravind Vasudevan8bc50002023-10-13 19:22:47 +00002763 logger.warning("info: A new version of repo is available")
Gavin Makea2e3302023-03-11 06:46:20 +00002764 wrapper = Wrapper()
2765 try:
2766 rev = rp.bare_git.describe(rp.GetRevisionId())
2767 except GitError:
2768 rev = None
2769 _, new_rev = wrapper.check_repo_rev(
2770 rp.gitdir, rev, repo_verify=repo_verify
2771 )
2772 # See if we're held back due to missing signed tag.
2773 current_revid = rp.bare_git.rev_parse("HEAD")
2774 new_revid = rp.bare_git.rev_parse("--verify", new_rev)
2775 if current_revid != new_revid:
2776 # We want to switch to the new rev, but also not trash any
2777 # uncommitted changes. This helps with local testing/hacking.
2778 # If a local change has been made, we will throw that away.
2779 # We also have to make sure this will switch to an older commit if
2780 # that's the latest tag in order to support release rollback.
2781 try:
Josip Sokcevicfc901b92025-03-12 20:40:49 +00002782 # Refresh index since reset --keep won't do it.
2783 rp.work_git.update_index("-q", "--refresh")
Gavin Makea2e3302023-03-11 06:46:20 +00002784 rp.work_git.reset("--keep", new_rev)
2785 except GitError as e:
Jason Chang32b59562023-07-14 16:45:35 -07002786 raise RepoUnhandledExceptionError(e)
Aravind Vasudevan83c66ec2023-09-28 19:06:59 +00002787 print("info: Restarting repo with latest version")
Gavin Makea2e3302023-03-11 06:46:20 +00002788 raise RepoChangedException(["--repo-upgraded"])
2789 else:
Aravind Vasudevane914ec22023-08-31 20:57:31 +00002790 logger.warning("warning: Skipped upgrade to unverified version")
Shawn O. Pearcee756c412009-04-13 11:51:15 -07002791 else:
Gavin Makea2e3302023-03-11 06:46:20 +00002792 if verbose:
Aravind Vasudevance0ed792023-10-06 18:36:22 +00002793 print("repo version %s is current" % rp.work_git.describe(HEAD))
Shawn O. Pearcee756c412009-04-13 11:51:15 -07002794
David Pursehouse819827a2020-02-12 15:20:19 +09002795
Mike Frysingerd4aee652023-10-19 05:13:32 -04002796class _FetchTimes:
Gavin Makea2e3302023-03-11 06:46:20 +00002797 _ALPHA = 0.5
Dave Borowitzd9478582012-10-23 16:35:39 -07002798
Gavin Makea2e3302023-03-11 06:46:20 +00002799 def __init__(self, manifest):
2800 self._path = os.path.join(manifest.repodir, ".repo_fetchtimes.json")
Gavin Mak041f9772023-05-10 20:41:12 +00002801 self._saved = None
2802 self._seen = {}
Dave Borowitz67700e92012-10-23 15:00:54 -07002803
Gavin Makea2e3302023-03-11 06:46:20 +00002804 def Get(self, project):
2805 self._Load()
Gavin Mak041f9772023-05-10 20:41:12 +00002806 return self._saved.get(project.name, _ONE_DAY_S)
Dave Borowitz67700e92012-10-23 15:00:54 -07002807
Gavin Makea2e3302023-03-11 06:46:20 +00002808 def Set(self, project, t):
Gavin Makea2e3302023-03-11 06:46:20 +00002809 name = project.name
Gavin Mak041f9772023-05-10 20:41:12 +00002810
2811 # For shared projects, save the longest time.
2812 self._seen[name] = max(self._seen.get(name, 0), t)
Dave Borowitz67700e92012-10-23 15:00:54 -07002813
Gavin Makea2e3302023-03-11 06:46:20 +00002814 def _Load(self):
Gavin Mak041f9772023-05-10 20:41:12 +00002815 if self._saved is None:
Gavin Makea2e3302023-03-11 06:46:20 +00002816 try:
2817 with open(self._path) as f:
Gavin Mak041f9772023-05-10 20:41:12 +00002818 self._saved = json.load(f)
Jason R. Coombsae824fb2023-10-20 23:32:40 +05452819 except (OSError, ValueError):
Gavin Makea2e3302023-03-11 06:46:20 +00002820 platform_utils.remove(self._path, missing_ok=True)
Gavin Mak041f9772023-05-10 20:41:12 +00002821 self._saved = {}
Dave Borowitz67700e92012-10-23 15:00:54 -07002822
Gavin Makea2e3302023-03-11 06:46:20 +00002823 def Save(self):
Gavin Mak1afe96a2025-10-20 11:13:09 -07002824 if not self._seen:
Gavin Makea2e3302023-03-11 06:46:20 +00002825 return
Dave Borowitzd9478582012-10-23 16:35:39 -07002826
Gavin Mak1afe96a2025-10-20 11:13:09 -07002827 self._Load()
2828
Gavin Mak041f9772023-05-10 20:41:12 +00002829 for name, t in self._seen.items():
2830 # Keep a moving average across the previous/current sync runs.
2831 old = self._saved.get(name, t)
Gavin Mak1afe96a2025-10-20 11:13:09 -07002832 self._saved[name] = (self._ALPHA * t) + ((1 - self._ALPHA) * old)
Dave Borowitzd9478582012-10-23 16:35:39 -07002833
Gavin Makea2e3302023-03-11 06:46:20 +00002834 try:
2835 with open(self._path, "w") as f:
Gavin Mak1afe96a2025-10-20 11:13:09 -07002836 json.dump(self._saved, f, indent=2)
Jason R. Coombsae824fb2023-10-20 23:32:40 +05452837 except (OSError, TypeError):
Gavin Makea2e3302023-03-11 06:46:20 +00002838 platform_utils.remove(self._path, missing_ok=True)
2839
Dan Willemsen0745bb22015-08-17 13:41:45 -07002840
Mike Frysingerd4aee652023-10-19 05:13:32 -04002841class LocalSyncState:
Gavin Mak1d2e99d2023-07-22 02:56:44 +00002842 _LAST_FETCH = "last_fetch"
2843 _LAST_CHECKOUT = "last_checkout"
2844
2845 def __init__(self, manifest):
Gavin Makf0aeb222023-08-08 04:43:36 +00002846 self._manifest = manifest
2847 self._path = os.path.join(
2848 self._manifest.repodir, ".repo_localsyncstate.json"
2849 )
Gavin Mak1d2e99d2023-07-22 02:56:44 +00002850 self._time = time.time()
2851 self._state = None
2852 self._Load()
2853
2854 def SetFetchTime(self, project):
2855 self._Set(project, self._LAST_FETCH)
2856
2857 def SetCheckoutTime(self, project):
2858 self._Set(project, self._LAST_CHECKOUT)
2859
2860 def GetFetchTime(self, project):
2861 return self._Get(project, self._LAST_FETCH)
2862
2863 def GetCheckoutTime(self, project):
2864 return self._Get(project, self._LAST_CHECKOUT)
2865
2866 def _Get(self, project, key):
2867 self._Load()
2868 p = project.relpath
2869 if p not in self._state:
2870 return
2871 return self._state[p].get(key)
2872
2873 def _Set(self, project, key):
2874 p = project.relpath
2875 if p not in self._state:
2876 self._state[p] = {}
2877 self._state[p][key] = self._time
2878
2879 def _Load(self):
2880 if self._state is None:
2881 try:
2882 with open(self._path) as f:
2883 self._state = json.load(f)
Jason R. Coombsae824fb2023-10-20 23:32:40 +05452884 except (OSError, ValueError):
Gavin Mak1d2e99d2023-07-22 02:56:44 +00002885 platform_utils.remove(self._path, missing_ok=True)
2886 self._state = {}
2887
2888 def Save(self):
2889 if not self._state:
2890 return
2891 try:
2892 with open(self._path, "w") as f:
2893 json.dump(self._state, f, indent=2)
Jason R. Coombsae824fb2023-10-20 23:32:40 +05452894 except (OSError, TypeError):
Gavin Mak1d2e99d2023-07-22 02:56:44 +00002895 platform_utils.remove(self._path, missing_ok=True)
2896
Gavin Makf0aeb222023-08-08 04:43:36 +00002897 def PruneRemovedProjects(self):
2898 """Remove entries don't exist on disk and save."""
2899 if not self._state:
2900 return
2901 delete = set()
2902 for path in self._state:
2903 gitdir = os.path.join(self._manifest.topdir, path, ".git")
Matt Schulte0dd0a832023-11-30 11:00:16 -08002904 if not os.path.exists(gitdir) or os.path.islink(gitdir):
Gavin Makf0aeb222023-08-08 04:43:36 +00002905 delete.add(path)
2906 if not delete:
2907 return
2908 for path in delete:
2909 del self._state[path]
2910 self.Save()
2911
2912 def IsPartiallySynced(self):
2913 """Return whether a partial sync state is detected."""
2914 self._Load()
2915 prev_checkout_t = None
Gavin Mak321b7932023-08-22 03:10:01 +00002916 for path, data in self._state.items():
2917 if path == self._manifest.repoProject.relpath:
2918 # The repo project isn't included in most syncs so we should
2919 # ignore it here.
2920 continue
Gavin Makf0aeb222023-08-08 04:43:36 +00002921 checkout_t = data.get(self._LAST_CHECKOUT)
2922 if not checkout_t:
2923 return True
2924 prev_checkout_t = prev_checkout_t or checkout_t
2925 if prev_checkout_t != checkout_t:
2926 return True
2927 return False
2928
Gavin Mak1d2e99d2023-07-22 02:56:44 +00002929
Dan Willemsen0745bb22015-08-17 13:41:45 -07002930# This is a replacement for xmlrpc.client.Transport using urllib2
2931# and supporting persistent-http[s]. It cannot change hosts from
2932# request to request like the normal transport, the real url
2933# is passed during initialization.
class PersistentTransport(xmlrpc.client.Transport):
    """XML-RPC transport that honors persistent-http[s] URL schemes.

    The real server URL is fixed at construction time; the host passed to
    request() by xmlrpc.client is ignored in favor of it.
    """

    def __init__(self, orig_host):
        # orig_host: the real server URL, used for every request.
        super().__init__()
        self.orig_host = orig_host

    def request(self, host, handler, request_body, verbose=False):
        """POST |request_body| to |handler| on the configured host.

        Args:
            host: Ignored; self.orig_host is used instead.
            handler: Path of the RPC endpoint, joined onto orig_host.
            request_body: Encoded XML-RPC request payload.
            verbose: Its negation is passed to GetUrlCookieFile.

        Returns:
            The parsed XML-RPC response.

        Raises:
            OSError: If the response body is not parseable XML.
            urllib.error.HTTPError: For HTTP failures (a 501 is retried
                once before propagating).
        """
        with GetUrlCookieFile(self.orig_host, not verbose) as (
            cookiefile,
            proxy,
        ):
            # Python doesn't understand cookies with the #HttpOnly_ prefix
            # Since we're only using them for HTTP, copy the file temporarily,
            # stripping those prefixes away.
            if cookiefile:
                tmpcookiefile = tempfile.NamedTemporaryFile(mode="w")
                tmpcookiefile.write("# HTTP Cookie File")
                try:
                    with open(cookiefile) as f:
                        for line in f:
                            if line.startswith("#HttpOnly_"):
                                line = line[len("#HttpOnly_") :]
                            tmpcookiefile.write(line)
                    tmpcookiefile.flush()

                    cookiejar = cookielib.MozillaCookieJar(tmpcookiefile.name)
                    try:
                        cookiejar.load()
                    except cookielib.LoadError:
                        # Fall back to an empty jar on a malformed file.
                        cookiejar = cookielib.CookieJar()
                finally:
                    tmpcookiefile.close()
            else:
                cookiejar = cookielib.CookieJar()

            proxyhandler = urllib.request.ProxyHandler
            if proxy:
                proxyhandler = urllib.request.ProxyHandler(
                    {"http": proxy, "https": proxy}
                )

            opener = urllib.request.build_opener(
                urllib.request.HTTPCookieProcessor(cookiejar), proxyhandler
            )

            url = urllib.parse.urljoin(self.orig_host, handler)
            parse_results = urllib.parse.urlparse(url)

            # Map the custom persistent-* schemes onto plain http/https.
            scheme = parse_results.scheme
            if scheme == "persistent-http":
                scheme = "http"
            if scheme == "persistent-https":
                # If we're proxying through persistent-https, use http. The
                # proxy itself will do the https.
                if proxy:
                    scheme = "http"
                else:
                    scheme = "https"

            # Parse out any authentication information using the base class.
            host, extra_headers, _ = self.get_host_info(parse_results.netloc)

            url = urllib.parse.urlunparse(
                (
                    scheme,
                    host,
                    parse_results.path,
                    parse_results.params,
                    parse_results.query,
                    parse_results.fragment,
                )
            )

            request = urllib.request.Request(url, request_body)
            if extra_headers is not None:
                for name, header in extra_headers:
                    request.add_header(name, header)
            request.add_header("Content-Type", "text/xml")
            try:
                response = opener.open(request)
            except urllib.error.HTTPError as e:
                if e.code == 501:
                    # We may have been redirected through a login process
                    # but our POST turned into a GET. Retry.
                    response = opener.open(request)
                else:
                    raise

            p, u = xmlrpc.client.getparser()
            # Response should be fairly small, so read it all at once.
            # This way we can show it to the user in case of error (e.g. HTML).
            data = response.read()
            try:
                p.feed(data)
            except xml.parsers.expat.ExpatError as e:
                raise OSError(
                    f"Parsing the manifest failed: {e}\n"
                    f"Please report this to your manifest server admin.\n"
                    f'Here is the full response:\n{data.decode("utf-8")}'
                )
            p.close()
            return u.close()

    def close(self):
        # No-op: each request builds its own opener, so there is no
        # connection state to release here.
        pass