Control: tags 962197 + pending
Control: tags 962205 + patch
Control: tags 962205 + pending
Dear maintainer,

I've prepared an NMU for buildstream (versioned as 1.4.3-0.1) and
uploaded it to DELAYED/15. Please feel free to tell me if I should
cancel it.

cu
Adrian
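Some notes on the attached diff, for review convenience: the bulk of the
cascache.py changes wrap each gRPC call in a small retry helper. Since the
idiom (a generator that yields context managers, each of which decides in
__exit__ whether a failure is retryable) is easy to misread, here is a
self-contained analogue of the same pattern, with ConnectionError standing
in for grpc.StatusCode.UNAVAILABLE and flaky_call() as a made-up stand-in
for the real gRPC stubs:

    class Attempt:
        def __init__(self, last_attempt=False):
            self._passed = None
            self._last_attempt = last_attempt

        def passed(self):
            return self._passed

        def __enter__(self):
            pass

        def __exit__(self, exc_type, exc_value, traceback):
            self._passed = exc_type is None
            if isinstance(exc_value, ConnectionError):
                # Returning True suppresses the exception, i.e. retries;
                # on the final attempt the error is allowed to propagate.
                return not self._last_attempt
            return False

    def retry(tries=5):
        for i in range(tries):
            attempt = Attempt(last_attempt=(i == tries - 1))
            yield attempt
            if attempt.passed():
                break

    outcomes = iter([ConnectionError("down"), ConnectionError("down"), "ok"])

    def flaky_call():
        # Hypothetical stand-in for a gRPC stub call: fails twice, then works.
        outcome = next(outcomes)
        if isinstance(outcome, Exception):
            raise outcome
        return outcome

    for attempt in retry():
        with attempt:
            response = flaky_call()

    print(response)  # -> "ok", reached on the third attempt

On the last attempt the exception is no longer suppressed, so a persistently
unavailable server still fails the operation loudly.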
diff -Nru buildstream-1.4.1/buildstream/_artifactcache/cascache.py buildstream-1.4.3/buildstream/_artifactcache/cascache.py
--- buildstream-1.4.1/buildstream/_artifactcache/cascache.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/buildstream/_artifactcache/cascache.py	2020-05-12 11:35:41.000000000 +0300
@@ -49,6 +49,46 @@
 
 _MAX_PAYLOAD_BYTES = 1024 * 1024
 
+
+class _Attempt():
+
+    def __init__(self, last_attempt=False):
+        self.__passed = None
+        self.__last_attempt = last_attempt
+
+    def passed(self):
+        return self.__passed
+
+    def __enter__(self):
+        pass
+
+    def __exit__(self, exc_type, exc_value, traceback):
+        try:
+            if exc_type is None:
+                self.__passed = True
+            else:
+                self.__passed = False
+                if exc_value is not None:
+                    raise exc_value
+        except grpc.RpcError as e:
+            if e.code() == grpc.StatusCode.UNAVAILABLE:
+                return not self.__last_attempt
+            elif e.code() == grpc.StatusCode.ABORTED:
+                raise CASRemoteError("grpc aborted: {}".format(str(e)),
+                                     detail=e.details(),
+                                     temporary=True) from e
+            else:
+                return False
+        return False
+
+
+def _retry(tries=5):
+    for a in range(tries):
+        attempt = _Attempt(last_attempt=(a == tries - 1))
+        yield attempt
+        if attempt.passed():
+            break
+
+
 class BlobNotFound(ArtifactError):
 
     def __init__(self, blob, msg):
@@ -248,7 +288,9 @@
 
             request = buildstream_pb2.GetReferenceRequest()
             request.key = ref
-            response = remote.ref_storage.GetReference(request)
+            for attempt in _retry():
+                with attempt:
+                    response = remote.ref_storage.GetReference(request)
 
             tree = remote_execution_pb2.Digest()
             tree.hash = response.digest.hash
@@ -296,7 +338,9 @@
             try:
                 request = buildstream_pb2.GetReferenceRequest()
                 request.key = ref
-                response = remote.ref_storage.GetReference(request)
+                for attempt in _retry():
+                    with attempt:
+                        response = remote.ref_storage.GetReference(request)
 
                 if response.digest.hash == tree.hash and response.digest.size_bytes == tree.size_bytes:
                     # ref is already on the server with the same tree
@@ -313,7 +357,9 @@
                 request.keys.append(ref)
                 request.digest.hash = tree.hash
                 request.digest.size_bytes = tree.size_bytes
-                remote.ref_storage.UpdateReference(request)
+                for attempt in _retry():
+                    with attempt:
+                        remote.ref_storage.UpdateReference(request)
 
                 skipped_remote = False
             except grpc.RpcError as e:
@@ -786,7 +832,9 @@
                 remote.init()
 
                 request = buildstream_pb2.StatusRequest()
-                response = remote.ref_storage.Status(request)
+                for attempt in _retry():
+                    with attempt:
+                        response = remote.ref_storage.Status(request)
 
                 if remote_spec.push and not response.allow_updates:
                     q.put('Artifact server does not allow push')
@@ -986,7 +1034,9 @@
                     offset += chunk_size
                     finished = request.finish_write
 
-        response = remote.bytestream.Write(request_stream(resource_name, stream))
+        for attempt in _retry():
+            with attempt:
+                response = remote.bytestream.Write(request_stream(resource_name, stream))
 
         assert response.committed_size == digest.size_bytes
 
@@ -1003,7 +1053,9 @@
                 d.hash = required_digest.hash
                 d.size_bytes = required_digest.size_bytes
 
-            response = remote.cas.FindMissingBlobs(request)
+            for attempt in _retry():
+                with attempt:
+                    response = remote.cas.FindMissingBlobs(request)
             for missing_digest in response.missing_blob_digests:
                 d = remote_execution_pb2.Digest()
                 d.hash = missing_digest.hash
@@ -1089,7 +1141,9 @@
         self.max_batch_total_size_bytes = _MAX_PAYLOAD_BYTES
         try:
            request = remote_execution_pb2.GetCapabilitiesRequest()
-            response = self.capabilities.GetCapabilities(request)
+            for attempt in _retry():
+                with attempt:
+                    response = self.capabilities.GetCapabilities(request)
             server_max_batch_total_size_bytes = response.cache_capabilities.max_batch_total_size_bytes
             if 0 < server_max_batch_total_size_bytes < self.max_batch_total_size_bytes:
                 self.max_batch_total_size_bytes = server_max_batch_total_size_bytes
@@ -1102,7 +1156,9 @@
         self.batch_read_supported = False
         try:
             request = remote_execution_pb2.BatchReadBlobsRequest()
-            response = self.cas.BatchReadBlobs(request)
+            for attempt in _retry():
+                with attempt:
+                    response = self.cas.BatchReadBlobs(request)
             self.batch_read_supported = True
         except grpc.RpcError as e:
             if e.code() != grpc.StatusCode.UNIMPLEMENTED:
@@ -1112,7 +1168,9 @@
         self.batch_update_supported = False
         try:
             request = remote_execution_pb2.BatchUpdateBlobsRequest()
-            response = self.cas.BatchUpdateBlobs(request)
+            for attempt in _retry():
+                with attempt:
+                    response = self.cas.BatchUpdateBlobs(request)
             self.batch_update_supported = True
         except grpc.RpcError as e:
             if (e.code() != grpc.StatusCode.UNIMPLEMENTED and
@@ -1153,7 +1211,9 @@
         if len(self._request.digests) == 0:
             return
 
-        batch_response = self._remote.cas.BatchReadBlobs(self._request)
+        for attempt in _retry():
+            with attempt:
+                batch_response = self._remote.cas.BatchReadBlobs(self._request)
 
         for response in batch_response.responses:
             if response.status.code == grpc.StatusCode.NOT_FOUND.value[0]:
@@ -1201,7 +1261,9 @@
         if len(self._request.requests) == 0:
             return
 
-        batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
+        for attempt in _retry():
+            with attempt:
+                batch_response = self._remote.cas.BatchUpdateBlobs(self._request)
 
         for response in batch_response.responses:
             if response.status.code != grpc.StatusCode.OK.value[0]:
diff -Nru buildstream-1.4.1/buildstream/element.py buildstream-1.4.3/buildstream/element.py
--- buildstream-1.4.1/buildstream/element.py	2019-09-03 12:15:20.000000000 +0300
+++ buildstream-1.4.3/buildstream/element.py	2020-05-12 11:35:41.000000000 +0300
@@ -2362,7 +2362,7 @@
                 if include_file and not exclude_file:
                     yield filename.lstrip(os.sep)
 
-    def __file_is_whitelisted(self, pattern):
+    def __file_is_whitelisted(self, path):
         # Considered storing the whitelist regex for re-use, but public data
         # can be altered mid-build.
         # Public data is not guaranteed to stay the same for the duration of
@@ -2374,7 +2374,7 @@
             whitelist_expressions = [utils._glob2re(self.__variables.subst(exp.strip())) for exp in whitelist]
             expression = ('^(?:' + '|'.join(whitelist_expressions) + ')$')
             self.__whitelist_regex = re.compile(expression)
-        return self.__whitelist_regex.match(pattern)
+        return self.__whitelist_regex.match(path) or self.__whitelist_regex.match(os.path.join(os.sep, path))
 
     # __extract():
     #
diff -Nru buildstream-1.4.1/buildstream/_frontend/app.py buildstream-1.4.3/buildstream/_frontend/app.py
--- buildstream-1.4.1/buildstream/_frontend/app.py	2019-09-03 12:15:20.000000000 +0300
+++ buildstream-1.4.3/buildstream/_frontend/app.py	2020-05-12 11:35:41.000000000 +0300
@@ -504,7 +504,10 @@
                 choice = click.prompt("Choice:",
                                       value_proc=_prefix_choice_value_proc(['continue', 'quit', 'terminate']),
                                       default='continue', err=True)
-            except click.Abort:
+            except (click.Abort, SystemError):
+                # In some cases, the readline buffer underlying the prompt gets corrupted on the second CTRL+C
+                # This throws a SystemError, which doesn't seem to be problematic for the rest of the program
+
                 # Ensure a newline after automatically printed '^C'
                 click.echo("", err=True)
                 choice = 'terminate'
@@ -599,7 +602,10 @@
                 try:
                     choice = click.prompt("Choice:", default='continue', err=True,
                                           value_proc=_prefix_choice_value_proc(choices))
-                except click.Abort:
+                except (click.Abort, SystemError):
+                    # In some cases, the readline buffer underlying the prompt gets corrupted on the second CTRL+C
+                    # This throws a SystemError, which doesn't seem to be problematic for the rest of the program
+
                     # Ensure a newline after automatically printed '^C'
                     click.echo("", err=True)
                     choice = 'terminate'
diff -Nru buildstream-1.4.1/buildstream/_frontend/complete.py buildstream-1.4.3/buildstream/_frontend/complete.py
--- buildstream-1.4.1/buildstream/_frontend/complete.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/buildstream/_frontend/complete.py	2020-05-12 11:35:41.000000000 +0300
@@ -31,7 +31,7 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #
-import collections
+import collections.abc
 import copy
 import os
 
@@ -218,7 +218,7 @@
         return True
     if cmd_param.nargs == -1:
         return True
-    if isinstance(current_param_values, collections.Iterable) \
+    if isinstance(current_param_values, collections.abc.Iterable) \
             and cmd_param.nargs > 1 and len(current_param_values) < cmd_param.nargs:
         return True
     return False
diff -Nru buildstream-1.4.1/buildstream/_fuse/mount.py buildstream-1.4.3/buildstream/_fuse/mount.py
--- buildstream-1.4.1/buildstream/_fuse/mount.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/buildstream/_fuse/mount.py	2020-05-12 11:35:41.000000000 +0300
@@ -184,7 +184,7 @@
         # Run fuse in foreground in this child process, internally libfuse
         # will handle SIGTERM and gracefully exit it's own little main loop.
         #
-        FUSE(self.__operations, self.__mountpoint, nothreads=True, foreground=True, nonempty=True)
+        FUSE(self.__operations, self.__mountpoint, nothreads=True, foreground=True)
 
         # Explicit 0 exit code, if the operations crashed for some reason, the exit
         # code will not be 0, and we want to know about it.
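A short note on the collections hunks (here in complete.py, and in _yaml.py
further down): the ABC aliases that lived directly under `collections` have
been deprecated since Python 3.3 and were removed in Python 3.10, and Python
3.8 is where the warnings became hard to ignore. The following lines (plain
standard library, nothing patch-specific) show the portable spelling the
patch switches to:

    import collections.abc

    # `collections.Mapping` and friends warn on 3.8 and fail outright on
    # newer interpreters; collections.abc is the stable home.
    assert isinstance({}, collections.abc.Mapping)
    assert isinstance([], collections.abc.Iterable)
    assert not isinstance("text", collections.abc.Mapping)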
diff -Nru buildstream-1.4.1/buildstream/_ostree.py buildstream-1.4.3/buildstream/_ostree.py
--- buildstream-1.4.1/buildstream/_ostree.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/buildstream/_ostree.py	2020-05-12 11:35:41.000000000 +0300
@@ -271,6 +271,20 @@
     try:
         gfile = Gio.File.new_for_uri(key_url)
         stream = gfile.read()
-        repo.remote_gpg_import(remote, stream, None, 0, None)
+
+        # In ostree commit `v2019.2-10-gaa5df899`, the python
+        # facing API was changed by way of modifying the
+        # instrospection annotations.
+        #
+        # This means we need to call this API in two different
+        # ways depending on which ostree version is installed.
+        #
+        try:
+            # New API
+            repo.remote_gpg_import(remote, stream, None, None)
+        except TypeError:
+            # Old API
+            repo.remote_gpg_import(remote, stream, None, 0, None)
+
     except GLib.GError as e:
         raise OSTreeError("Failed to add gpg key from url '{}': {}".format(key_url, e.message)) from e
diff -Nru buildstream-1.4.1/buildstream/plugins/elements/cmake.yaml buildstream-1.4.3/buildstream/plugins/elements/cmake.yaml
--- buildstream-1.4.1/buildstream/plugins/elements/cmake.yaml	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/buildstream/plugins/elements/cmake.yaml	2020-05-12 11:35:41.000000000 +0300
@@ -19,7 +19,7 @@
 
   cmake-args: |
 
     -DCMAKE_INSTALL_PREFIX:PATH="%{prefix}" \
-    -DCMAKE_INSTALL_LIBDIR=%{lib} %{cmake-extra} %{cmake-global} %{cmake-local}
+    -DCMAKE_INSTALL_LIBDIR:PATH="%{lib}" %{cmake-extra} %{cmake-global} %{cmake-local}
 
   cmake: |
diff -Nru buildstream-1.4.1/buildstream/plugins/sources/_downloadablefilesource.py buildstream-1.4.3/buildstream/plugins/sources/_downloadablefilesource.py
--- buildstream-1.4.1/buildstream/plugins/sources/_downloadablefilesource.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/buildstream/plugins/sources/_downloadablefilesource.py	2020-05-12 11:35:41.000000000 +0300
@@ -107,6 +107,7 @@
             default_name = os.path.basename(self.url)
             request = urllib.request.Request(self.url)
             request.add_header('Accept', '*/*')
+            request.add_header('User-Agent', 'BuildStream/1')
 
             # We do not use etag in case what we have in cache is
             # not matching ref in order to be able to recover from
@@ -121,7 +122,11 @@
             with contextlib.closing(urllib.request.urlopen(request)) as response:
                 info = response.info()
 
-                etag = info['ETag'] if 'ETag' in info else None
+                # some servers don't honor the 'If-None-Match' header
+                if self.ref and etag and info["ETag"] == etag:
+                    return self.ref
+
+                etag = info["ETag"]
 
                 filename = info.get_filename(default_name)
                 filename = os.path.basename(filename)
diff -Nru buildstream-1.4.1/buildstream/plugins/sources/pip.py buildstream-1.4.3/buildstream/plugins/sources/pip.py
--- buildstream-1.4.1/buildstream/plugins/sources/pip.py	2019-09-03 12:15:20.000000000 +0300
+++ buildstream-1.4.3/buildstream/plugins/sources/pip.py	2020-05-12 11:35:41.000000000 +0300
@@ -90,6 +90,7 @@
     'python3.5',
     'python3.6',
     'python3.7',
+    'python3.8',
 ]
 
 # List of allowed extensions taken from
diff -Nru buildstream-1.4.1/buildstream/_project.py buildstream-1.4.3/buildstream/_project.py
--- buildstream-1.4.1/buildstream/_project.py	2019-09-03 12:15:20.000000000 +0300
+++ buildstream-1.4.3/buildstream/_project.py	2020-05-13 13:19:40.000000000 +0300
@@ -562,15 +562,6 @@
     def _load_pass(self, config, output, *,
                    ignore_unknown=False):
 
-        # Element and Source type configurations will be composited later onto
-        # element/source types, so we delete it from here and run our final
-        # assertion after.
-        output.element_overrides = _yaml.node_get(config, Mapping, 'elements', default_value={})
-        output.source_overrides = _yaml.node_get(config, Mapping, 'sources', default_value={})
-        config.pop('elements', None)
-        config.pop('sources', None)
-        _yaml.node_final_assertions(config)
-
         self._load_plugin_factories(config, output)
 
         # Load project options
@@ -594,11 +585,16 @@
         # Now resolve any conditionals in the remaining configuration,
         # any conditionals specified for project option declarations,
         # or conditionally specifying the project name; will be ignored.
-        #
-        # Don't forget to also resolve options in the element and source overrides.
         output.options.process_node(config)
-        output.options.process_node(output.element_overrides)
-        output.options.process_node(output.source_overrides)
+
+        # Element and Source type configurations will be composited later onto
+        # element/source types, so we delete it from here and run our final
+        # assertion after.
+        output.element_overrides = _yaml.node_get(config, Mapping, 'elements', default_value={})
+        output.source_overrides = _yaml.node_get(config, Mapping, 'sources', default_value={})
+        config.pop('elements', None)
+        config.pop('sources', None)
+        _yaml.node_final_assertions(config)
 
         # Load base variables
         output.base_variables = _yaml.node_get(config, Mapping, 'variables')
diff -Nru buildstream-1.4.1/buildstream/_scheduler/jobs/job.py buildstream-1.4.3/buildstream/_scheduler/jobs/job.py
--- buildstream-1.4.1/buildstream/_scheduler/jobs/job.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/buildstream/_scheduler/jobs/job.py	2020-05-12 11:35:41.000000000 +0300
@@ -1,5 +1,6 @@
 #
 #  Copyright (C) 2018 Codethink Limited
+#  Copyright (C) 2019 Bloomberg Finance LP
 #
 #  This program is free software; you can redistribute it and/or
 #  modify it under the terms of the GNU Lesser General Public
@@ -32,6 +33,7 @@
 from ..._exceptions import ImplError, BstError, set_last_task_error, SkipJob
 from ..._message import Message, MessageType, unconditional_messages
 from ... import _signals, utils
+from .. import _multiprocessing
 
 # Return code values shutdown of job handling child processes
 #
@@ -64,15 +66,6 @@
         self.message = message
 
 
-# Process class that doesn't call waitpid on its own.
-# This prevents conflicts with the asyncio child watcher.
-class Process(multiprocessing.Process):
-    # pylint: disable=attribute-defined-outside-init
-    def start(self):
-        self._popen = self._Popen(self)
-        self._sentinel = self._popen.sentinel
-
-
 # Job()
 #
 # The Job object represents a parallel task, when calling Job.spawn(),
@@ -127,39 +120,23 @@
         self._parent_start_listening()
 
         # Spawn the process
-        self._process = Process(target=self._child_action, args=[self._queue])
+        self._process = _multiprocessing.AsyncioSafeProcess(target=self._child_action, args=[self._queue])
 
         # Block signals which are handled in the main process such that
         # the child process does not inherit the parent's state, but the main
         # process will be notified of any signal after we launch the child.
         #
         with _signals.blocked([signal.SIGINT, signal.SIGTSTP, signal.SIGTERM], ignore=False):
-            self._process.start()
+            with asyncio.get_child_watcher() as watcher:
+                self._process.start()
+                # Register the process to call `_parent_child_completed` once it is done
 
+                # Here we delay the call to the next loop tick. This is in order to be running
+                # in the main thread, as the callback itself must be thread safe.
+                def on_completion(pid, returncode):
+                    asyncio.get_event_loop().call_soon(self._parent_child_completed, pid, returncode)
+
+                watcher.add_child_handler(self._process.pid, on_completion)
 
-        # Wait for the child task to complete.
-        #
-        # This is a tricky part of python which doesnt seem to
-        # make it to the online docs:
-        #
-        #   o asyncio.get_child_watcher() will return a SafeChildWatcher() instance
-        #     which is the default type of watcher, and the instance belongs to the
-        #     "event loop policy" in use (so there is only one in the main process).
-        #
-        #   o SafeChildWatcher() will register a SIGCHLD handler with the asyncio
-        #     loop, and will selectively reap any child pids which have been
-        #     terminated.
-        #
-        #   o At registration time, the process will immediately be checked with
-        #     `os.waitpid()` and will be reaped immediately, before add_child_handler()
-        #     returns.
-        #
-        # The self._parent_child_completed callback passed here will normally
-        # be called after the child task has been reaped with `os.waitpid()`, in
-        # an event loop callback. Otherwise, if the job completes too fast, then
-        # the callback is called immediately.
-        #
-        self._watcher = asyncio.get_child_watcher()
-        self._watcher.add_child_handler(self._process.pid, self._parent_child_completed)
 
     # terminate()
     #
@@ -182,21 +159,15 @@
 
         self._terminated = True
 
-    # terminate_wait()
+    # get_terminated()
     #
-    # Wait for terminated jobs to complete
-    #
-    # Args:
-    #    timeout (float): Seconds to wait
+    # Check if a job has been terminated.
     #
     # Returns:
-    #    (bool): True if the process terminated cleanly, otherwise False
+    #    (bool): True in the main process if Job.terminate() was called.
     #
-    def terminate_wait(self, timeout):
-
-        # Join the child process after sending SIGTERM
-        self._process.join(timeout)
-        return self._process.exitcode is not None
+    def get_terminated(self):
+        return self._terminated
 
     # kill()
     #
diff -Nru buildstream-1.4.1/buildstream/_scheduler/_multiprocessing.py buildstream-1.4.3/buildstream/_scheduler/_multiprocessing.py
--- buildstream-1.4.1/buildstream/_scheduler/_multiprocessing.py	1970-01-01 02:00:00.000000000 +0200
+++ buildstream-1.4.3/buildstream/_scheduler/_multiprocessing.py	2020-05-12 11:35:41.000000000 +0300
@@ -0,0 +1,79 @@
+#
+#  Copyright (C) 2019 Bloomberg Finance LP
+#
+#  This program is free software; you can redistribute it and/or
+#  modify it under the terms of the GNU Lesser General Public
+#  License as published by the Free Software Foundation; either
+#  version 2 of the License, or (at your option) any later version.
+#
+#  This library is distributed in the hope that it will be useful,
+#  but WITHOUT ANY WARRANTY; without even the implied warranty of
+#  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+#  Lesser General Public License for more details.
+#
+#  You should have received a copy of the GNU Lesser General Public
+#  License along with this library. If not, see <http://www.gnu.org/licenses/>.
+#
+
+# TLDR:
+# ALWAYS use `.AsyncioSafeProcess` when you have an asyncio event loop running and need a `multiprocessing.Process`
+#
+#
+# The upstream asyncio library doesn't play well with forking subprocesses while an event loop is running.
+#
+# The main problem that affects us is that the parent and the child will share some file handlers.
+# The most important one for us is the sig_handler_fd, which the loop uses to buffer signals received
+# by the app so that the asyncio loop can treat them afterwards.
+#
+# This sharing means that when we send a signal to the child, the sighandler in the child will write
+# it back to the parent sig_handler_fd, making the parent have to treat it too.
+# This is a problem for example when we sigterm the process. The scheduler will send sigterms to all its children,
+# which in turn will make the scheduler receive N SIGTERMs (one per child). Which in turn will send sigterms to
+# the children...
+#
+# We therefore provide a `AsyncioSafeProcess` derived from multiprocessing.Process that automatically
+# tries to cleanup the loop and never calls `waitpid` on the child process, which breaks our child watchers.
+#
+#
+# Relevant issues:
+#  - Asyncio: support fork (https://bugs.python.org/issue21998)
+#  - Asyncio: support multiprocessing (support fork) (https://bugs.python.org/issue22087)
+#  - Signal delivered to a subprocess triggers parent's handler (https://bugs.python.org/issue31489)
+#
+#
+
+import multiprocessing
+import signal
+import sys
+from asyncio import set_event_loop_policy
+
+
+# _AsyncioSafeForkAwareProcess()
+#
+# Process class that doesn't call waitpid on its own.
+# This prevents conflicts with the asyncio child watcher.
+#
+# Also automatically close any running asyncio loop before calling
+# the actual run target
+#
+class _AsyncioSafeForkAwareProcess(multiprocessing.Process):
+    # pylint: disable=attribute-defined-outside-init
+    def start(self):
+        self._popen = self._Popen(self)
+        self._sentinel = self._popen.sentinel
+
+    def run(self):
+        signal.set_wakeup_fd(-1)
+        set_event_loop_policy(None)
+
+        super().run()
+
+
+if sys.platform != "win32":
+    # Set the default event loop policy to automatically close our asyncio loop in child processes
+    AsyncioSafeProcess = _AsyncioSafeForkAwareProcess
+
+else:
+    # Windows doesn't support ChildWatcher that way anyways, we'll need another
+    # implementation if we want it
+    AsyncioSafeProcess = multiprocessing.Process
diff -Nru buildstream-1.4.1/buildstream/_scheduler/scheduler.py buildstream-1.4.3/buildstream/_scheduler/scheduler.py
--- buildstream-1.4.1/buildstream/_scheduler/scheduler.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/buildstream/_scheduler/scheduler.py	2020-05-12 11:35:41.000000000 +0300
@@ -137,6 +137,12 @@
         # Hold on to the queues to process
         self.queues = queues
 
+        # NOTE: Enforce use of `SafeChildWatcher` as we generally don't want
+        # background threads.
+        # In Python 3.8+, `ThreadedChildWatcher` is the default watcher, and
+        # not `SafeChildWatcher`.
+        asyncio.set_child_watcher(asyncio.SafeChildWatcher())
+
         # Ensure that we have a fresh new event loop, in case we want
         # to run another test in this thread.
         self.loop = asyncio.new_event_loop()
@@ -516,21 +522,15 @@
         self.loop.remove_signal_handler(signal.SIGTERM)
 
     def _terminate_jobs_real(self):
-        # 20 seconds is a long time, it can take a while and sometimes
-        # we still fail, need to look deeper into this again.
-        wait_start = datetime.datetime.now()
-        wait_limit = 20.0
+        def kill_jobs():
+            for job_ in self._active_jobs:
+                job_.kill()
 
-        # First tell all jobs to terminate
-        for job in self._active_jobs:
-            job.terminate()
+        # Schedule all jobs to be killed if they have not exited in 20 sec
+        self.loop.call_later(20, kill_jobs)
 
-        # Now wait for them to really terminate
         for job in self._active_jobs:
-            elapsed = datetime.datetime.now() - wait_start
-            timeout = max(wait_limit - elapsed.total_seconds(), 0.0)
-            if not job.terminate_wait(timeout):
-                job.kill()
+            job.terminate()
 
     # Regular timeout for driving status in the UI
     def _tick(self):
diff -Nru buildstream-1.4.1/buildstream/_version.py buildstream-1.4.3/buildstream/_version.py
--- buildstream-1.4.1/buildstream/_version.py	2019-09-03 14:07:12.000000000 +0300
+++ buildstream-1.4.3/buildstream/_version.py	2020-05-13 13:38:33.000000000 +0300
@@ -8,11 +8,11 @@
 
 version_json = '''
 {
- "date": "2019-09-03T11:05:36+0000",
+ "date": "2020-05-13T19:31:51+0900",
  "dirty": false,
  "error": null,
- "full-revisionid": "2ece1287f75c3a1eaf4bd1a9f4f9c7b895fe3993",
- "version": "1.4.1"
+ "full-revisionid": "89765b759230c8d47e27fd52527cf3df61be9ae1",
+ "version": "1.4.3"
 }
 '''  # END VERSION_JSON
diff -Nru buildstream-1.4.1/buildstream/_yaml.py buildstream-1.4.3/buildstream/_yaml.py
--- buildstream-1.4.1/buildstream/_yaml.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/buildstream/_yaml.py	2020-05-12 11:35:41.000000000 +0300
@@ -280,7 +280,7 @@
             provenance.members[key] = member
 
             target_value = target.get(key)
-            if isinstance(value, collections.Mapping):
+            if isinstance(value, collections.abc.Mapping):
                 node_decorate_dict(filename, target_value, value, toplevel)
             elif isinstance(value, list):
                 member.elements = node_decorate_list(filename, target_value, value, toplevel)
@@ -295,7 +295,7 @@
         target_item = target[idx]
         element = ElementProvenance(filename, source, idx, toplevel)
 
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             node_decorate_dict(filename, target_item, item, toplevel)
         elif isinstance(item, list):
             element.elements = node_decorate_list(filename, target_item, item, toplevel)
@@ -569,7 +569,7 @@
 #
 def is_composite_list(node):
 
-    if isinstance(node, collections.Mapping):
+    if isinstance(node, collections.abc.Mapping):
         has_directives = False
         has_keys = False
 
@@ -838,7 +838,7 @@
 
         target_value = target.get(key)
 
-        if isinstance(source_value, collections.Mapping):
+        if isinstance(source_value, collections.abc.Mapping):
 
             # Handle creating new dicts on target side
             if target_value is None:
@@ -853,7 +853,7 @@
                 # Add a new provenance member element to the containing dict
                 target_provenance.members[key] = source_provenance.members[key]
 
-            if not isinstance(target_value, collections.Mapping):
+            if not isinstance(target_value, collections.abc.Mapping):
                 raise CompositeTypeError(thispath, type(target_value), type(source_value))
 
             # Recurse into matching dictionary
@@ -914,7 +914,7 @@
 #
 def node_sanitize(node):
 
-    if isinstance(node, collections.Mapping):
+    if isinstance(node, collections.abc.Mapping):
 
         result = SanitizedDict()
 
@@ -1052,7 +1052,7 @@
 def node_chain_copy(source):
     copy = ChainMap({}, source)
     for key, value in source.items():
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             copy[key] = node_chain_copy(value)
         elif isinstance(value, list):
             copy[key] = list_chain_copy(value)
@@ -1065,7 +1065,7 @@
 def list_chain_copy(source):
     copy = []
     for item in source:
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             copy.append(node_chain_copy(item))
         elif isinstance(item, list):
             copy.append(list_chain_copy(item))
@@ -1080,7 +1080,7 @@
 def node_copy(source):
     copy = {}
     for key, value in source.items():
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             copy[key] = node_copy(value)
         elif isinstance(value, list):
             copy[key] = list_copy(value)
@@ -1097,7 +1097,7 @@
 def list_copy(source):
     copy = []
     for item in source:
-        if isinstance(item, collections.Mapping):
+        if isinstance(item, collections.abc.Mapping):
             copy.append(node_copy(item))
         elif isinstance(item, list):
             copy.append(list_copy(item))
@@ -1132,7 +1132,7 @@
             raise LoadError(LoadErrorReason.TRAILING_LIST_DIRECTIVE,
                             "{}: Attempt to override non-existing list".format(provenance))
 
-    if isinstance(value, collections.Mapping):
+    if isinstance(value, collections.abc.Mapping):
         node_final_assertions(value)
     elif isinstance(value, list):
         list_final_assertions(value)
@@ -1140,7 +1140,7 @@
 
 def list_final_assertions(values):
     for value in values:
-        if isinstance(value, collections.Mapping):
+        if isinstance(value, collections.abc.Mapping):
             node_final_assertions(value)
         elif isinstance(value, list):
             list_final_assertions(value)
diff -Nru buildstream-1.4.1/BuildStream.egg-info/PKG-INFO buildstream-1.4.3/BuildStream.egg-info/PKG-INFO
--- buildstream-1.4.1/BuildStream.egg-info/PKG-INFO	2019-09-03 14:07:12.000000000 +0300
+++ buildstream-1.4.3/BuildStream.egg-info/PKG-INFO	2020-05-13 13:38:33.000000000 +0300
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: BuildStream
-Version: 1.4.1
+Version: 1.4.3
 Summary: A framework for modelling build pipelines in YAML
 Home-page: https://gitlab.com/BuildStream/buildstream
 Author: BuildStream Developers
diff -Nru buildstream-1.4.1/BuildStream.egg-info/SOURCES.txt buildstream-1.4.3/BuildStream.egg-info/SOURCES.txt
--- buildstream-1.4.1/BuildStream.egg-info/SOURCES.txt	2019-09-03 14:07:12.000000000 +0300
+++ buildstream-1.4.3/BuildStream.egg-info/SOURCES.txt	2020-05-13 13:38:33.000000000 +0300
@@ -118,6 +118,7 @@
 buildstream/_protos/google/rpc/status_pb2.py
 buildstream/_protos/google/rpc/status_pb2_grpc.py
 buildstream/_scheduler/__init__.py
+buildstream/_scheduler/_multiprocessing.py
 buildstream/_scheduler/resources.py
 buildstream/_scheduler/scheduler.py
 buildstream/_scheduler/jobs/__init__.py
@@ -323,6 +324,7 @@
 tests/format/include.py
 tests/format/include_composition.py
 tests/format/listdirectiveerrors.py
+tests/format/option-list-directive.py
 tests/format/optionarch.py
 tests/format/optionbool.py
 tests/format/optioneltmask.py
@@ -403,6 +405,7 @@
 tests/format/option-flags/project.conf
 tests/format/option-flags-missing/element.bst
 tests/format/option-flags-missing/project.conf
+tests/format/option-list-directive/project.conf
 tests/format/option-overrides/element.bst
 tests/format/option-overrides/project.conf
 tests/format/options/compound-and-condition/element.bst
@@ -528,13 +531,17 @@
 tests/frontend/consistencyerror/plugins/__init__.py
 tests/frontend/consistencyerror/plugins/consistencybug.py
 tests/frontend/consistencyerror/plugins/consistencyerror.py
+tests/frontend/overlaps/a-whitelisted-abs.bst
 tests/frontend/overlaps/a-whitelisted.bst
 tests/frontend/overlaps/a.bst
+tests/frontend/overlaps/b-whitelisted-abs.bst
 tests/frontend/overlaps/b-whitelisted.bst
 tests/frontend/overlaps/b.bst
+tests/frontend/overlaps/c-whitelisted-abs.bst
 tests/frontend/overlaps/c-whitelisted.bst
 tests/frontend/overlaps/c.bst
 tests/frontend/overlaps/collect-partially-whitelisted.bst
+tests/frontend/overlaps/collect-whitelisted-abs.bst
 tests/frontend/overlaps/collect-whitelisted.bst
 tests/frontend/overlaps/collect.bst
 tests/frontend/overlaps/script.bst
diff -Nru buildstream-1.4.1/debian/changelog buildstream-1.4.3/debian/changelog
--- buildstream-1.4.1/debian/changelog	2019-09-29 01:31:09.000000000 +0300
+++ buildstream-1.4.3/debian/changelog	2020-07-18 10:03:58.000000000 +0300
@@ -1,3 +1,12 @@
+buildstream (1.4.3-0.1) unstable; urgency=low
+
+  * Non-maintainer upload.
+  * New upstream release.
+    - Works with Python 3.8. (Closes: #962197)
+  * Add the missing dependency on bubblewrap. (Closes: #962205)
+
+ -- Adrian Bunk <b...@debian.org>  Sat, 18 Jul 2020 10:03:58 +0300
+
 buildstream (1.4.1-1) unstable; urgency=medium
 
   * New upstream release (Closes: #933395)
diff -Nru buildstream-1.4.1/debian/control buildstream-1.4.3/debian/control
--- buildstream-1.4.1/debian/control	2019-09-29 01:31:09.000000000 +0300
+++ buildstream-1.4.3/debian/control	2020-07-18 10:03:58.000000000 +0300
@@ -63,6 +63,7 @@
 Section: python
 Architecture: all
 Depends: ${misc:Depends},
+         bubblewrap,
         ${python3:Depends},
         python3-fuse
 Description: Python library for BuildStream toolset
diff -Nru buildstream-1.4.1/debian/control.in buildstream-1.4.3/debian/control.in
--- buildstream-1.4.1/debian/control.in	2019-09-29 01:31:09.000000000 +0300
+++ buildstream-1.4.3/debian/control.in	2020-07-18 10:03:58.000000000 +0300
@@ -59,6 +59,7 @@
 Section: python
 Architecture: all
 Depends: ${misc:Depends},
+         bubblewrap,
        ${python3:Depends},
        python3-fuse
 Description: Python library for BuildStream toolset
diff -Nru buildstream-1.4.1/NEWS buildstream-1.4.3/NEWS
--- buildstream-1.4.1/NEWS	2019-09-03 14:05:52.000000000 +0300
+++ buildstream-1.4.3/NEWS	2020-05-13 13:31:19.000000000 +0300
@@ -1,4 +1,38 @@
 =================
+buildstream 1.4.3
+=================
+
+  o Fix support for conditional list append/prepend in project.conf,
+    Merge request !1857
+
+  o Fix internal imports to import from "collections" instead
+    of "collections.abc", this improves support for Python 3.8,
+    see issue #831
+
+  o Fix some downloads from gitlab.com by setting custom user agent,
+    fixes issue #1285
+
+  o Work around python API break from ostree's repo.remote_gpg_import(),
+    this was changed in ostree commit v2019.2-10-gaa5df899, and we now
+    have a fallback to support both versions of the API, see merge request !1917.
+
+=================
+buildstream 1.4.2
+=================
+
+  o Support for python 3.8
+
+  o Fix a stacktrace with a hang we can experience when we CTRL-C a job twice.
+
+  o Workaround some servers which do not honor the 'If-None-Match' HTTP header
+    and avoid downloading files redundantly in these cases.
+
+  o Allow specifying absolute paths in overlap-whitelist (issue #721)
+
+  o Support systems with fuse3 (avoid passing unsupported argument
+    to fusermount3)
+
+=================
 buildstream 1.4.1
 =================
diff -Nru buildstream-1.4.1/PKG-INFO buildstream-1.4.3/PKG-INFO
--- buildstream-1.4.1/PKG-INFO	2019-09-03 14:07:12.000000000 +0300
+++ buildstream-1.4.3/PKG-INFO	2020-05-13 13:38:33.000000000 +0300
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: BuildStream
-Version: 1.4.1
+Version: 1.4.3
 Summary: A framework for modelling build pipelines in YAML
 Home-page: https://gitlab.com/BuildStream/buildstream
 Author: BuildStream Developers
diff -Nru buildstream-1.4.1/requirements/dev-requirements.txt buildstream-1.4.3/requirements/dev-requirements.txt
--- buildstream-1.4.1/requirements/dev-requirements.txt	2019-09-03 13:23:08.000000000 +0300
+++ buildstream-1.4.3/requirements/dev-requirements.txt	2020-05-12 11:35:41.000000000 +0300
@@ -19,7 +19,7 @@
 mccabe==0.6.1
 more-itertools==7.2.0
 packaging==19.1
-pluggy==0.12.0
+pluggy==0.13.1
 py==1.8.0
 pyparsing==2.4.2
 pytest-cache==1.0
diff -Nru buildstream-1.4.1/tests/format/option-list-directive/project.conf buildstream-1.4.3/tests/format/option-list-directive/project.conf
--- buildstream-1.4.1/tests/format/option-list-directive/project.conf	1970-01-01 02:00:00.000000000 +0200
+++ buildstream-1.4.3/tests/format/option-list-directive/project.conf	2020-05-12 11:35:41.000000000 +0300
@@ -0,0 +1,18 @@
+name: test
+
+options:
+  shell_mount_devices:
+    type: bool
+    description: whether to mount devices in the shell
+    default: false
+
+shell:
+  host-files:
+  - '/etc/passwd'
+  - '/etc/group'
+
+  (?):
+  - shell_mount_devices:
+      host-files:
+        (>):
+        - '/dev/dri'
diff -Nru buildstream-1.4.1/tests/format/option-list-directive.py buildstream-1.4.3/tests/format/option-list-directive.py
--- buildstream-1.4.1/tests/format/option-list-directive.py	1970-01-01 02:00:00.000000000 +0200
+++ buildstream-1.4.3/tests/format/option-list-directive.py	2020-05-12 11:35:41.000000000 +0300
@@ -0,0 +1,16 @@
+import os
+import pytest
+from tests.testutils.runcli import cli
+
+# Project directory
+DATA_DIR = os.path.dirname(os.path.realpath(__file__))
+
+
+@pytest.mark.datafiles(DATA_DIR)
+@pytest.mark.parametrize("mount_devices", [("true"), ("false")])
+def test_override(cli, datafiles, mount_devices):
+    project = os.path.join(datafiles.dirname, datafiles.basename, "option-list-directive")
+
+    bst_args = ["--option", "shell_mount_devices", mount_devices, "build"]
+    result = cli.run(project=project, silent=True, args=bst_args)
+    result.assert_success()
diff -Nru buildstream-1.4.1/tests/format/variables.py buildstream-1.4.3/tests/format/variables.py
--- buildstream-1.4.1/tests/format/variables.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/tests/format/variables.py	2020-05-12 11:35:41.000000000 +0300
@@ -20,7 +20,7 @@
     ('autotools.bst', 'make-install', "make -j1 DESTDIR=\"/buildstream-install\" install"),
     ('cmake.bst', 'cmake', "cmake -B_builddir -H. -G\"Unix Makefiles\" -DCMAKE_INSTALL_PREFIX:PATH=\"/usr\" \\\n" +
-     "-DCMAKE_INSTALL_LIBDIR=lib "),
+     "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\" "),
     ('distutils.bst', 'python-install', "python3 setup.py install --prefix \"/usr\" \\\n" +
      "--root \"/buildstream-install\""),
@@ -46,7 +46,7 @@
     ('autotools.bst', 'make-install', "make -j1 DESTDIR=\"/custom/install/root\" install"),
-G\"Ninja\" -DCMAKE_INSTALL_PREFIX:PATH=\"/opt\" \\\n" + - "-DCMAKE_INSTALL_LIBDIR=lib "), + "-DCMAKE_INSTALL_LIBDIR:PATH=\"lib\" "), ('distutils.bst', 'python-install', "python3 setup.py install --prefix \"/opt\" \\\n" + "--root \"/custom/install/root\""), diff -Nru buildstream-1.4.1/tests/frontend/overlaps/a-whitelisted-abs.bst buildstream-1.4.3/tests/frontend/overlaps/a-whitelisted-abs.bst --- buildstream-1.4.1/tests/frontend/overlaps/a-whitelisted-abs.bst 1970-01-01 02:00:00.000000000 +0200 +++ buildstream-1.4.3/tests/frontend/overlaps/a-whitelisted-abs.bst 2020-05-12 11:35:41.000000000 +0300 @@ -0,0 +1,13 @@ +kind: import +config: + source: / + target: / +depends: +- b-whitelisted.bst +sources: +- kind: local + path: "a" +public: + bst: + overlap-whitelist: + - "/file*" diff -Nru buildstream-1.4.1/tests/frontend/overlaps/b-whitelisted-abs.bst buildstream-1.4.3/tests/frontend/overlaps/b-whitelisted-abs.bst --- buildstream-1.4.1/tests/frontend/overlaps/b-whitelisted-abs.bst 1970-01-01 02:00:00.000000000 +0200 +++ buildstream-1.4.3/tests/frontend/overlaps/b-whitelisted-abs.bst 2020-05-12 11:35:41.000000000 +0300 @@ -0,0 +1,16 @@ +kind: import +config: + source: / + target: / +depends: +- c.bst +sources: +- kind: local + path: "b" +variables: + FILE: /file +public: + bst: + overlap-whitelist: + - /file2 + - "%{FILE}3" diff -Nru buildstream-1.4.1/tests/frontend/overlaps/collect-whitelisted-abs.bst buildstream-1.4.3/tests/frontend/overlaps/collect-whitelisted-abs.bst --- buildstream-1.4.1/tests/frontend/overlaps/collect-whitelisted-abs.bst 1970-01-01 02:00:00.000000000 +0200 +++ buildstream-1.4.3/tests/frontend/overlaps/collect-whitelisted-abs.bst 2020-05-12 11:35:41.000000000 +0300 @@ -0,0 +1,9 @@ +kind: compose + +depends: +- filename: a-whitelisted-abs.bst + type: build +- filename: b-whitelisted-abs.bst + type: build +- filename: c.bst + type: build diff -Nru buildstream-1.4.1/tests/frontend/overlaps/c-whitelisted-abs.bst buildstream-1.4.3/tests/frontend/overlaps/c-whitelisted-abs.bst --- buildstream-1.4.1/tests/frontend/overlaps/c-whitelisted-abs.bst 1970-01-01 02:00:00.000000000 +0200 +++ buildstream-1.4.3/tests/frontend/overlaps/c-whitelisted-abs.bst 2020-05-12 11:35:41.000000000 +0300 @@ -0,0 +1,11 @@ +kind: import +config: + source: / + target: / +sources: +- kind: local + path: "c" +public: + bst: + overlap-whitelist: + - "/file*" diff -Nru buildstream-1.4.1/tests/frontend/overlaps.py buildstream-1.4.3/tests/frontend/overlaps.py --- buildstream-1.4.1/tests/frontend/overlaps.py 2019-09-03 12:15:20.000000000 +0300 +++ buildstream-1.4.3/tests/frontend/overlaps.py 2020-05-12 11:35:41.000000000 +0300 @@ -47,20 +47,22 @@ @pytest.mark.datafiles(DATA_DIR) -def test_overlaps_whitelist(cli, datafiles): +@pytest.mark.parametrize("element", ["collect-whitelisted.bst", "collect-whitelisted-abs.bst"]) +def test_overlaps_whitelist(cli, datafiles, element): project_dir = str(datafiles) gen_project(project_dir, True) result = cli.run(project=project_dir, silent=True, args=[ - 'build', 'collect-whitelisted.bst']) + 'build', element]) result.assert_success() @pytest.mark.datafiles(DATA_DIR) -def test_overlaps_whitelist_ignored(cli, datafiles): +@pytest.mark.parametrize("element", ["collect-whitelisted.bst", "collect-whitelisted-abs.bst"]) +def test_overlaps_whitelist_ignored(cli, datafiles, element): project_dir = str(datafiles) gen_project(project_dir, False) result = cli.run(project=project_dir, silent=True, args=[ - 'build', 'collect-whitelisted.bst']) + 'build', element]) result.assert_success() 
diff -Nru buildstream-1.4.1/tests/sources/ostree.py buildstream-1.4.3/tests/sources/ostree.py
--- buildstream-1.4.1/tests/sources/ostree.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/tests/sources/ostree.py	2020-05-12 11:35:41.000000000 +0300
@@ -55,3 +55,40 @@
     result = cli.run(project=project, args=['show', 'target.bst'])
     result.assert_main_error(ErrorDomain.SOURCE, "missing-track-and-ref")
     result.assert_task_error(None, None)
+
+
+@pytest.mark.datafiles(os.path.join(DATA_DIR, 'template'))
+def test_fetch_gpg_verify(cli, tmpdir, datafiles):
+    project = os.path.join(datafiles.dirname, datafiles.basename)
+
+    gpg_homedir = os.path.join(DATA_DIR, "gpghome")
+
+    # Create the repo from 'repofiles' subdir
+    repo = create_repo('ostree', str(tmpdir))
+    ref = repo.create(
+        os.path.join(project, 'repofiles'),
+        gpg_sign="FFFF54C070353B52D046DEB087FA0F41A6EFD9E9",
+        gpg_homedir=gpg_homedir
+    )
+
+    # Write out our test target
+    ostreesource = repo.source_config(ref=ref, gpg_key='test.gpg')
+    element = {
+        'kind': 'import',
+        'sources': [
+            ostreesource
+        ]
+    }
+
+    _yaml.dump(element, os.path.join(project, 'target.bst'))
+
+    # Assert that a fetch is needed
+    assert cli.get_element_state(project, 'target.bst') == 'fetch needed'
+
+    # Now try to fetch it
+    result = cli.run(project=project, args=['fetch', 'target.bst'])
+    result.assert_success()
+
+    # Assert that we are now buildable because the source is
+    # now cached.
+    assert cli.get_element_state(project, 'target.bst') == 'buildable'
diff -Nru buildstream-1.4.1/tests/testutils/repo/ostree.py buildstream-1.4.3/tests/testutils/repo/ostree.py
--- buildstream-1.4.1/tests/testutils/repo/ostree.py	2019-09-02 15:01:36.000000000 +0300
+++ buildstream-1.4.3/tests/testutils/repo/ostree.py	2020-05-12 11:35:41.000000000 +0300
@@ -13,21 +13,31 @@
 
         super(OSTree, self).__init__(directory, subdir)
 
-    def create(self, directory):
+    def create(self, directory, *, gpg_sign=None, gpg_homedir=None):
         subprocess.call(['ostree', 'init',
                          '--repo', self.repo,
                         '--mode', 'archive-z2'])
-        subprocess.call(['ostree', 'commit',
-                         '--repo', self.repo,
-                         '--branch', 'master',
-                         '--subject', 'Initial commit',
-                         directory])
+
+        commit_args = ['ostree', 'commit',
+                       '--repo', self.repo,
+                       '--branch', 'master',
+                       '--subject', 'Initial commit']
+
+        if gpg_sign and gpg_homedir:
+            commit_args += [
+                '--gpg-sign={}'.format(gpg_sign),
+                '--gpg-homedir={}'.format(gpg_homedir)
+            ]
+
+        commit_args += [directory]
+
+        subprocess.call(commit_args)
 
         latest = self.latest_commit()
 
         return latest
 
-    def source_config(self, ref=None):
+    def source_config(self, ref=None, *, gpg_key=None):
         config = {
             'kind': 'ostree',
             'url': 'file://' + self.repo,
@@ -35,6 +45,8 @@
         }
         if ref is not None:
             config['ref'] = ref
+        if gpg_key is not None:
+            config['gpg-key'] = gpg_key
 
         return config
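Finally, some background on the scheduler/job changes for whoever reviews
that part: the upstream fix pins asyncio's SafeChildWatcher (Python 3.8
switched the default to ThreadedChildWatcher) and lets the watcher, rather
than multiprocessing, reap the child. A condensed, hypothetical model of
that arrangement follows; it uses the Python 3.8-era Unix child-watcher API
(deprecated since 3.12) and a trivial child_action placeholder, so treat it
as a sketch rather than the patch's exact code:

    import asyncio
    import multiprocessing
    import os

    def child_action():
        # Stand-in for the real job body; exit cleanly.
        os._exit(0)

    def main():
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)

        # Pin the non-threaded watcher, as scheduler.py now does, and
        # attach it to our loop so it can install its SIGCHLD handler.
        watcher = asyncio.SafeChildWatcher()
        watcher.attach_loop(loop)
        asyncio.set_child_watcher(watcher)

        process = multiprocessing.Process(target=child_action)
        with watcher:
            process.start()
            # The watcher reaps the pid with waitpid() and then runs the
            # callback as an ordinary loop callback in the main thread.
            watcher.add_child_handler(process.pid,
                                      lambda pid, returncode: loop.stop())

        loop.run_forever()
        loop.close()

    if __name__ == '__main__':
        main()

Because the stock multiprocessing.Process would also waitpid on the child
(racing the watcher), the patch's AsyncioSafeProcess overrides start() to
skip that bookkeeping, exactly as the new _multiprocessing.py explains.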