From: Stefan Herbrechtsmeier <stefan.herbrechtsme...@weidmueller.com>
Add get_src_uris, fetch_src_uris and unpack_src_uris functions to share code between the early and normal tasks. The functions have an early argument to only handle the recipe SRC_URI and not parse the SRC_URI_FILES. Signed-off-by: Stefan Herbrechtsmeier <stefan.herbrechtsme...@weidmueller.com> --- meta/classes-global/base.bbclass | 47 +++++++++++++++++------- meta/classes/archiver.bbclass | 4 +- meta/classes/buildhistory.bbclass | 4 +- meta/classes/copyleft_compliance.bbclass | 2 +- meta/classes/create-spdx-2.2.bbclass | 2 +- meta/classes/externalsrc.bbclass | 2 +- 6 files changed, 41 insertions(+), 20 deletions(-) diff --git a/meta/classes-global/base.bbclass b/meta/classes-global/base.bbclass index 924f319999..2571dab3ba 100644 --- a/meta/classes-global/base.bbclass +++ b/meta/classes-global/base.bbclass @@ -139,9 +139,24 @@ do_fetch[file-checksums] = "${@bb.fetch.get_checksum_file_list(d)}" do_fetch[file-checksums] += " ${@get_lic_checksum_file_list(d)}" do_fetch[prefuncs] += "fetcher_hashes_dummyfunc" do_fetch[network] = "1" -python base_do_fetch() { - src_uri = (d.getVar('SRC_URI') or "").split() +def get_src_uris(d, early=False): + import oe.vendor + + src_uris = (d.getVar("SRC_URI") or "").split() + if early: + return src_uris + + src_uri_files = (d.getVar("SRC_URI_FILES") or "").split() + for fn in src_uri_files: + with open(fn, "r") as f: + u = oe.vendor.load(f) + src_uris.extend(u) + + return src_uris + +def fetch_src_uris(d, early=False): + src_uri = get_src_uris(d, early) if not src_uri: return @@ -150,11 +165,25 @@ python base_do_fetch() { fetcher.download() except bb.fetch2.BBFetchException as e: bb.fatal("Bitbake Fetcher Error: " + repr(e)) + +python base_do_fetch() { + fetch_src_uris(d) } addtask unpack after do_fetch do_unpack[cleandirs] = "${UNPACKDIR}" +def unpack_src_uris(d, unpackdir, early=False): + src_uri = get_src_uris(d, early) + if not src_uri: + return [] + + try: + fetcher = bb.fetch2.Fetch(src_uri, d) + fetcher.unpack(unpackdir) + except 
bb.fetch2.BBFetchException as e: + bb.fatal("Bitbake Fetcher Error: " + repr(e)) + python base_do_unpack() { import shutil @@ -162,12 +191,8 @@ python base_do_unpack() { # Intentionally keep SOURCE_BASEDIR internal to the task just for SDE d.setVar("SOURCE_BASEDIR", sourcedir) - src_uri = (d.getVar('SRC_URI') or "").split() - if not src_uri: - return - + unpackdir = d.getVar("UNPACKDIR") basedir = None - unpackdir = d.getVar('UNPACKDIR') workdir = d.getVar('WORKDIR') if sourcedir.startswith(workdir) and not sourcedir.startswith(unpackdir): basedir = sourcedir.replace(workdir, '').strip("/").split('/')[0] @@ -175,11 +200,7 @@ python base_do_unpack() { bb.utils.remove(workdir + '/' + basedir, True) d.setVar("SOURCE_BASEDIR", workdir + '/' + basedir) - try: - fetcher = bb.fetch2.Fetch(src_uri, d) - fetcher.unpack(d.getVar('UNPACKDIR')) - except bb.fetch2.BBFetchException as e: - bb.fatal("Bitbake Fetcher Error: " + repr(e)) + unpack_src_uris(d, unpackdir) if basedir and os.path.exists(unpackdir + '/' + basedir): # Compatibility magic to ensure ${WORKDIR}/git and ${WORKDIR}/${BP} @@ -704,7 +725,7 @@ addtask cleanall after do_cleansstate do_cleansstate[nostamp] = "1" python do_cleanall() { - src_uri = (d.getVar('SRC_URI') or "").split() + src_uri = get_src_uris(d, True) if not src_uri: return diff --git a/meta/classes/archiver.bbclass b/meta/classes/archiver.bbclass index df271feddd..c297f6008d 100644 --- a/meta/classes/archiver.bbclass +++ b/meta/classes/archiver.bbclass @@ -190,7 +190,7 @@ python do_ar_original() { ar_outdir = d.getVar('ARCHIVER_OUTDIR') bb.note('Archiving the original source...') - urls = d.getVar("SRC_URI").split() + urls = get_src_uris(d) # destsuffix (git fetcher) and subdir (everything else) are allowed to be # absolute paths (for example, destsuffix=${S}/foobar). 
# That messes with unpacking inside our tmpdir below, because the fetchers @@ -332,7 +332,7 @@ python do_ar_configured() { python do_ar_mirror() { import subprocess - src_uri = (d.getVar('SRC_URI') or '').split() + src_uri = get_src_uris(d) if len(src_uri) == 0: return diff --git a/meta/classes/buildhistory.bbclass b/meta/classes/buildhistory.bbclass index d735dd5fb5..1a6bed8a84 100644 --- a/meta/classes/buildhistory.bbclass +++ b/meta/classes/buildhistory.bbclass @@ -289,7 +289,7 @@ python buildhistory_emit_pkghistory() { rcpinfo.layer = layer rcpinfo.license = license rcpinfo.config = sortlist(oe.utils.squashspaces(d.getVar('PACKAGECONFIG') or "")) - rcpinfo.src_uri = oe.utils.squashspaces(d.getVar('SRC_URI') or "") + rcpinfo.src_uri = " ".join(get_src_uris(d)) write_recipehistory(rcpinfo, d) bb.build.exec_func("read_subpackage_metadata", d) @@ -933,7 +933,7 @@ def _get_srcrev_values(d): """ scms = [] - fetcher = bb.fetch.Fetch(d.getVar('SRC_URI').split(), d) + fetcher = bb.fetch.Fetch(get_src_uris(d), d) urldata = fetcher.ud for u in urldata: if urldata[u].method.supports_srcrev(): diff --git a/meta/classes/copyleft_compliance.bbclass b/meta/classes/copyleft_compliance.bbclass index 9ff9956fe9..e16427e9f4 100644 --- a/meta/classes/copyleft_compliance.bbclass +++ b/meta/classes/copyleft_compliance.bbclass @@ -29,7 +29,7 @@ python do_prepare_copyleft_sources () { sources_dir = d.getVar('COPYLEFT_SOURCES_DIR') dl_dir = d.getVar('DL_DIR') - src_uri = d.getVar('SRC_URI').split() + src_uri = get_src_uris(d) fetch = bb.fetch2.Fetch(src_uri, d) ud = fetch.ud diff --git a/meta/classes/create-spdx-2.2.bbclass b/meta/classes/create-spdx-2.2.bbclass index aa195f5aa7..43db4f6e3b 100644 --- a/meta/classes/create-spdx-2.2.bbclass +++ b/meta/classes/create-spdx-2.2.bbclass @@ -349,7 +349,7 @@ def add_download_packages(d, doc, recipe): import oe.spdx import oe.sbom - urls = d.getVar("SRC_URI").split() + urls = get_src_uris(d) fetcher = bb.fetch2.Fetch(urls, d) for download_idx, 
f in enumerate(fetcher.expanded_urldata()): diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass index 70e27a8d35..17a0159bc0 100644 --- a/meta/classes/externalsrc.bbclass +++ b/meta/classes/externalsrc.bbclass @@ -65,7 +65,7 @@ python () { bb.fetch.get_hashvalue(d) local_srcuri = [] - fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d) + fetch = bb.fetch2.Fetch(get_src_uris(d), d) for url in fetch.urls: url_data = fetch.ud[url] parm = url_data.parm -- 2.39.5
-=-=-=-=-=-=-=-=-=-=-=- Links: You receive all messages sent to this group. View/Reply Online (#211140): https://lists.openembedded.org/g/openembedded-core/message/211140 Mute This Topic: https://lists.openembedded.org/mt/111123536/21656 Group Owner: openembedded-core+ow...@lists.openembedded.org Unsubscribe: https://lists.openembedded.org/g/openembedded-core/unsub [arch...@mail-archive.com] -=-=-=-=-=-=-=-=-=-=-=-