Diffstat (limited to 'meta/classes/externalsrc.bbclass')
-rw-r--r-- meta/classes/externalsrc.bbclass | 105
1 file changed, 72 insertions(+), 33 deletions(-)
diff --git a/meta/classes/externalsrc.bbclass b/meta/classes/externalsrc.bbclass
index 65dd13ddc1..70e27a8d35 100644
--- a/meta/classes/externalsrc.bbclass
+++ b/meta/classes/externalsrc.bbclass
@@ -2,7 +2,8 @@
# Author: Richard Purdie
# Some code and influence taken from srctree.bbclass:
# Copyright (C) 2009 Chris Larson <clarson@kergoth.com>
-# Released under the MIT license (see COPYING.MIT for the terms)
+#
+# SPDX-License-Identifier: MIT
#
# externalsrc.bbclass enables use of an existing source tree, usually external to
# the build system to build a piece of software rather than the usual fetch/unpack/patch
@@ -13,7 +14,7 @@
# called "myrecipe" you would do:
#
# INHERIT += "externalsrc"
-# EXTERNALSRC_pn-myrecipe = "/path/to/my/source/tree"
+# EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree"
#
# In order to make this class work for both target and native versions (or with
# multilibs/cross or other BBCLASSEXTEND variants), B is set to point to a separate
@@ -21,7 +22,7 @@
# the default, but the build directory can be set to the source directory if
# circumstances dictate by setting EXTERNALSRC_BUILD to the same value, e.g.:
#
-# EXTERNALSRC_BUILD_pn-myrecipe = "/path/to/my/source/tree"
+# EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree"
#
SRCTREECOVEREDTASKS ?= "do_patch do_unpack do_fetch"
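Pulling the header's two examples together, a minimal local.conf sketch for pointing one recipe at an external tree; the recipe name and path are the placeholders used in the comments above, and the :pn- override spelling matches the updated syntax:

    INHERIT += "externalsrc"
    EXTERNALSRC:pn-myrecipe = "/path/to/my/source/tree"
    # Only needed for an in-tree build; otherwise B defaults to ${WORKDIR}/${BPN}-${PV}
    EXTERNALSRC_BUILD:pn-myrecipe = "/path/to/my/source/tree"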
@@ -41,68 +42,83 @@ python () {
# re-parsed so that the file-checksums function for do_compile is run every
# time.
bpn = d.getVar('BPN')
- if bpn == d.getVar('PN'):
- classextend = (d.getVar('BBCLASSEXTEND') or '').split()
+ classextend = (d.getVar('BBCLASSEXTEND') or '').split()
+ if bpn == d.getVar('PN') or not classextend:
if (externalsrc or
('native' in classextend and
- d.getVar('EXTERNALSRC_pn-%s-native' % bpn)) or
+ d.getVar('EXTERNALSRC:pn-%s-native' % bpn)) or
('nativesdk' in classextend and
- d.getVar('EXTERNALSRC_pn-nativesdk-%s' % bpn)) or
+ d.getVar('EXTERNALSRC:pn-nativesdk-%s' % bpn)) or
('cross' in classextend and
- d.getVar('EXTERNALSRC_pn-%s-cross' % bpn))):
+ d.getVar('EXTERNALSRC:pn-%s-cross' % bpn))):
d.setVar('BB_DONT_CACHE', '1')
if externalsrc:
+ import oe.recipeutils
+ import oe.path
+
d.setVar('S', externalsrc)
if externalsrcbuild:
d.setVar('B', externalsrcbuild)
else:
- d.setVar('B', '${WORKDIR}/${BPN}-${PV}/')
+ d.setVar('B', '${WORKDIR}/${BPN}-${PV}')
+ bb.fetch.get_hashvalue(d)
local_srcuri = []
fetch = bb.fetch2.Fetch((d.getVar('SRC_URI') or '').split(), d)
for url in fetch.urls:
url_data = fetch.ud[url]
parm = url_data.parm
- if (url_data.type == 'file' or
- 'type' in parm and parm['type'] == 'kmeta'):
+ if url_data.type in ['file', 'npmsw', 'crate'] or parm.get('type') in ['kmeta', 'git-dependency']:
local_srcuri.append(url)
d.setVar('SRC_URI', ' '.join(local_srcuri))
- if '{SRCPV}' in d.getVar('PV', False):
- # Dummy value because the default function can't be called with blank SRC_URI
- d.setVar('SRCPV', '999')
+ # sstate is never going to work for external source trees, disable it
+ d.setVar('SSTATE_SKIP_CREATION', '1')
+
+ if d.getVar('CONFIGUREOPT_DEPTRACK') == '--disable-dependency-tracking':
+ d.setVar('CONFIGUREOPT_DEPTRACK', '')
tasks = filter(lambda k: d.getVarFlag(k, "task"), d.keys())
for task in tasks:
- if task.endswith("_setscene"):
- # sstate is never going to work for external source trees, disable it
- bb.build.deltask(task, d)
- else:
+ if os.path.realpath(d.getVar('S')) == os.path.realpath(d.getVar('B')):
# Since configure will likely touch ${S}, ensure only we lock so one task has access at a time
d.appendVarFlag(task, "lockfiles", " ${S}/singletask.lock")
- # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
- cleandirs = (d.getVarFlag(task, 'cleandirs', False) or '').split()
- setvalue = False
- for cleandir in cleandirs[:]:
- if d.expand(cleandir) == externalsrc:
- cleandirs.remove(cleandir)
- setvalue = True
- if setvalue:
- d.setVarFlag(task, 'cleandirs', ' '.join(cleandirs))
+ for v in d.keys():
+ cleandirs = d.getVarFlag(v, "cleandirs", False)
+ if cleandirs:
+ # We do not want our source to be wiped out, ever (kernel.bbclass does this for do_clean)
+ cleandirs = oe.recipeutils.split_var_value(cleandirs)
+ setvalue = False
+ for cleandir in cleandirs[:]:
+ if oe.path.is_path_parent(externalsrc, d.expand(cleandir)):
+ cleandirs.remove(cleandir)
+ setvalue = True
+ if setvalue:
+ d.setVarFlag(v, 'cleandirs', ' '.join(cleandirs))
fetch_tasks = ['do_fetch', 'do_unpack']
# If we deltask do_patch, there's no dependency to ensure do_unpack gets run, so add one
# Note that we cannot use d.appendVarFlag() here because deps is expected to be a list object, not a string
d.setVarFlag('do_configure', 'deps', (d.getVarFlag('do_configure', 'deps', False) or []) + ['do_unpack'])
+ d.setVarFlag('do_populate_lic', 'deps', (d.getVarFlag('do_populate_lic', 'deps', False) or []) + ['do_unpack'])
for task in d.getVar("SRCTREECOVEREDTASKS").split():
if local_srcuri and task in fetch_tasks:
continue
bb.build.deltask(task, d)
+ if task == 'do_unpack':
+ # The reproducible build create_source_date_epoch_stamp function must
+ # be run after the source is available and before the
+ # do_deploy_source_date_epoch task. In the normal case, it's attached
+ # to do_unpack as a postfuncs, but since we removed do_unpack (above)
+ # we need to move the function elsewhere. The easiest thing to do is
+ # move it into the prefuncs of the do_deploy_source_date_epoch task.
+ # This is safe, as externalsrc runs with the source already unpacked.
+ d.prependVarFlag('do_deploy_source_date_epoch', 'prefuncs', 'create_source_date_epoch_stamp ')
d.prependVarFlag('do_compile', 'prefuncs', "externalsrc_compile_prefunc ")
d.prependVarFlag('do_configure', 'prefuncs', "externalsrc_configure_prefunc ")
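Two of the filters in the anonymous python() above are easy to misread in diff form. The sketch below reproduces their logic outside BitBake as plain Python: which SRC_URI entries survive under externalsrc (file/npmsw/crate URIs plus kmeta/git-dependency entries), and how cleandirs entries that would wipe the external tree are dropped. is_local_entry() uses naive string parsing instead of the bitbake fetcher, is_path_parent() is only an approximation of oe.path.is_path_parent(), and all URIs and paths are placeholders.

    import os

    def is_local_entry(uri):
        # Mirror of the condition on url_data.type / parm['type'] above.
        scheme, _, rest = uri.partition('://')
        params = dict(p.split('=', 1) for p in rest.split(';')[1:] if '=' in p)
        return scheme in ('file', 'npmsw', 'crate') or params.get('type') in ('kmeta', 'git-dependency')

    def is_path_parent(possible_parent, path):
        # Approximation: resolve both paths, then compare with a trailing
        # separator so that /a/bc is not treated as living inside /a/b.
        parent = os.path.realpath(possible_parent).rstrip(os.sep) + os.sep
        return (os.path.realpath(path).rstrip(os.sep) + os.sep).startswith(parent)

    src_uri = [
        'git://example.com/foo.git;protocol=https;branch=main',   # dropped: fetched remotely
        'file://defconfig',                                        # kept
        'git://example.com/meta.git;type=kmeta;branch=main',       # kept
    ]
    print([u for u in src_uri if is_local_entry(u)])

    externalsrc = '/path/to/my/source/tree'                        # placeholder EXTERNALSRC
    cleandirs = ['/builddir/tmp/work/myrecipe/build', externalsrc + '/subdir']
    # Entries inside (or equal to) the external tree are filtered out of cleandirs.
    print([c for c in cleandirs if not is_path_parent(externalsrc, c)])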
@@ -110,6 +126,9 @@ python () {
d.setVarFlag('do_compile', 'file-checksums', '${@srctree_hash_files(d)}')
d.setVarFlag('do_configure', 'file-checksums', '${@srctree_configure_hash_files(d)}')
+ d.appendVarFlag('do_compile', 'prefuncs', ' fetcher_hashes_dummyfunc')
+ d.appendVarFlag('do_configure', 'prefuncs', ' fetcher_hashes_dummyfunc')
+
# We don't want the workdir to go away
d.appendVar('RM_WORK_EXCLUDE', ' ' + d.getVar('PN'))
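The file-checksums varflag set above holds space-separated "path:exists" entries (the same form srctree_hash_files() and srctree_configure_hash_files() return), which feed into the task signature. As a hedged illustration only, a recipe could append its own entry for a hypothetical extra input file:

    do_compile[file-checksums] += "${S}/config.mk:True"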
@@ -173,7 +192,9 @@ do_buildclean[doc] = "Call 'make clean' or equivalent in ${B}"
externalsrc_do_buildclean() {
if [ -e Makefile -o -e makefile -o -e GNUmakefile ]; then
rm -f ${@' '.join([x.split(':')[0] for x in (d.getVar('EXTERNALSRC_SYMLINKS') or '').split()])}
- oe_runmake clean || die "make failed"
+ if [ "${CLEANBROKEN}" != "1" ]; then
+ oe_runmake clean || die "make failed"
+ fi
else
bbnote "nothing to do - no makefile found"
fi
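The new guard honours CLEANBROKEN, the existing OE switch for sources whose build system cannot run 'make clean' reliably; an externalsrc-built recipe with such a makefile can simply set it so do_buildclean skips the make invocation:

    CLEANBROKEN = "1"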
@@ -183,19 +204,24 @@ def srctree_hash_files(d, srcdir=None):
import shutil
import subprocess
import tempfile
+ import hashlib
s_dir = srcdir or d.getVar('EXTERNALSRC')
git_dir = None
try:
git_dir = os.path.join(s_dir,
- subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir']).decode("utf-8").rstrip())
+ subprocess.check_output(['git', '-C', s_dir, 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
+ top_git_dir = os.path.join(d.getVar("TOPDIR"),
+ subprocess.check_output(['git', '-C', d.getVar("TOPDIR"), 'rev-parse', '--git-dir'], stderr=subprocess.DEVNULL).decode("utf-8").rstrip())
+ if git_dir == top_git_dir:
+ git_dir = None
except subprocess.CalledProcessError:
pass
ret = " "
if git_dir is not None:
- oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1')
+ oe_hash_file = os.path.join(git_dir, 'oe-devtool-tree-sha1-%s' % d.getVar('PN'))
with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index:
# Clone index
shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name)
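The new rev-parse comparison above makes a source tree whose nearest enclosing git repository is the build directory's own checkout get treated as non-git for hashing purposes. A standalone sketch of that check, with placeholder paths:

    import os
    import subprocess

    def resolve_git_dir(path):
        # Absolute git dir governing 'path', or None when 'path' is not in a repo.
        try:
            out = subprocess.check_output(
                ['git', '-C', path, 'rev-parse', '--git-dir'],
                stderr=subprocess.DEVNULL).decode('utf-8').rstrip()
        except (subprocess.CalledProcessError, FileNotFoundError):
            return None
        return os.path.join(path, out)  # join() keeps 'out' as-is if already absolute

    topdir = '/builddir'                        # placeholder for d.getVar("TOPDIR")
    s_dir = '/path/to/my/source/tree'           # placeholder for EXTERNALSRC
    git_dir = resolve_git_dir(s_dir)
    if git_dir is not None and git_dir == resolve_git_dir(topdir):
        git_dir = None                          # same repo as the build dir: ignore git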
@@ -203,7 +229,18 @@ def srctree_hash_files(d, srcdir=None):
env = os.environ.copy()
env['GIT_INDEX_FILE'] = tmp_index.name
subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
- sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
+ git_sha1 = subprocess.check_output(['git', 'write-tree'], cwd=s_dir, env=env).decode("utf-8")
+ if os.path.exists(os.path.join(s_dir, ".gitmodules")) and os.path.getsize(os.path.join(s_dir, ".gitmodules")) > 0:
+ submodule_helper = subprocess.check_output(["git", "config", "--file", ".gitmodules", "--get-regexp", "path"], cwd=s_dir, env=env).decode("utf-8")
+ for line in submodule_helper.splitlines():
+ module_dir = os.path.join(s_dir, line.rsplit(maxsplit=1)[1])
+ if os.path.isdir(module_dir):
+ proc = subprocess.Popen(['git', 'add', '-A', '.'], cwd=module_dir, env=env, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
+ proc.communicate()
+ proc = subprocess.Popen(['git', 'write-tree'], cwd=module_dir, env=env, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL)
+ stdout, _ = proc.communicate()
+ git_sha1 += stdout.decode("utf-8")
+ sha1 = hashlib.sha1(git_sha1.encode("utf-8")).hexdigest()
with open(oe_hash_file, 'w') as fobj:
fobj.write(sha1)
ret = oe_hash_file + ':True'
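Outside BitBake, the index-cloning trick used above can be reproduced on its own: copy .git/index to a temporary file, point GIT_INDEX_FILE at the copy, stage everything, and let 'git write-tree' produce a content hash of the working tree without touching the user's real index. The path below is a placeholder, and the final hashlib step mirrors how the class folds the (possibly concatenated, with submodules) tree hashes into a single sha1.

    import hashlib
    import os
    import shutil
    import subprocess
    import tempfile

    def working_tree_sha1(s_dir):
        git_dir = os.path.join(s_dir, subprocess.check_output(
            ['git', '-C', s_dir, 'rev-parse', '--git-dir'],
            stderr=subprocess.DEVNULL).decode('utf-8').rstrip())
        with tempfile.NamedTemporaryFile(prefix='oe-devtool-index') as tmp_index:
            shutil.copyfile(os.path.join(git_dir, 'index'), tmp_index.name)  # clone the index
            env = os.environ.copy()
            env['GIT_INDEX_FILE'] = tmp_index.name
            subprocess.check_output(['git', 'add', '-A', '.'], cwd=s_dir, env=env)
            tree = subprocess.check_output(['git', 'write-tree'],
                                           cwd=s_dir, env=env).decode('utf-8')
        return hashlib.sha1(tree.encode('utf-8')).hexdigest()

    print(working_tree_sha1('/path/to/my/source/tree'))   # placeholder path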
@@ -216,6 +253,8 @@ def srctree_configure_hash_files(d):
Get the list of files that should trigger do_configure to re-execute,
based on the value of CONFIGURE_FILES
"""
+ import fnmatch
+
in_files = (d.getVar('CONFIGURE_FILES') or '').split()
out_items = []
search_files = []
@@ -227,8 +266,8 @@ def srctree_configure_hash_files(d):
if search_files:
s_dir = d.getVar('EXTERNALSRC')
for root, _, files in os.walk(s_dir):
- for f in files:
- if f in search_files:
+ for p in search_files:
+ for f in fnmatch.filter(files, p):
out_items.append('%s:True' % os.path.join(root, f))
return ' '.join(out_items)
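Finally, the fnmatch change means CONFIGURE_FILES entries are matched as glob patterns rather than only exact file names. A self-contained sketch of the resulting walk; the tree path and patterns are placeholders:

    import fnmatch
    import os

    def configure_hash_files(s_dir, patterns):
        # Same shape of output as the class: space-separated "path:True" entries.
        out_items = []
        for root, _, files in os.walk(s_dir):
            for p in patterns:
                for f in fnmatch.filter(files, p):
                    out_items.append('%s:True' % os.path.join(root, f))
        return ' '.join(out_items)

    print(configure_hash_files('/path/to/my/source/tree', ['configure.ac', '*.in']))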