Diffstat (limited to 'scripts/lib/devtool/standard.py')
-rw-r--r--  scripts/lib/devtool/standard.py | 837
1 file changed, 576 insertions(+), 261 deletions(-)
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 26187a0c41..bd009f44b1 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool standard plugins"""
import os
@@ -66,7 +56,7 @@ def add(args, config, basepath, workspace):
args.srctree = args.recipename
args.recipename = None
elif os.path.isdir(args.recipename):
- logger.warn('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
+ logger.warning('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
if not args.fetchuri:
if args.srcrev:
@@ -82,7 +72,7 @@ def add(args, config, basepath, workspace):
if args.fetchuri:
raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
else:
- logger.warn('-f/--fetch option is deprecated - you can now simply specify the URL to fetch as a positional argument instead')
+ logger.warning('-f/--fetch option is deprecated - you can now simply specify the URL to fetch as a positional argument instead')
args.fetchuri = args.fetch
if args.recipename:
@@ -155,8 +145,10 @@ def add(args, config, basepath, workspace):
extracmdopts += ' --src-subdir "%s"' % args.src_subdir
if args.autorev:
extracmdopts += ' -a'
- if args.fetch_dev:
- extracmdopts += ' --fetch-dev'
+ if args.npm_dev:
+ extracmdopts += ' --npm-dev'
+ if args.no_pypi:
+ extracmdopts += ' --no-pypi'
if args.mirrors:
extracmdopts += ' --mirrors'
if args.srcrev:
@@ -217,7 +209,7 @@ def add(args, config, basepath, workspace):
raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout))
attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
if os.path.exists(attic_recipe):
- logger.warn('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
+ logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
finally:
if tmpsrcdir and os.path.exists(tmpsrcdir):
shutil.rmtree(tmpsrcdir)
@@ -244,10 +236,14 @@ def add(args, config, basepath, workspace):
if args.fetchuri and not args.no_git:
setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data)
- initial_rev = None
+ initial_rev = {}
if os.path.exists(os.path.join(srctree, '.git')):
(stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
- initial_rev = stdout.rstrip()
+ initial_rev["."] = stdout.rstrip()
+ (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree)
+ for line in stdout.splitlines():
+ (rev, submodule) = line.split()
+ initial_rev[os.path.relpath(submodule, srctree)] = rev
if args.src_subdir:
srctree = os.path.join(srctree, args.src_subdir)
@@ -261,23 +257,20 @@ def add(args, config, basepath, workspace):
if b_is_s:
f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
if initial_rev:
- f.write('\n# initial_rev: %s\n' % initial_rev)
+ for key, value in initial_rev.items():
+ f.write('\n# initial_rev %s: %s\n' % (key, value))
if args.binary:
- f.write('do_install_append() {\n')
+ f.write('do_install:append() {\n')
f.write(' rm -rf ${D}/.git\n')
f.write(' rm -f ${D}/singletask.lock\n')
f.write('}\n')
if bb.data.inherits_class('npm', rd):
- f.write('do_install_append() {\n')
- f.write(' # Remove files added to source dir by devtool/externalsrc\n')
- f.write(' rm -f ${NPM_INSTALLDIR}/singletask.lock\n')
- f.write(' rm -rf ${NPM_INSTALLDIR}/.git\n')
- f.write(' rm -rf ${NPM_INSTALLDIR}/oe-local-files\n')
- f.write(' for symlink in ${EXTERNALSRC_SYMLINKS} ; do\n')
- f.write(' rm -f ${NPM_INSTALLDIR}/${symlink%%:*}\n')
- f.write(' done\n')
+ f.write('python do_configure:append() {\n')
+ f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n')
+ f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n')
+ f.write(' bb.utils.remove(lockfile)\n')
f.write('}\n')
# Check if the new layer provides recipes whose priorities have been
@@ -295,7 +288,7 @@ def add(args, config, basepath, workspace):
with open(layerconf_file, 'a') as f:
f.write('%s = "%s"\n' % (preferred_provider, recipe_name))
else:
- logger.warn('Set \'%s\' in order to use the recipe' % preferred_provider)
+ logger.warning('Set \'%s\' in order to use the recipe' % preferred_provider)
break
_add_md5(config, recipename, appendfile)
@@ -332,10 +325,6 @@ def _check_compatible_recipe(pn, d):
raise DevtoolError("The %s recipe is a packagegroup, and therefore is "
"not supported by this tool" % pn, 4)
- if bb.data.inherits_class('meta', d):
- raise DevtoolError("The %s recipe is a meta-recipe, and therefore is "
- "not supported by this tool" % pn, 4)
-
if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'):
# Not an incompatibility error per se, so we don't pass the error code
raise DevtoolError("externalsrc is currently enabled for the %s "
@@ -371,7 +360,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None):
bb.utils.mkdirhier(dst_d)
shutil.move(src, dst)
-def _copy_file(src, dst, dry_run_outdir=None):
+def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
"""Copy a file. Creates all the directory components of destination path."""
dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix))
@@ -471,11 +460,37 @@ def sync(args, config, basepath, workspace):
finally:
tinfoil.shutdown()
+def symlink_oelocal_files_srctree(rd, srctree):
+ import oe.patch
+ if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
+ # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
+ # (otherwise the recipe won't build as expected)
+ local_files_dir = os.path.join(srctree, 'oe-local-files')
+ addfiles = []
+ for root, _, files in os.walk(local_files_dir):
+ relpth = os.path.relpath(root, local_files_dir)
+ if relpth != '.':
+ bb.utils.mkdirhier(os.path.join(srctree, relpth))
+ for fn in files:
+ if fn == '.gitignore':
+ continue
+ destpth = os.path.join(srctree, relpth, fn)
+ if os.path.exists(destpth):
+ os.unlink(destpth)
+ if relpth != '.':
+ back_relpth = os.path.relpath(local_files_dir, root)
+ os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth)
+ else:
+ os.symlink('oe-local-files/%s' % fn, destpth)
+ addfiles.append(os.path.join(relpth, fn))
+ if addfiles:
+ oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd)
def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
"""Extract sources of a recipe"""
import oe.recipeutils
import oe.patch
+ import oe.path
pn = d.getVar('PN')
@@ -507,14 +522,20 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
history = d.varhistory.variable('SRC_URI')
for event in history:
if not 'flag' in event:
- if event['op'].startswith(('_append[', '_prepend[')):
- extra_overrides.append(event['op'].split('[')[1].split(']')[0])
+ if event['op'].startswith((':append[', ':prepend[')):
+ override = event['op'].split('[')[1].split(']')[0]
+ if not override.startswith('pn-'):
+ extra_overrides.append(override)
+ # We want to remove duplicate overrides. If a recipe had multiple
+ # SRC_URI_override += values it would cause multiple instances of
+ # overrides. This doesn't play nicely with things like creating a
+ # branch for every instance of DEVTOOL_EXTRA_OVERRIDES.
+ extra_overrides = list(set(extra_overrides))
if extra_overrides:
logger.info('SRC_URI contains some conditional appends/prepends - will create branches to represent these')
initial_rev = None
- appendexisted = False
recipefile = d.getVar('FILE')
appendfile = recipe_to_append(recipefile, config)
is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
@@ -549,6 +570,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
logger.debug('writing append file %s' % appendfile)
with open(appendfile, 'a') as f:
f.write('###--- _extract_source\n')
+ f.write('deltask do_recipe_qa\n')
+ f.write('deltask do_recipe_qa_setscene\n')
+ f.write('ERROR_QA:remove = "patch-fuzz"\n')
f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
if not is_kernel_yocto:
@@ -566,13 +590,24 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
with open(preservestampfile, 'w') as f:
f.write(d.getVar('STAMP'))
+ tinfoil.modified_files()
try:
- if bb.data.inherits_class('kernel-yocto', d):
+ if is_kernel_yocto:
# We need to generate the kernel config
task = 'do_configure'
else:
task = 'do_patch'
+ if 'noexec' in (d.getVarFlags(task, False) or []) or 'task' not in (d.getVarFlags(task, False) or []):
+ logger.info('The %s recipe has %s disabled. Running only '
+ 'do_configure task dependencies' % (pn, task))
+
+ if 'depends' in d.getVarFlags('do_configure', False):
+ pn = d.getVarFlags('do_configure', False)['depends']
+ pn = pn.replace('${PV}', d.getVar('PV'))
+ pn = pn.replace('${COMPILERDEP}', d.getVar('COMPILERDEP'))
+ task = None
+
# Run the fetch + unpack tasks
res = tinfoil.build_targets(pn,
task,
@@ -584,6 +619,17 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
if not res:
raise DevtoolError('Extracting source for %s failed' % pn)
+ if not is_kernel_yocto and ('noexec' in (d.getVarFlags('do_patch', False) or []) or 'task' not in (d.getVarFlags('do_patch', False) or [])):
+ workshareddir = d.getVar('S')
+ if os.path.islink(srctree):
+ os.unlink(srctree)
+
+ os.symlink(workshareddir, srctree)
+
+ # The initial_rev file is created by the devtool_post_unpack function, which will not be executed if
+ # the do_unpack/do_patch tasks are disabled, so we have to state directly that source extraction was successful
+ return True, True
+
try:
with open(os.path.join(tempdir, 'initial_rev'), 'r') as f:
initial_rev = f.read()
@@ -594,19 +640,47 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))
+ # Check if work-shared is empty; if so,
+ # find the source and copy it to work-shared
+ if is_kernel_yocto:
+ workshareddir = d.getVar('STAGING_KERNEL_DIR')
+ staging_kerVer = get_staging_kver(workshareddir)
+ kernelVersion = d.getVar('LINUX_VERSION')
+
+ # handle dangling symbolic link in work-shared:
+ if os.path.islink(workshareddir):
+ os.unlink(workshareddir)
+
+ if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
+ shutil.rmtree(workshareddir)
+ oe.path.copyhardlinktree(srcsubdir, workshareddir)
+ elif not os.path.exists(workshareddir):
+ oe.path.copyhardlinktree(srcsubdir, workshareddir)
+
tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
srctree_localdir = os.path.join(srctree, 'oe-local-files')
if sync:
- bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
-
- # Move oe-local-files directory to srctree
- # As the oe-local-files is not part of the constructed git tree,
- # remove them directly during the synchrounizating might surprise
- # the users. Instead, we move it to oe-local-files.bak and remind
- # user in the log message.
+ try:
+ logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
+ bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree)
+
+ # Use git fetch to update the source with the current recipe
+ # To be able to update the currently checked out branch with
+ # possibly new history (no fast-forward) git needs to be told
+ # that's ok
+ logger.info('Syncing source files including patches to git branch: %s' % devbranch)
+ bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
+ except bb.process.ExecutionError as e:
+ raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))
+
+ # Move the oe-local-files directory to srctree.
+ # As oe-local-files is not part of the constructed git tree,
+ # removing it directly during the synchronization might surprise
+ # the user. Instead, we move it to oe-local-files.bak and remind
+ # the user in the log message.
if os.path.exists(srctree_localdir + '.bak'):
- shutil.rmtree(srctree_localdir, srctree_localdir + '.bak')
+ shutil.rmtree(srctree_localdir + '.bak')
if os.path.exists(srctree_localdir):
logger.info('Backing up current local file directory %s' % srctree_localdir)
@@ -622,29 +696,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
shutil.move(tempdir_localdir, srcsubdir)
shutil.move(srcsubdir, srctree)
-
- if os.path.abspath(d.getVar('S')) == os.path.abspath(d.getVar('WORKDIR')):
- # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
- # (otherwise the recipe won't build as expected)
- local_files_dir = os.path.join(srctree, 'oe-local-files')
- addfiles = []
- for root, _, files in os.walk(local_files_dir):
- relpth = os.path.relpath(root, local_files_dir)
- if relpth != '.':
- bb.utils.mkdirhier(os.path.join(srctree, relpth))
- for fn in files:
- if fn == '.gitignore':
- continue
- destpth = os.path.join(srctree, relpth, fn)
- if os.path.exists(destpth):
- os.unlink(destpth)
- os.symlink('oe-local-files/%s' % fn, destpth)
- addfiles.append(os.path.join(relpth, fn))
- if addfiles:
- bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
- useroptions = []
- oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d)
- bb.process.run('git %s commit -a -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
+ symlink_oelocal_files_srctree(d, srctree)
if is_kernel_yocto:
logger.info('Copying kernel config to srctree')
@@ -704,19 +756,49 @@ def _check_preserve(config, recipename):
if splitline[2] != md5:
bb.utils.mkdirhier(preservepath)
preservefile = os.path.basename(removefile)
- logger.warn('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
+ logger.warning('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
shutil.move(removefile, os.path.join(preservepath, preservefile))
else:
os.remove(removefile)
else:
tf.write(line)
- os.rename(newfile, origfile)
+ bb.utils.rename(newfile, origfile)
+
+def get_staging_kver(srcdir):
+ # Kernel version from work-shared
+ kerver = []
+ staging_kerVer=""
+ if os.path.exists(srcdir) and os.listdir(srcdir):
+ with open(os.path.join(srcdir, "Makefile")) as f:
+ version = [next(f) for x in range(5)][1:4]
+ for word in version:
+ kerver.append(word.split('= ')[1].split('\n')[0])
+ staging_kerVer = ".".join(kerver)
+ return staging_kerVer
+
+def get_staging_kbranch(srcdir):
+ staging_kbranch = ""
+ if os.path.exists(srcdir) and os.listdir(srcdir):
+ (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
+ staging_kbranch = "".join(branch.split('\n')[0])
+ return staging_kbranch
+
+def get_real_srctree(srctree, s, workdir):
+ # Check that recipe isn't using a shared workdir
+ s = os.path.abspath(s)
+ workdir = os.path.abspath(workdir)
+ if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
+ # Handle if S is set to a subdirectory of the source
+ srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
+ srctree = os.path.join(srctree, srcsubdir)
+ return srctree
def modify(args, config, basepath, workspace):
"""Entry point for the devtool 'modify' subcommand"""
import bb
import oe.recipeutils
import oe.patch
+ import oe.path
if args.recipename in workspace:
raise DevtoolError("recipe %s is already in your workspace" %
@@ -755,23 +837,89 @@ def modify(args, config, basepath, workspace):
_check_compatible_recipe(pn, rd)
- initial_rev = None
- commits = []
+ initial_revs = {}
+ commits = {}
check_commits = False
+
+ if bb.data.inherits_class('kernel-yocto', rd):
+ # Current set kernel version
+ kernelVersion = rd.getVar('LINUX_VERSION')
+ srcdir = rd.getVar('STAGING_KERNEL_DIR')
+ kbranch = rd.getVar('KBRANCH')
+
+ staging_kerVer = get_staging_kver(srcdir)
+ staging_kbranch = get_staging_kbranch(srcdir)
+ if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
+ oe.path.copyhardlinktree(srcdir, srctree)
+ workdir = rd.getVar('WORKDIR')
+ srcsubdir = rd.getVar('S')
+ localfilesdir = os.path.join(srctree, 'oe-local-files')
+ # Move local source files into separate subdir
+ recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)]
+ local_files = oe.recipeutils.get_recipe_local_files(rd)
+
+ for key in local_files.copy():
+ if key.endswith('scc'):
+ sccfile = open(local_files[key], 'r')
+ for l in sccfile:
+ line = l.split()
+ if line and line[0] in ('kconf', 'patch'):
+ cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
+ if not cfg in local_files.values():
+ local_files[line[-1]] = cfg
+ shutil.copy2(cfg, workdir)
+ sccfile.close()
+
+ # Ignore local files with subdir={BP}
+ srcabspath = os.path.abspath(srcsubdir)
+ local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))]
+ if local_files:
+ for fname in local_files:
+ _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname))
+ with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f:
+ f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n')
+
+ symlink_oelocal_files_srctree(rd, srctree)
+
+ task = 'do_configure'
+ res = tinfoil.build_targets(pn, task, handle_events=True)
+
+ # Copy .config to workspace
+ kconfpath = rd.getVar('B')
+ logger.info('Copying kernel config to workspace')
+ shutil.copy2(os.path.join(kconfpath, '.config'), srctree)
+
+ # Set this to True; we still need to get initial_rev
+ # by parsing the git repo
+ args.no_extract = True
+
if not args.no_extract:
- initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
- if not initial_rev:
+ initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
+ if not initial_revs["."]:
return 1
logger.info('Source tree extracted to %s' % srctree)
- # Get list of commits since this revision
- (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree)
- commits = stdout.split()
- check_commits = True
+
+ if os.path.exists(os.path.join(srctree, '.git')):
+ # Get list of commits since this revision
+ (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
+ commits["."] = stdout.split()
+ check_commits = True
+ (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
+ for line in stdout.splitlines():
+ (rev, submodule_path) = line.split()
+ submodule = os.path.relpath(submodule_path, srctree)
+ initial_revs[submodule] = rev
+ (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path)
+ commits[submodule] = stdout.split()
else:
if os.path.exists(os.path.join(srctree, '.git')):
- # Check if it's a tree previously extracted by us
+ # Check if it's a tree previously extracted by us. This is done
+ # by ensuring that devtool-base and args.branch (devtool) exist.
+ # The check_commits logic will cause an exception if either one
+ # of these doesn't exist
try:
(stdout, _) = bb.process.run('git branch --contains devtool-base', cwd=srctree)
+ bb.process.run('git rev-parse %s' % args.branch, cwd=srctree)
except bb.process.ExecutionError:
stdout = ''
if stdout:
@@ -779,11 +927,11 @@ def modify(args, config, basepath, workspace):
for line in stdout.splitlines():
if line.startswith('*'):
(stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
- initial_rev = stdout.rstrip()
- if not initial_rev:
+ initial_revs["."] = stdout.rstrip()
+ if "." not in initial_revs:
# Otherwise, just grab the head revision
(stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
- initial_rev = stdout.rstrip()
+ initial_revs["."] = stdout.rstrip()
branch_patches = {}
if check_commits:
@@ -795,62 +943,86 @@ def modify(args, config, basepath, workspace):
if branchname.startswith(override_branch_prefix):
branches.append(branchname)
if branches:
- logger.warn('SRC_URI is conditionally overridden in this recipe, thus several %s* branches have been created, one for each override that makes changes to SRC_URI. It is recommended that you make changes to the %s branch first, then checkout and rebase each %s* branch and update any unique patches there (duplicates on those branches will be ignored by devtool finish/update-recipe)' % (override_branch_prefix, args.branch, override_branch_prefix))
+ logger.warning('SRC_URI is conditionally overridden in this recipe, thus several %s* branches have been created, one for each override that makes changes to SRC_URI. It is recommended that you make changes to the %s branch first, then checkout and rebase each %s* branch and update any unique patches there (duplicates on those branches will be ignored by devtool finish/update-recipe)' % (override_branch_prefix, args.branch, override_branch_prefix))
branches.insert(0, args.branch)
seen_patches = []
for branch in branches:
branch_patches[branch] = []
- (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree)
- for line in stdout.splitlines():
- line = line.strip()
- if line.startswith(oe.patch.GitApplyTree.patch_line_prefix):
- origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip()
- if not origpatch in seen_patches:
- seen_patches.append(origpatch)
- branch_patches[branch].append(origpatch)
+ (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree)
+ for sha1 in stdout.splitlines():
+ notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip())
+ origpatch = notes.get(oe.patch.GitApplyTree.original_patch)
+ if origpatch and origpatch not in seen_patches:
+ seen_patches.append(origpatch)
+ branch_patches[branch].append(origpatch)
# Need to grab this here in case the source is within a subdirectory
srctreebase = srctree
-
- # Check that recipe isn't using a shared workdir
- s = os.path.abspath(rd.getVar('S'))
- workdir = os.path.abspath(rd.getVar('WORKDIR'))
- if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
- # Handle if S is set to a subdirectory of the source
- srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
- srctree = os.path.join(srctree, srcsubdir)
+ srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
bb.utils.mkdirhier(os.path.dirname(appendfile))
with open(appendfile, 'w') as f:
- f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n')
+ # if not present, add type=git-dependency to the secondary sources
+ # (non local files) so they can be extracted correctly when building a recipe after
+ # doing a devtool modify on it
+ src_uri = rd.getVar('SRC_URI').split()
+ src_uri_append = []
+ src_uri_remove = []
+
+ # Assume first entry is main source extracted in ${S} so skip it
+ src_uri = src_uri[1::]
+
+ # Add "type=git-dependency" to all non local sources
+ for url in src_uri:
+ if not url.startswith('file://') and not 'type=' in url:
+ src_uri_remove.append(url)
+ src_uri_append.append('%s;type=git-dependency' % url)
+
+ if src_uri_remove:
+ f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove))
+ f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append))
+
+ f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n')
# Local files can be modified/tracked in separate subdir under srctree
# Mostly useful for packages with S != WORKDIR
- f.write('FILESPATH_prepend := "%s:"\n' %
+ f.write('FILESPATH:prepend := "%s:"\n' %
os.path.join(srctreebase, 'oe-local-files'))
f.write('# srctreebase: %s\n' % srctreebase)
f.write('\ninherit externalsrc\n')
f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
- f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))
+ f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
if b_is_s:
- f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
+ f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
if bb.data.inherits_class('kernel', rd):
f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
- 'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n')
- f.write('\ndo_patch() {\n'
- ' :\n'
- '}\n')
- f.write('\ndo_configure_append() {\n'
+ 'do_fetch do_unpack do_kernel_configcheck"\n')
+ f.write('\ndo_patch[noexec] = "1"\n')
+ f.write('\ndo_configure:append() {\n'
' cp ${B}/.config ${S}/.config.baseline\n'
' ln -sfT ${B}/.config ${S}/.config.new\n'
'}\n')
- if initial_rev:
- f.write('\n# initial_rev: %s\n' % initial_rev)
- for commit in commits:
- f.write('# commit: %s\n' % commit)
+ f.write('\ndo_kernel_configme:prepend() {\n'
+ ' if [ -e ${S}/.config ]; then\n'
+ ' mv ${S}/.config ${S}/.config.old\n'
+ ' fi\n'
+ '}\n')
+ if rd.getVarFlag('do_menuconfig', 'task'):
+ f.write('\ndo_configure:append() {\n'
+ ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n'
+ ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n'
+ ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n'
+ ' fi\n'
+ '}\n')
+ if initial_revs:
+ for name, rev in initial_revs.items():
+ f.write('\n# initial_rev %s: %s\n' % (name, rev))
+ if name in commits:
+ for commit in commits[name]:
+ f.write('# commit %s: %s\n' % (name, commit))
if branch_patches:
for branch in branch_patches:
if branch == args.branch:
@@ -970,10 +1142,10 @@ def rename(args, config, basepath, workspace):
# Rename bbappend
logger.info('Renaming %s to %s' % (append, newappend))
- os.rename(append, newappend)
+ bb.utils.rename(append, newappend)
# Rename recipe file
logger.info('Renaming %s to %s' % (recipefile, newfile))
- os.rename(recipefile, newfile)
+ bb.utils.rename(recipefile, newfile)
# Rename source tree if it's the default path
appendmd5 = None
@@ -1061,7 +1233,7 @@ def rename(args, config, basepath, workspace):
return 0
-def _get_patchset_revs(srctree, recipe_path, initial_rev=None):
+def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refresh=False):
"""Get initial and update rev of a recipe. These are the start point of the
whole patchset and start point for the patches to be re-generated/updated.
"""
@@ -1073,44 +1245,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None):
branchname = stdout.rstrip()
# Parse initial rev from recipe if not specified
- commits = []
+ commits = {}
patches = []
+ initial_revs = {}
with open(recipe_path, 'r') as f:
for line in f:
- if line.startswith('# initial_rev:'):
- if not initial_rev:
- initial_rev = line.split(':')[-1].strip()
- elif line.startswith('# commit:'):
- commits.append(line.split(':')[-1].strip())
- elif line.startswith('# patches_%s:' % branchname):
- patches = line.split(':')[-1].strip().split(',')
-
- update_rev = initial_rev
- changed_revs = None
- if initial_rev:
+ pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
+ match = re.search(pattern, line)
+ if match:
+ name = match.group(1)
+ rev = match.group(2)
+ if line.startswith('# initial_rev'):
+ if not (name == "." and initial_rev):
+ initial_revs[name] = rev
+ elif line.startswith('# commit') and not force_patch_refresh:
+ if name not in commits:
+ commits[name] = [rev]
+ else:
+ commits[name].append(rev)
+ elif line.startswith('# patches_%s:' % branchname):
+ patches = line.split(':')[-1].strip().split(',')
+
+ update_revs = dict(initial_revs)
+ changed_revs = {}
+ for name, rev in initial_revs.items():
# Find first actually changed revision
stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
- initial_rev, cwd=srctree)
+ rev, cwd=os.path.join(srctree, name))
newcommits = stdout.split()
- for i in range(min(len(commits), len(newcommits))):
- if newcommits[i] == commits[i]:
- update_rev = commits[i]
+ if name in commits:
+ for i in range(min(len(commits[name]), len(newcommits))):
+ if newcommits[i] == commits[name][i]:
+ update_revs[name] = commits[name][i]
try:
stdout, _ = bb.process.run('git cherry devtool-patched',
- cwd=srctree)
+ cwd=os.path.join(srctree, name))
except bb.process.ExecutionError as err:
stdout = None
- if stdout is not None:
- changed_revs = []
+ if stdout is not None and not force_patch_refresh:
for line in stdout.splitlines():
if line.startswith('+ '):
rev = line.split()[1]
if rev in newcommits:
- changed_revs.append(rev)
+ if name not in changed_revs:
+ changed_revs[name] = [rev]
+ else:
+ changed_revs[name].append(rev)
- return initial_rev, update_rev, changed_revs, patches
+ return initial_revs, update_revs, changed_revs, patches
def _remove_file_entries(srcuri, filelist):
"""Remove file:// entries from SRC_URI"""
@@ -1165,14 +1349,17 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru
raise
-def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
+def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
"""Export patches from srctree to given location.
Returns three-tuple of dicts:
1. updated - patches that already exist in SRCURI
2. added - new patches that don't exist in SRCURI
3. removed - patches that exist in SRCURI but not in exported patches
- In each dict the key is the 'basepath' of the URI and value is the
- absolute path to the existing file in recipe space (if any).
+ In each dict the key is the 'basepath' of the URI and value is:
+ - for updated and added dicts, a dict with 2 optional keys:
+ - 'path': the absolute path to the existing file in recipe space (if any)
+ - 'patchdir': the directory in which the patch should be applied (if any)
+ - for removed dict, the absolute path to the existing file in recipe space
"""
import oe.recipeutils
from oe.patch import GitApplyTree
@@ -1186,54 +1373,60 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
# Generate patches from Git, exclude local files directory
patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
- GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec)
-
- new_patches = sorted(os.listdir(destdir))
- for new_patch in new_patches:
- # Strip numbering from patch names. If it's a git sequence named patch,
- # the numbers might not match up since we are starting from a different
- # revision This does assume that people are using unique shortlog
- # values, but they ought to be anyway...
- new_basename = seqpatch_re.match(new_patch).group(2)
- match_name = None
- for old_patch in existing_patches:
- old_basename = seqpatch_re.match(old_patch).group(2)
- old_basename_splitext = os.path.splitext(old_basename)
- if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
- old_patch_noext = os.path.splitext(old_patch)[0]
- match_name = old_patch_noext
- break
- elif new_basename == old_basename:
- match_name = old_patch
- break
- if match_name:
- # Rename patch files
- if new_patch != match_name:
- os.rename(os.path.join(destdir, new_patch),
- os.path.join(destdir, match_name))
- # Need to pop it off the list now before checking changed_revs
- oldpath = existing_patches.pop(old_patch)
- if changed_revs is not None:
- # Avoid updating patches that have not actually changed
- with open(os.path.join(destdir, match_name), 'r') as f:
- firstlineitems = f.readline().split()
- # Looking for "From <hash>" line
- if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
- if not firstlineitems[1] in changed_revs:
- continue
- # Recompress if necessary
- if oldpath.endswith(('.gz', '.Z')):
- bb.process.run(['gzip', match_name], cwd=destdir)
- if oldpath.endswith('.gz'):
- match_name += '.gz'
- else:
- match_name += '.Z'
- elif oldpath.endswith('.bz2'):
- bb.process.run(['bzip2', match_name], cwd=destdir)
- match_name += '.bz2'
- updated[match_name] = oldpath
- else:
- added[new_patch] = None
+ GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec)
+ for dirpath, dirnames, filenames in os.walk(destdir):
+ new_patches = filenames
+ reldirpath = os.path.relpath(dirpath, destdir)
+ for new_patch in new_patches:
+ # Strip numbering from patch names. If it's a git sequence named patch,
+ # the numbers might not match up since we are starting from a different
+ # revision. This does assume that people are using unique shortlog
+ # values, but they ought to be anyway...
+ new_basename = seqpatch_re.match(new_patch).group(2)
+ match_name = None
+ for old_patch in existing_patches:
+ old_basename = seqpatch_re.match(old_patch).group(2)
+ old_basename_splitext = os.path.splitext(old_basename)
+ if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
+ old_patch_noext = os.path.splitext(old_patch)[0]
+ match_name = old_patch_noext
+ break
+ elif new_basename == old_basename:
+ match_name = old_patch
+ break
+ if match_name:
+ # Rename patch files
+ if new_patch != match_name:
+ bb.utils.rename(os.path.join(destdir, new_patch),
+ os.path.join(destdir, match_name))
+ # Need to pop it off the list now before checking changed_revs
+ oldpath = existing_patches.pop(old_patch)
+ if changed_revs is not None and dirpath in changed_revs:
+ # Avoid updating patches that have not actually changed
+ with open(os.path.join(dirpath, match_name), 'r') as f:
+ firstlineitems = f.readline().split()
+ # Looking for "From <hash>" line
+ if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
+ if not firstlineitems[1] in changed_revs[dirpath]:
+ continue
+ # Recompress if necessary
+ if oldpath.endswith(('.gz', '.Z')):
+ bb.process.run(['gzip', match_name], cwd=destdir)
+ if oldpath.endswith('.gz'):
+ match_name += '.gz'
+ else:
+ match_name += '.Z'
+ elif oldpath.endswith('.bz2'):
+ bb.process.run(['bzip2', match_name], cwd=destdir)
+ match_name += '.bz2'
+ updated[match_name] = {'path' : oldpath}
+ if reldirpath != ".":
+ updated[match_name]['patchdir'] = reldirpath
+ else:
+ added[new_patch] = {}
+ if reldirpath != ".":
+ added[new_patch]['patchdir'] = reldirpath
+
return (updated, added, existing_patches)
@@ -1270,8 +1463,10 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1. updated - files that already exist in SRCURI
2. added - new files that don't exist in SRCURI
3. removed - files that exist in SRCURI but not in exported files
- In each dict the key is the 'basepath' of the URI and value is the
- absolute path to the existing file in recipe space (if any).
+ In each dict the key is the 'basepath' of the URI and value is:
+ - for updated and added dicts, a dict with 1 optional key:
+ - 'path': the absolute path to the existing file in recipe space (if any)
+ - for removed dict, the absolute path to the existing file in recipe space
"""
import oe.recipeutils
@@ -1284,6 +1479,18 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
updated = OrderedDict()
added = OrderedDict()
removed = OrderedDict()
+
+ # Get current branch and return early with empty lists
+ # if on one of the override branches
+ # (local files are provided only for the main branch and processing
+ # them against lists from recipe overrides will result in mismatches
+ # and broken modifications to recipes).
+ stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
+ cwd=srctree)
+ branchname = stdout.rstrip()
+ if branchname.startswith(override_branch_prefix):
+ return (updated, added, removed)
+
local_files_dir = os.path.join(srctreebase, 'oe-local-files')
git_files = _git_ls_tree(srctree)
if 'oe-local-files' in git_files:
@@ -1321,15 +1528,29 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
if os.path.exists(os.path.join(local_files_dir, fragment_fn)):
os.unlink(os.path.join(local_files_dir, fragment_fn))
+ # Special handling for cml1, ccmake, etc. bbclasses that generate
+ # configuration fragment files that are consumed as source files
+ for frag_class, frag_name in [("cml1", "fragment.cfg"), ("ccmake", "site-file.cmake")]:
+ if bb.data.inherits_class(frag_class, rd):
+ srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name)
+ if os.path.exists(srcpath):
+ if frag_name not in new_set:
+ new_set.append(frag_name)
+ # copy fragment into destdir
+ shutil.copy2(srcpath, destdir)
+ # copy fragment into local files if exists
+ if os.path.isdir(local_files_dir):
+ shutil.copy2(srcpath, local_files_dir)
+
if new_set is not None:
for fname in new_set:
if fname in existing_files:
origpath = existing_files.pop(fname)
workpath = os.path.join(local_files_dir, fname)
if not filecmp.cmp(origpath, workpath):
- updated[fname] = origpath
+ updated[fname] = {'path' : origpath}
elif fname != '.gitignore':
- added[fname] = None
+ added[fname] = {}
workdir = rd.getVar('WORKDIR')
s = rd.getVar('S')
@@ -1346,7 +1567,7 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
if os.path.exists(fpath):
origpath = existing_files.pop(fname)
if not filecmp.cmp(origpath, fpath):
- updated[fpath] = origpath
+ updated[fpath] = {'path' : origpath}
removed = existing_files
return (updated, added, removed)
@@ -1375,6 +1596,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
recipedir = os.path.basename(recipefile)
logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix))
+ # Get original SRCREV
+ old_srcrev = rd.getVar('SRCREV') or ''
+ if old_srcrev == "INVALID":
+ raise DevtoolError('Update mode srcrev is only valid for recipe fetched from an SCM repository')
+ old_srcrev = {'.': old_srcrev}
+
# Get HEAD revision
try:
stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
@@ -1401,13 +1628,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
if not no_remove:
# Find list of existing patches in recipe file
patches_dir = tempfile.mkdtemp(dir=tempdir)
- old_srcrev = rd.getVar('SRCREV') or ''
upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
patches_dir)
logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))
# Remove deleted local files and "overlapping" patches
- remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values())
+ remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value]
if remove_files:
removedentries = _remove_file_entries(srcuri, remove_files)[0]
update_srcuri = True
@@ -1421,14 +1647,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
patchfields['SRC_URI'] = '\\\n '.join(srcuri)
if dry_run_outdir:
logger.info('Creating bbappend (dry-run)')
- else:
- appendfile, destpath = oe.recipeutils.bbappend_recipe(
- rd, appendlayerdir, files, wildcardver=wildcard_version,
- extralines=patchfields, removevalues=removevalues,
- redirect_output=dry_run_outdir)
+ appendfile, destpath = oe.recipeutils.bbappend_recipe(
+ rd, appendlayerdir, files, wildcardver=wildcard_version,
+ extralines=patchfields, removevalues=removevalues,
+ redirect_output=dry_run_outdir)
else:
files_dir = _determine_files_dir(rd)
- for basepath, path in upd_f.items():
+ for basepath, param in upd_f.items():
+ path = param['path']
logger.info('Updating file %s%s' % (basepath, dry_run_suffix))
if os.path.isabs(basepath):
# Original file (probably with subdir pointing inside source tree)
@@ -1438,7 +1664,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
_move_file(os.path.join(local_files_dir, basepath), path,
dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
update_srcuri= True
- for basepath, path in new_f.items():
+ for basepath, param in new_f.items():
+ path = param['path']
logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
_move_file(os.path.join(local_files_dir, basepath),
os.path.join(files_dir, basepath),
@@ -1459,7 +1686,7 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
_remove_source_files(appendlayerdir, remove_files, destpath, no_report_remove, dry_run=dry_run_outdir)
return True, appendfile, remove_files
-def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None):
+def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False):
"""Implement the 'patch' mode of update-recipe"""
import bb
import oe.recipeutils
@@ -1470,9 +1697,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
if not os.path.exists(append):
raise DevtoolError('unable to find workspace bbappend for recipe %s' %
recipename)
+ srctreebase = workspace[recipename]['srctreebase']
+ relpatchdir = os.path.relpath(srctreebase, srctree)
+ if relpatchdir == '.':
+ patchdir_params = {}
+ else:
+ patchdir_params = {'patchdir': relpatchdir}
- initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev)
- if not initial_rev:
+ def srcuri_entry(basepath, patchdir_params):
+ if patchdir_params:
+ paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items())
+ else:
+ paramstr = ''
+ return 'file://%s%s' % (basepath, paramstr)
+
+ initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
+ if not initial_revs:
raise DevtoolError('Unable to find initial revision - please specify '
'it with --initial-rev')
@@ -1486,61 +1726,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
tempdir = tempfile.mkdtemp(prefix='devtool')
try:
local_files_dir = tempfile.mkdtemp(dir=tempdir)
- if filter_patches:
- upd_f = {}
- new_f = {}
- del_f = {}
- else:
- srctreebase = workspace[recipename]['srctreebase']
- upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
-
- remove_files = []
- if not no_remove:
- # Get all patches from source tree and check if any should be removed
- all_patches_dir = tempfile.mkdtemp(dir=tempdir)
- _, _, del_p = _export_patches(srctree, rd, initial_rev,
- all_patches_dir)
- # Remove deleted local files and patches
- remove_files = list(del_f.values()) + list(del_p.values())
+ upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
# Get updated patches from source tree
patches_dir = tempfile.mkdtemp(dir=tempdir)
- upd_p, new_p, _ = _export_patches(srctree, rd, update_rev,
+ upd_p, new_p, _ = _export_patches(srctree, rd, update_revs,
patches_dir, changed_revs)
+ # Get all patches from source tree and check if any should be removed
+ all_patches_dir = tempfile.mkdtemp(dir=tempdir)
+ _, _, del_p = _export_patches(srctree, rd, initial_revs,
+ all_patches_dir)
logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
if filter_patches:
- new_p = {}
- upd_p = {k:v for k,v in upd_p.items() if k in filter_patches}
- remove_files = [f for f in remove_files if f in filter_patches]
+ new_p = OrderedDict()
+ upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches)
+ del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches)
+ remove_files = []
+ if not no_remove:
+ # Remove deleted local files and patches
+ remove_files = list(del_f.values()) + list(del_p.values())
updatefiles = False
updaterecipe = False
destpath = None
srcuri = (rd.getVar('SRC_URI', False) or '').split()
+
if appendlayerdir:
- files = dict((os.path.join(local_files_dir, key), val) for
+ files = OrderedDict((os.path.join(local_files_dir, key), val) for
key, val in list(upd_f.items()) + list(new_f.items()))
- files.update(dict((os.path.join(patches_dir, key), val) for
+ files.update(OrderedDict((os.path.join(patches_dir, key), val) for
key, val in list(upd_p.items()) + list(new_p.items())))
+
+ params = []
+ for file, param in files.items():
+ patchdir_param = dict(patchdir_params)
+ patchdir = param.get('patchdir', ".")
+ if patchdir != "." :
+ if patchdir_param:
+ patchdir_param['patchdir'] += patchdir
+ else:
+ patchdir_param['patchdir'] = patchdir
+ params.append(patchdir_param)
+
if files or remove_files:
removevalues = None
if remove_files:
removedentries, remaining = _remove_file_entries(
srcuri, remove_files)
if removedentries or remaining:
- remaining = ['file://' + os.path.basename(item) for
+ remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for
item in remaining]
removevalues = {'SRC_URI': removedentries + remaining}
appendfile, destpath = oe.recipeutils.bbappend_recipe(
rd, appendlayerdir, files,
wildcardver=wildcard_version,
removevalues=removevalues,
- redirect_output=dry_run_outdir)
+ redirect_output=dry_run_outdir,
+ params=params)
else:
logger.info('No patches or local source files needed updating')
else:
# Update existing files
files_dir = _determine_files_dir(rd)
- for basepath, path in upd_f.items():
+ for basepath, param in upd_f.items():
+ path = param['path']
logger.info('Updating file %s' % basepath)
if os.path.isabs(basepath):
# Original file (probably with subdir pointing inside source tree)
@@ -1551,14 +1799,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
_move_file(os.path.join(local_files_dir, basepath), path,
dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
updatefiles = True
- for basepath, path in upd_p.items():
- patchfn = os.path.join(patches_dir, basepath)
+ for basepath, param in upd_p.items():
+ path = param['path']
+ patchdir = param.get('patchdir', ".")
+ if patchdir != "." :
+ patchdir_param = dict(patchdir_params)
+ if patchdir_param:
+ patchdir_param['patchdir'] += patchdir
+ else:
+ patchdir_param['patchdir'] = patchdir
+ patchfn = os.path.join(patches_dir, patchdir, basepath)
if os.path.dirname(path) + '/' == dl_dir:
# This is a downloaded patch file - we now need to
# replace the entry in SRC_URI with our local version
logger.info('Replacing remote patch %s with updated local version' % basepath)
path = os.path.join(files_dir, basepath)
- _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath)
+ _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param))
updaterecipe = True
else:
logger.info('Updating patch %s%s' % (basepath, dry_run_suffix))
@@ -1566,21 +1822,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
updatefiles = True
# Add any new files
- for basepath, path in new_f.items():
+ for basepath, param in new_f.items():
logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
_move_file(os.path.join(local_files_dir, basepath),
os.path.join(files_dir, basepath),
dry_run_outdir=dry_run_outdir,
base_outdir=recipedir)
- srcuri.append('file://%s' % basepath)
+ srcuri.append(srcuri_entry(basepath, patchdir_params))
updaterecipe = True
- for basepath, path in new_p.items():
+ for basepath, param in new_p.items():
+ patchdir = param.get('patchdir', ".")
logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix))
- _move_file(os.path.join(patches_dir, basepath),
+ _move_file(os.path.join(patches_dir, patchdir, basepath),
os.path.join(files_dir, basepath),
dry_run_outdir=dry_run_outdir,
base_outdir=recipedir)
- srcuri.append('file://%s' % basepath)
+ params = dict(patchdir_params)
+ if patchdir != "." :
+ if params:
+ params['patchdir'] += patchdir
+ else:
+ params['patchdir'] = patchdir
+
+ srcuri.append(srcuri_entry(basepath, params))
updaterecipe = True
# Update recipe, if needed
if _remove_file_entries(srcuri, remove_files)[0]:
@@ -1603,7 +1867,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
def _guess_recipe_update_mode(srctree, rdata):
"""Guess the recipe update mode to use"""
- src_uri = (rdata.getVar('SRC_URI', False) or '').split()
+ src_uri = (rdata.getVar('SRC_URI') or '').split()
git_uris = [uri for uri in src_uri if uri.startswith('git://')]
if not git_uris:
return 'patch'
@@ -1623,7 +1887,7 @@ def _guess_recipe_update_mode(srctree, rdata):
return 'patch'
-def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False):
+def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False):
srctree = workspace[recipename]['srctree']
if mode == 'auto':
mode = _guess_recipe_update_mode(srctree, rd)
@@ -1637,6 +1901,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver
for line in stdout.splitlines():
branchname = line[2:]
if line.startswith('* '):
+ if 'HEAD' in line:
+ raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"')
startbranch = branchname
if branchname.startswith(override_branch_prefix):
override_branches.append(branchname)
@@ -1677,7 +1943,7 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver
if mode == 'srcrev':
updated, appendf, removed = _update_recipe_srcrev(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir)
elif mode == 'patch':
- updated, appendf, removed = _update_recipe_patch(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir)
+ updated, appendf, removed = _update_recipe_patch(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir, force_patch_refresh)
else:
raise DevtoolError('update_recipe: invalid mode %s' % mode)
if updated:
@@ -1715,12 +1981,12 @@ def update_recipe(args, config, basepath, workspace):
if args.dry_run:
dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
dry_run_outdir = dry_run_output.name
- updated, _, _ = _update_recipe(args.recipename, workspace, rd, args.mode, args.append, args.wildcard_version, args.no_remove, args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides)
+ updated, _, _ = _update_recipe(args.recipename, workspace, rd, args.mode, args.append, args.wildcard_version, args.no_remove, args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides, force_patch_refresh=args.force_patch_refresh)
if updated:
rf = rd.getVar('FILE')
if rf.startswith(config.workspace_path):
- logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
+ logger.warning('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
finally:
tinfoil.shutdown()
@@ -1742,8 +2008,9 @@ def status(args, config, basepath, workspace):
return 0
-def _reset(recipes, no_clean, config, basepath, workspace):
+def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
"""Reset one or more recipes"""
+ import oe.path
def clean_preferred_provider(pn, layerconf_path):
"""Remove PREFERRED_PROVIDER from layer.conf'"""
@@ -1790,29 +2057,54 @@ def _reset(recipes, no_clean, config, basepath, workspace):
for pn in recipes:
_check_preserve(config, pn)
+ appendfile = workspace[pn]['bbappend']
+ if os.path.exists(appendfile):
+ # This shouldn't happen, but is possible if devtool errored out prior to
+ # writing the md5 file. We need to delete this here or the recipe won't
+ # actually be reset
+ os.remove(appendfile)
+
preservepath = os.path.join(config.workspace_path, 'attic', pn, pn)
def preservedir(origdir):
if os.path.exists(origdir):
for root, dirs, files in os.walk(origdir):
for fn in files:
- logger.warn('Preserving %s in %s' % (fn, preservepath))
+ logger.warning('Preserving %s in %s' % (fn, preservepath))
_move_file(os.path.join(origdir, fn),
os.path.join(preservepath, fn))
for dn in dirs:
preservedir(os.path.join(root, dn))
os.rmdir(origdir)
- preservedir(os.path.join(config.workspace_path, 'recipes', pn))
+ recipefile = workspace[pn]['recipefile']
+ if recipefile and oe.path.is_path_parent(config.workspace_path, recipefile):
+ # This should always be true if recipefile is set, but just in case
+ preservedir(os.path.dirname(recipefile))
# We don't automatically create this dir next to appends, but the user can
preservedir(os.path.join(config.workspace_path, 'appends', pn))
srctreebase = workspace[pn]['srctreebase']
if os.path.isdir(srctreebase):
if os.listdir(srctreebase):
- # We don't want to risk wiping out any work in progress
- logger.info('Leaving source tree %s as-is; if you no '
- 'longer need it then please delete it manually'
- % srctreebase)
+ if remove_work:
+ logger.info('-r argument used on %s, removing source tree.'
+ ' You will lose any unsaved work' %pn)
+ shutil.rmtree(srctreebase)
+ else:
+ # We don't want to risk wiping out any work in progress
+ if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')):
+ from datetime import datetime
+ preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
+ logger.info('Preserving source tree in %s\nIf you no '
+ 'longer need it then please delete it manually.\n'
+ 'It is also possible to reuse it via devtool source tree argument.'
+ % preservesrc)
+ bb.utils.mkdirhier(os.path.dirname(preservesrc))
+ shutil.move(srctreebase, preservesrc)
+ else:
+ logger.info('Leaving source tree %s as-is; if you no '
+ 'longer need it then please delete it manually'
+ % srctreebase)
else:
# This is unlikely, but if it's empty we can just remove it
os.rmdir(srctreebase)
@@ -1822,6 +2114,10 @@ def _reset(recipes, no_clean, config, basepath, workspace):
def reset(args, config, basepath, workspace):
"""Entry point for the devtool 'reset' subcommand"""
import bb
+ import shutil
+
+ recipes = ""
+
if args.recipename:
if args.all:
raise DevtoolError("Recipe cannot be specified if -a/--all is used")
@@ -1836,7 +2132,7 @@ def reset(args, config, basepath, workspace):
else:
recipes = args.recipename
- _reset(recipes, args.no_clean, config, basepath, workspace)
+ _reset(recipes, args.no_clean, args.remove_work, config, basepath, workspace)
return 0
@@ -1844,15 +2140,27 @@ def reset(args, config, basepath, workspace):
def _get_layer(layername, d):
"""Determine the base layer path for the specified layer name/path"""
layerdirs = d.getVar('BBLAYERS').split()
- layers = {os.path.basename(p): p for p in layerdirs}
+ layers = {} # {basename: layer_paths}
+ for p in layerdirs:
+ bn = os.path.basename(p)
+ if bn not in layers:
+ layers[bn] = [p]
+ else:
+ layers[bn].append(p)
# Provide some shortcuts
if layername.lower() in ['oe-core', 'openembedded-core']:
- layerdir = layers.get('meta', None)
+ layername = 'meta'
+ layer_paths = layers.get(layername, None)
+ if not layer_paths:
+ return os.path.abspath(layername)
+ elif len(layer_paths) == 1:
+ return os.path.abspath(layer_paths[0])
else:
- layerdir = layers.get(layername, None)
- if layerdir:
- layerdir = os.path.abspath(layerdir)
- return layerdir or layername
+ # multiple layers having the same base name
+ logger.warning("Multiple layers have the same base name '%s', use the first one '%s'." % (layername, layer_paths[0]))
+ logger.warning("Consider using path instead of base name to specify layer:\n\t\t%s" % '\n\t\t'.join(layer_paths))
+ return os.path.abspath(layer_paths[0])
+
def finish(args, config, basepath, workspace):
"""Entry point for the devtool 'finish' subcommand"""
@@ -1875,7 +2183,8 @@ def finish(args, config, basepath, workspace):
else:
raise DevtoolError('Source tree is not clean:\n\n%s\nEnsure you have committed your changes or use -f/--force if you are sure there\'s nothing that needs to be committed' % dirty)
- no_clean = False
+ no_clean = args.no_clean
+ remove_work=args.remove_work
tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
try:
rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -1938,7 +2247,7 @@ def finish(args, config, basepath, workspace):
if args.dry_run:
dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
dry_run_outdir = dry_run_output.name
- updated, appendfile, removed = _update_recipe(args.recipename, workspace, rd, args.mode, appendlayerdir, wildcard_version=True, no_remove=False, no_report_remove=removing_original, initial_rev=args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides)
+ updated, appendfile, removed = _update_recipe(args.recipename, workspace, rd, args.mode, appendlayerdir, wildcard_version=True, no_remove=False, no_report_remove=removing_original, initial_rev=args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides, force_patch_refresh=args.force_patch_refresh)
removed = [os.path.relpath(pth, recipedir) for pth in removed]
# Remove any old files in the case of an upgrade
@@ -2027,7 +2336,7 @@ def finish(args, config, basepath, workspace):
if args.dry_run:
logger.info('Resetting recipe (dry-run)')
else:
- _reset([args.recipename], no_clean=no_clean, config=config, basepath=basepath, workspace=workspace)
+ _reset([args.recipename], no_clean=no_clean, remove_work=remove_work, config=config, basepath=basepath, workspace=workspace)
return 0
@@ -2054,7 +2363,8 @@ def register_commands(subparsers, context):
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
- parser_add.add_argument('--fetch-dev', help='For npm, also fetch devDependencies', action="store_true")
+ parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
+ parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true")
parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
group = parser_add.add_mutually_exclusive_group()
@@ -2125,6 +2435,7 @@ def register_commands(subparsers, context):
parser_update_recipe.add_argument('--no-remove', '-n', action="store_true", help='Don\'t remove patches, only add or update')
parser_update_recipe.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
parser_update_recipe.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
+ parser_update_recipe.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
parser_update_recipe.set_defaults(func=update_recipe)
parser_status = subparsers.add_parser('status', help='Show workspace status',
@@ -2138,6 +2449,7 @@ def register_commands(subparsers, context):
parser_reset.add_argument('recipename', nargs='*', help='Recipe to reset')
parser_reset.add_argument('--all', '-a', action="store_true", help='Reset all recipes (clear workspace)')
parser_reset.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
+ parser_reset.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory along with append')
parser_reset.set_defaults(func=reset)
parser_finish = subparsers.add_parser('finish', help='Finish working on a recipe in your workspace',
@@ -2148,6 +2460,9 @@ def register_commands(subparsers, context):
parser_finish.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
parser_finish.add_argument('--initial-rev', help='Override starting revision for patches')
parser_finish.add_argument('--force', '-f', action="store_true", help='Force continuing even if there are uncommitted changes in the source tree repository')
+ parser_finish.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory under workspace')
+ parser_finish.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
parser_finish.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
parser_finish.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
+ parser_finish.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
parser_finish.set_defaults(func=finish)