Diffstat (limited to 'scripts/lib/devtool')
-rw-r--r--  scripts/lib/devtool/__init__.py              |   61
-rw-r--r--  scripts/lib/devtool/build.py                 |   32
-rw-r--r--  scripts/lib/devtool/build_image.py           |   14
-rw-r--r--  scripts/lib/devtool/build_sdk.py             |   14
-rw-r--r--  scripts/lib/devtool/deploy.py                |  264
-rw-r--r--  scripts/lib/devtool/export.py                |   14
-rw-r--r--  scripts/lib/devtool/ide_plugins/__init__.py  |  282
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_code.py  |  463
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_none.py  |   53
-rwxr-xr-x  scripts/lib/devtool/ide_sdk.py               | 1070
-rw-r--r--  scripts/lib/devtool/import.py                |   20
-rw-r--r--  scripts/lib/devtool/menuconfig.py            |   81
-rw-r--r--  scripts/lib/devtool/package.py               |   12
-rw-r--r--  scripts/lib/devtool/runqemu.py               |   12
-rw-r--r--  scripts/lib/devtool/sdk.py                   |   20
-rw-r--r--  scripts/lib/devtool/search.py                |  113
-rw-r--r--  scripts/lib/devtool/standard.py              |  837
-rw-r--r--  scripts/lib/devtool/upgrade.py               |  364
-rw-r--r--  scripts/lib/devtool/utilcmds.py              |   12
19 files changed, 3092 insertions, 646 deletions
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 07d774dfb7..6133c1c5b4 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -4,18 +4,8 @@
#
# Copyright (C) 2014 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool plugins module"""
import os
@@ -88,12 +78,15 @@ def exec_fakeroot(d, cmd, **kwargs):
"""Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
# Grab the command and check it actually exists
fakerootcmd = d.getVar('FAKEROOTCMD')
+ fakerootenv = d.getVar('FAKEROOTENV')
+    return exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs)
+
+def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
if not os.path.exists(fakerootcmd):
logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built' % fakerootcmd)
return 2
# Set up the appropriate environment
newenv = dict(os.environ)
- fakerootenv = d.getVar('FAKEROOTENV')
for varvalue in fakerootenv.split():
if '=' in varvalue:
splitval = varvalue.split('=', 1)
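For reference, FAKEROOTENV holds space-separated VAR=value pairs. A minimal sketch of the merge the helper performs, with a hypothetical pseudo environment (variable names mirror the diff; the values are invented):

    import os

    # Hypothetical FAKEROOTENV value, roughly as BitBake might expand it
    fakerootenv = 'PSEUDO_PREFIX=/usr PSEUDO_LOCALSTATEDIR=/tmp/pseudo PSEUDO_DISABLED=0'

    newenv = dict(os.environ)
    for varvalue in fakerootenv.split():
        if '=' in varvalue:
            splitval = varvalue.split('=', 1)
            newenv[splitval[0]] = splitval[1]
    # newenv now layers the pseudo settings on top of the caller's environment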
@@ -205,7 +198,8 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
import oe.patch
if not os.path.exists(os.path.join(repodir, '.git')):
bb.process.run('git init', cwd=repodir)
- bb.process.run('git add .', cwd=repodir)
+ bb.process.run('git config --local gc.autodetach 0', cwd=repodir)
+ bb.process.run('git add -f -A .', cwd=repodir)
commit_cmd = ['git']
oe.patch.GitApplyTree.gitCommandUserOptions(commit_cmd, d=d)
commit_cmd += ['commit', '-q']
@@ -220,9 +214,50 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
commit_cmd += ['-m', commitmsg]
bb.process.run(commit_cmd, cwd=repodir)
+ # Ensure singletask.lock (as used by externalsrc.bbclass) is ignored by git
+ gitinfodir = os.path.join(repodir, '.git', 'info')
+ try:
+ os.mkdir(gitinfodir)
+ except FileExistsError:
+ pass
+ excludes = []
+ excludefile = os.path.join(gitinfodir, 'exclude')
+ try:
+ with open(excludefile, 'r') as f:
+ excludes = f.readlines()
+ except FileNotFoundError:
+ pass
+ if 'singletask.lock\n' not in excludes:
+ excludes.append('singletask.lock\n')
+ with open(excludefile, 'w') as f:
+ for line in excludes:
+ f.write(line)
+
bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
bb.process.run('git tag -f %s' % basetag, cwd=repodir)
+ # if recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
+ # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
+ stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
+ found = False
+ for line in stdout.splitlines():
+ if line.endswith("/"):
+ new_dir = line.split()[1]
+ for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
+ if ".git" in dirs + files:
+ (stdout, _) = bb.process.run('git remote', cwd=root)
+ remote = stdout.splitlines()[0]
+ (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
+ remote_url = stdout.splitlines()[0]
+ logger.error(os.path.relpath(os.path.join(root, ".."), root))
+ bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
+ found = True
+ if found:
+ oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
+ found = False
+ if os.path.exists(os.path.join(repodir, '.gitmodules')):
+ bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
+
def recipe_to_append(recipefile, config, wildcard=False):
"""
Convert a recipe file to a bbappend file path within the workspace.
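A note on the submodule auto-registration added above: it keys off `git status --porcelain` output, where a trailing '/' marks an untracked directory. A short sketch with hypothetical output (the directory name is invented):

    # Hypothetical porcelain output after the initial devtool commit
    stdout = "?? vendored-lib/\n M README\n"
    for line in stdout.splitlines():
        if line.endswith("/"):
            new_dir = line.split()[1]  # -> 'vendored-lib/'
            # The patch then walks new_dir; any entry containing .git is an
            # embedded repository and gets added as a regular git submodule.
            print(new_dir)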
diff --git a/scripts/lib/devtool/build.py b/scripts/lib/devtool/build.py
index 252379e9b2..935ffab46c 100644
--- a/scripts/lib/devtool/build.py
+++ b/scripts/lib/devtool/build.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool build plugin"""
import os
@@ -21,7 +11,8 @@ import bb
import logging
import argparse
import tempfile
-from devtool import exec_build_env_command, check_workspace_recipe, DevtoolError
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError
+from devtool import parse_recipe
logger = logging.getLogger('devtool')
@@ -53,8 +44,22 @@ def _get_build_tasks(config):
def build(args, config, basepath, workspace):
"""Entry point for the devtool 'build' subcommand"""
workspacepn = check_workspace_recipe(workspace, args.recipename, bbclassextend=True)
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ rd = parse_recipe(config, tinfoil, args.recipename, appends=True, filter_workspace=False)
+ if not rd:
+ return 1
+ deploytask = 'do_deploy' in rd.getVar('__BBTASKS')
+ finally:
+ tinfoil.shutdown()
- build_tasks = _get_build_tasks(config)
+ if args.clean:
+ # use clean instead of cleansstate to avoid messing things up in eSDK
+ build_tasks = ['do_clean']
+ else:
+ build_tasks = _get_build_tasks(config)
+ if deploytask:
+ build_tasks.append('do_deploy')
bbappend = workspace[workspacepn]['bbappend']
if args.disable_parallel_make:
@@ -83,4 +88,5 @@ def register_commands(subparsers, context):
group='working', order=50)
parser_build.add_argument('recipename', help='Recipe to build')
parser_build.add_argument('-s', '--disable-parallel-make', action="store_true", help='Disable make parallelism')
+ parser_build.add_argument('-c', '--clean', action='store_true', help='clean up recipe building results')
parser_build.set_defaults(func=build)
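The resulting task selection, as a standalone sketch (the default task list is an assumption for illustration; _get_build_tasks normally derives it from the 'build_task' config option):

    # Sketch only; defaults assumed, not taken verbatim from the patch
    def select_build_tasks(clean, has_do_deploy,
                           default_tasks=('do_populate_sysroot', 'do_packagedata')):
        if clean:
            # plain do_clean rather than cleansstate, to stay safe inside an eSDK
            return ['do_clean']
        tasks = list(default_tasks)
        if has_do_deploy:
            # recipes that define do_deploy (checked via __BBTASKS) get it appended
            tasks.append('do_deploy')
        return tasks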
diff --git a/scripts/lib/devtool/build_image.py b/scripts/lib/devtool/build_image.py
index e5810389be..980f90ddd6 100644
--- a/scripts/lib/devtool/build_image.py
+++ b/scripts/lib/devtool/build_image.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool plugin containing the build-image subcommand."""
@@ -123,7 +113,7 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task
with open(appendfile, 'w') as afile:
if packages:
# include packages from workspace recipes into the image
- afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages))
+ afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages))
if not task:
logger.info('Building image %s with the following '
'additional packages: %s', image, ' '.join(packages))
diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py
index b89d65b0cb..1cd4831d2b 100644
--- a/scripts/lib/devtool/build_sdk.py
+++ b/scripts/lib/devtool/build_sdk.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015-2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import subprocess
@@ -23,7 +13,7 @@ import shutil
import errno
import sys
import tempfile
-from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError
+from devtool import DevtoolError
from devtool import build_image
logger = logging.getLogger('devtool')
diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py
index 9cc492788b..b5ca8f2c2f 100644
--- a/scripts/lib/devtool/deploy.py
+++ b/scripts/lib/devtool/deploy.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool plugin containing the deploy subcommands"""
import logging
@@ -26,7 +16,7 @@ import bb.utils
import argparse_oe
import oe.types
-from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError
+from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError
logger = logging.getLogger('devtool')
@@ -143,16 +133,38 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals
return '\n'.join(lines)
-
-
def deploy(args, config, basepath, workspace):
"""Entry point for the devtool 'deploy' subcommand"""
- import math
- import oe.recipeutils
- import oe.package
+ import oe.utils
check_workspace_recipe(workspace, args.recipename, checksrc=False)
+ tinfoil = setup_tinfoil(basepath=basepath)
+ try:
+ try:
+ rd = tinfoil.parse_recipe(args.recipename)
+ except Exception as e:
+ raise DevtoolError('Exception parsing recipe %s: %s' %
+ (args.recipename, e))
+
+ srcdir = rd.getVar('D')
+ workdir = rd.getVar('WORKDIR')
+ path = rd.getVar('PATH')
+ strip_cmd = rd.getVar('STRIP')
+ libdir = rd.getVar('libdir')
+ base_libdir = rd.getVar('base_libdir')
+ max_process = oe.utils.get_bb_number_threads(rd)
+ fakerootcmd = rd.getVar('FAKEROOTCMD')
+ fakerootenv = rd.getVar('FAKEROOTENV')
+ finally:
+ tinfoil.shutdown()
+
+ return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args)
+
+def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args):
+ import math
+ import oe.package
+
try:
host, destdir = args.target.split(':')
except ValueError:
@@ -162,106 +174,108 @@ def deploy(args, config, basepath, workspace):
if not destdir.endswith('/'):
destdir += '/'
- tinfoil = setup_tinfoil(basepath=basepath)
- try:
- try:
- rd = tinfoil.parse_recipe(args.recipename)
- except Exception as e:
- raise DevtoolError('Exception parsing recipe %s: %s' %
- (args.recipename, e))
- recipe_outdir = rd.getVar('D')
- if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
- raise DevtoolError('No files to deploy - have you built the %s '
- 'recipe? If so, the install step has not installed '
- 'any files.' % args.recipename)
-
- if args.strip and not args.dry_run:
- # Fakeroot copy to new destination
- srcdir = recipe_outdir
- recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'deploy-target-stripped')
- if os.path.isdir(recipe_outdir):
- bb.utils.remove(recipe_outdir, True)
- exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
- os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or ''])
- oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'),
- rd.getVar('base_libdir'))
-
- filelist = []
- ftotalsize = 0
- for root, _, files in os.walk(recipe_outdir):
- for fn in files:
- # Get the size in kiB (since we'll be comparing it to the output of du -k)
- # MUST use lstat() here not stat() or getfilesize() since we don't want to
- # dereference symlinks
- fsize = int(math.ceil(float(os.lstat(os.path.join(root, fn)).st_size)/1024))
- ftotalsize += fsize
- # The path as it would appear on the target
- fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
- filelist.append((fpath, fsize))
-
- if args.dry_run:
- print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
- for item, _ in filelist:
- print(' %s' % item)
- return 0
-
- extraoptions = ''
- if args.no_host_check:
- extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
- if not args.show_status:
- extraoptions += ' -q'
-
- scp_port = ''
- ssh_port = ''
- if not args.port:
- raise DevtoolError("If you specify -P/--port then you must provide the port to be used to connect to the target")
- else:
- scp_port = "-P %s" % args.port
- ssh_port = "-p %s" % args.port
-
- # In order to delete previously deployed files and have the manifest file on
- # the target, we write out a shell script and then copy it to the target
- # so we can then run it (piping tar output to it).
- # (We cannot use scp here, because it doesn't preserve symlinks.)
- tmpdir = tempfile.mkdtemp(prefix='devtool')
- try:
- tmpscript = '/tmp/devtool_deploy.sh'
- tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
- shellscript = _prepare_remote_script(deploy=True,
- verbose=args.show_status,
- nopreserve=args.no_preserve,
- nocheckspace=args.no_check_space)
- # Write out the script to a file
- with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
- f.write(shellscript)
- # Write out the file list
- with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
- f.write('%d\n' % ftotalsize)
- for fpath, fsize in filelist:
- f.write('%s %d\n' % (fpath, fsize))
- # Copy them to the target
- ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
- if ret != 0:
- raise DevtoolError('Failed to copy script to %s - rerun with -s to '
- 'get a complete error message' % args.target)
- finally:
- shutil.rmtree(tmpdir)
+ recipe_outdir = srcdir
+ if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir):
+ raise DevtoolError('No files to deploy - have you built the %s '
+ 'recipe? If so, the install step has not installed '
+ 'any files.' % args.recipename)
+
+ if args.strip and not args.dry_run:
+ # Fakeroot copy to new destination
+ srcdir = recipe_outdir
+ recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped')
+ if os.path.isdir(recipe_outdir):
+ exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True)
+ exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True)
+ os.environ['PATH'] = ':'.join([os.environ['PATH'], path or ''])
+ oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process)
+
+ filelist = []
+ inodes = set({})
+ ftotalsize = 0
+ for root, _, files in os.walk(recipe_outdir):
+ for fn in files:
+ fstat = os.lstat(os.path.join(root, fn))
+ # Get the size in kiB (since we'll be comparing it to the output of du -k)
+ # MUST use lstat() here not stat() or getfilesize() since we don't want to
+ # dereference symlinks
+ if fstat.st_ino in inodes:
+ fsize = 0
+ else:
+ fsize = int(math.ceil(float(fstat.st_size)/1024))
+ inodes.add(fstat.st_ino)
+ ftotalsize += fsize
+ # The path as it would appear on the target
+ fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn)
+ filelist.append((fpath, fsize))
+
+ if args.dry_run:
+ print('Files to be deployed for %s on target %s:' % (args.recipename, args.target))
+ for item, _ in filelist:
+ print(' %s' % item)
+ return 0
- # Now run the script
- ret = exec_fakeroot(rd, 'tar cf - . | ssh %s %s %s \'sh %s %s %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
- if ret != 0:
- raise DevtoolError('Deploy failed - rerun with -s to get a complete '
- 'error message')
+ extraoptions = ''
+ if args.no_host_check:
+ extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+ if not args.show_status:
+ extraoptions += ' -q'
- logger.info('Successfully deployed %s' % recipe_outdir)
+ scp_sshexec = ''
+ ssh_sshexec = 'ssh'
+ if args.ssh_exec:
+ scp_sshexec = "-S %s" % args.ssh_exec
+ ssh_sshexec = args.ssh_exec
+ scp_port = ''
+ ssh_port = ''
+ if args.port:
+ scp_port = "-P %s" % args.port
+ ssh_port = "-p %s" % args.port
- files_list = []
- for root, _, files in os.walk(recipe_outdir):
- for filename in files:
- filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
- files_list.append(os.path.join(destdir, filename))
+ if args.key:
+ extraoptions += ' -i %s' % args.key
+
+ # In order to delete previously deployed files and have the manifest file on
+ # the target, we write out a shell script and then copy it to the target
+ # so we can then run it (piping tar output to it).
+ # (We cannot use scp here, because it doesn't preserve symlinks.)
+ tmpdir = tempfile.mkdtemp(prefix='devtool')
+ try:
+ tmpscript = '/tmp/devtool_deploy.sh'
+ tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list')
+ shellscript = _prepare_remote_script(deploy=True,
+ verbose=args.show_status,
+ nopreserve=args.no_preserve,
+ nocheckspace=args.no_check_space)
+ # Write out the script to a file
+ with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
+ f.write(shellscript)
+ # Write out the file list
+ with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f:
+ f.write('%d\n' % ftotalsize)
+ for fpath, fsize in filelist:
+ f.write('%s %d\n' % (fpath, fsize))
+ # Copy them to the target
+ ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+ if ret != 0:
+ raise DevtoolError('Failed to copy script to %s - rerun with -s to '
+ 'get a complete error message' % args.target)
finally:
- tinfoil.shutdown()
+ shutil.rmtree(tmpdir)
+
+ # Now run the script
+ ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True)
+ if ret != 0:
+ raise DevtoolError('Deploy failed - rerun with -s to get a complete '
+ 'error message')
+
+ logger.info('Successfully deployed %s' % recipe_outdir)
+
+ files_list = []
+ for root, _, files in os.walk(recipe_outdir):
+ for filename in files:
+ filename = os.path.relpath(os.path.join(root, filename), recipe_outdir)
+ files_list.append(os.path.join(destdir, filename))
return 0
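The inode set above prevents hard-linked files from being counted once per link, which would overstate the required space relative to `du -k` on the target. The same accounting in isolation (file names are hypothetical):

    import math
    import os

    inodes = set()
    ftotalsize = 0
    for fn in ('bin/busybox', 'bin/sh'):  # hypothetical hard links to one inode
        fstat = os.lstat(fn)  # lstat: do not dereference symlinks
        if fstat.st_ino in inodes:
            fsize = 0  # already counted via another link
        else:
            fsize = int(math.ceil(fstat.st_size / 1024))  # kiB, rounded up
        inodes.add(fstat.st_ino)
        ftotalsize += fsize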
@@ -278,11 +292,14 @@ def undeploy(args, config, basepath, workspace):
if not args.show_status:
extraoptions += ' -q'
+ scp_sshexec = ''
+ ssh_sshexec = 'ssh'
+ if args.ssh_exec:
+ scp_sshexec = "-S %s" % args.ssh_exec
+ ssh_sshexec = args.ssh_exec
scp_port = ''
ssh_port = ''
- if not args.port:
- raise DevtoolError("If you specify -P/--port then you must provide the port to be used to connect to the target")
- else:
+ if args.port:
scp_port = "-P %s" % args.port
ssh_port = "-p %s" % args.port
@@ -296,7 +313,7 @@ def undeploy(args, config, basepath, workspace):
with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f:
f.write(shellscript)
# Copy it to the target
- ret = subprocess.call("scp %s %s %s/* %s:%s" % (scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
+ ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True)
if ret != 0:
raise DevtoolError('Failed to copy script to %s - rerun with -s to '
'get a complete error message' % args.target)
@@ -304,7 +321,7 @@ def undeploy(args, config, basepath, workspace):
shutil.rmtree(tmpdir)
# Now run the script
- ret = subprocess.call('ssh %s %s %s \'sh %s %s\'' % (ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
+ ret = subprocess.call('%s %s %s %s \'sh %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename), shell=True)
if ret != 0:
raise DevtoolError('Undeploy failed - rerun with -s to get a complete '
'error message')
@@ -328,7 +345,10 @@ def register_commands(subparsers, context):
parser_deploy.add_argument('-n', '--dry-run', help='List files to be deployed only', action='store_true')
parser_deploy.add_argument('-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
parser_deploy.add_argument('--no-check-space', help='Do not check for available space before deploying', action='store_true')
- parser_deploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target')
+ parser_deploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
+ parser_deploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
+ parser_deploy.add_argument('-I', '--key',
+ help='Specify ssh private key for connection to the target')
strip_opts = parser_deploy.add_mutually_exclusive_group(required=False)
strip_opts.add_argument('-S', '--strip',
@@ -350,5 +370,9 @@ def register_commands(subparsers, context):
parser_undeploy.add_argument('-s', '--show-status', help='Show progress/status output', action='store_true')
parser_undeploy.add_argument('-a', '--all', help='Undeploy all recipes deployed on the target', action='store_true')
parser_undeploy.add_argument('-n', '--dry-run', help='List files to be undeployed only', action='store_true')
- parser_undeploy.add_argument('-P', '--port', default='22', help='Port to use for connection to the target')
+ parser_undeploy.add_argument('-e', '--ssh-exec', help='Executable to use in place of ssh')
+ parser_undeploy.add_argument('-P', '--port', help='Specify port to use for connection to the target')
+ parser_undeploy.add_argument('-I', '--key',
+ help='Specify ssh private key for connection to the target')
+
parser_undeploy.set_defaults(func=undeploy)
diff --git a/scripts/lib/devtool/export.py b/scripts/lib/devtool/export.py
index 13ee258e7a..01174edae5 100644
--- a/scripts/lib/devtool/export.py
+++ b/scripts/lib/devtool/export.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool export plugin"""
import os
@@ -84,7 +74,7 @@ def export(args, config, basepath, workspace):
# if all workspace is excluded, quit
if not len(set(workspace.keys()).difference(set(args.exclude))):
- logger.warn('All recipes in workspace excluded, nothing to export')
+ logger.warning('All recipes in workspace excluded, nothing to export')
return 0
exported = []
diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py
new file mode 100644
index 0000000000..19c2f61c5f
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/__init__.py
@@ -0,0 +1,282 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk IDE plugin interface definition and helper functions"""
+
+import errno
+import json
+import logging
+import os
+import stat
+from enum import Enum, auto
+from devtool import DevtoolError
+from bb.utils import mkdirhier
+
+logger = logging.getLogger('devtool')
+
+
+class BuildTool(Enum):
+ UNDEFINED = auto()
+ CMAKE = auto()
+ MESON = auto()
+
+ @property
+ def is_c_ccp(self):
+ if self is BuildTool.CMAKE:
+ return True
+ if self is BuildTool.MESON:
+ return True
+ return False
+
+
+class GdbCrossConfig:
+ """Base class defining the GDB configuration generator interface
+
+ Generate a GDB configuration for a binary on the target device.
+    Only one instance per binary is allowed, which makes it possible to assign
+    a unique port number to each gdbserver instance.
+ """
+ _gdbserver_port_next = 1234
+ _binaries = []
+
+ def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True):
+ self.image_recipe = image_recipe
+ self.modified_recipe = modified_recipe
+ self.gdb_cross = modified_recipe.gdb_cross
+ self.binary = binary
+ if binary in GdbCrossConfig._binaries:
+ raise DevtoolError(
+ "gdbserver config for binary %s is already generated" % binary)
+ GdbCrossConfig._binaries.append(binary)
+ self.script_dir = modified_recipe.ide_sdk_scripts_dir
+ self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit')
+ self.gdbserver_multi = gdbserver_multi
+ self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-')
+ self.gdbserver_port = GdbCrossConfig._gdbserver_port_next
+ GdbCrossConfig._gdbserver_port_next += 1
+ self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty)
+ # gdbserver start script
+ gdbserver_script_file = 'gdbserver_' + self.id_pretty
+ if self.gdbserver_multi:
+ gdbserver_script_file += "_m"
+ self.gdbserver_script = os.path.join(
+ self.script_dir, gdbserver_script_file)
+ # gdbinit file
+ self.gdbinit = os.path.join(
+ self.gdbinit_dir, 'gdbinit_' + self.id_pretty)
+ # gdb start script
+ self.gdb_script = os.path.join(
+ self.script_dir, 'gdb_' + self.id_pretty)
+
+ def _gen_gdbserver_start_script(self):
+ """Generate a shell command starting the gdbserver on the remote device via ssh
+
+ GDB supports two modes:
+ multi: gdbserver remains running over several debug sessions
+ once: gdbserver terminates after the debugged process terminates
+ """
+ cmd_lines = ['#!/bin/sh']
+ if self.gdbserver_multi:
+ temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty
+ gdbserver_cmd_start = temp_dir
+ gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; "
+ gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; "
+ gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % (
+ self.gdb_cross.gdbserver_path, self.gdbserver_port)
+ gdbserver_cmd_start += "echo \\$! > \\$TEMP_DIR/pid;"
+
+ gdbserver_cmd_stop = temp_dir
+ gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); "
+ gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; "
+
+ gdbserver_cmd_l = []
+ gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then')
+ gdbserver_cmd_l.append(' shift')
+ gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
+ self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop))
+ gdbserver_cmd_l.append('else')
+ gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % (
+ self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start))
+ gdbserver_cmd_l.append('fi')
+ gdbserver_cmd = os.linesep.join(gdbserver_cmd_l)
+ else:
+ gdbserver_cmd_start = "%s --once :%s %s" % (
+ self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary)
+ gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % (
+ self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)
+ cmd_lines.append(gdbserver_cmd)
+ GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True)
+
+ def _gen_gdbinit_config(self):
+ """Generate a gdbinit file for this binary and the corresponding gdbserver configuration"""
+ gdbinit_lines = ['# This file is generated by devtool ide-sdk']
+ if self.gdbserver_multi:
+ target_help = '# gdbserver --multi :%d' % self.gdbserver_port
+ remote_cmd = 'target extended-remote'
+ else:
+ target_help = '# gdbserver :%d %s' % (
+ self.gdbserver_port, self.binary)
+ remote_cmd = 'target remote'
+ gdbinit_lines.append('# On the remote target:')
+ gdbinit_lines.append(target_help)
+ gdbinit_lines.append('# On the build machine:')
+ gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree)
+ gdbinit_lines.append(
+ '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit)
+
+ gdbinit_lines.append('set sysroot ' + self.modified_recipe.d)
+ gdbinit_lines.append('set substitute-path "/usr/include" "' +
+ os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"')
+ # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir.
+ gdbinit_lines.append('set debuginfod enabled off')
+ if self.image_recipe.rootfs_dbg:
+ gdbinit_lines.append(
+ 'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"')
+ # First: Search for sources of this recipe in the workspace folder
+ if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir:
+ gdbinit_lines.append('set substitute-path "%s" "%s"' %
+ (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree))
+ else:
+ logger.error(
+ "TARGET_DBGSRC_DIR must contain the recipe name PN.")
+ # Second: Search for sources of other recipes in the rootfs-dbg
+ if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
+ gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join(
+ self.image_recipe.rootfs_dbg, "usr", "src", "debug"))
+ else:
+ logger.error(
+ "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
+ else:
+ logger.warning(
+ "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
+ gdbinit_lines.append(
+ '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port))
+ gdbinit_lines.append('set remote exec-file ' + self.binary)
+ gdbinit_lines.append(
+ 'run ' + os.path.join(self.modified_recipe.d, self.binary))
+
+ GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines)
+
+ def _gen_gdb_start_script(self):
+ """Generate a script starting GDB with the corresponding gdbinit configuration."""
+ cmd_lines = ['#!/bin/sh']
+ cmd_lines.append('cd ' + self.modified_recipe.real_srctree)
+ cmd_lines.append(self.gdb_cross.gdb + ' -ix ' +
+ self.gdbinit + ' "$@"')
+ GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True)
+
+ def initialize(self):
+ self._gen_gdbserver_start_script()
+ self._gen_gdbinit_config()
+ self._gen_gdb_start_script()
+
+ @staticmethod
+ def write_file(script_file, cmd_lines, executable=False):
+ script_dir = os.path.dirname(script_file)
+ mkdirhier(script_dir)
+ with open(script_file, 'w') as script_f:
+ script_f.write(os.linesep.join(cmd_lines))
+ script_f.write(os.linesep)
+ if executable:
+ st = os.stat(script_file)
+ os.chmod(script_file, st.st_mode | stat.S_IEXEC)
+ logger.info("Created: %s" % script_file)
+
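For orientation, an abridged, hypothetical gdbinit as _gen_gdbinit_config might emit it for a binary /usr/bin/foo on port 1234 in multi mode (every path below is invented; substitute-path and solib-search-path lines omitted):

    gdbinit_example = [
        '# This file is generated by devtool ide-sdk',
        '# On the remote target:',
        '# gdbserver --multi :1234',
        '# On the build machine:',
        '# cd /work/sources/foo',
        '# /work/recipe-sysroot-native/usr/bin/arm-poky-linux-gnueabi-gdb -ix gdbinit_1234_usr-bin-foo',
        'set sysroot /work/image/rootfs',
        'set debuginfod enabled off',
        'target extended-remote 192.168.7.2:1234',
        'set remote exec-file /usr/bin/foo',
    ]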
+
+class IdeBase:
+ """Base class defining the interface for IDE plugins"""
+
+ def __init__(self):
+ self.ide_name = 'undefined'
+ self.gdb_cross_configs = []
+
+ @classmethod
+ def ide_plugin_priority(cls):
+ """Used to find the default ide handler if --ide is not passed"""
+ return 10
+
+ def setup_shared_sysroots(self, shared_env):
+ logger.warn("Shared sysroot mode is not supported for IDE %s" %
+ self.ide_name)
+
+ def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+ logger.warn("Modified recipe mode is not supported for IDE %s" %
+ self.ide_name)
+
+ def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig):
+ binaries = modified_recipe.find_installed_binaries()
+ for binary in binaries:
+ gdb_cross_config = gdb_cross_config_class(
+ image_recipe, modified_recipe, binary)
+ gdb_cross_config.initialize()
+ self.gdb_cross_configs.append(gdb_cross_config)
+
+ @staticmethod
+    def gen_oe_scripts_sym_link(modified_recipe):
+ # create a sym-link from sources to the scripts directory
+ if os.path.isdir(modified_recipe.ide_sdk_scripts_dir):
+ IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir,
+ os.path.join(modified_recipe.real_srctree, 'oe-scripts'))
+
+ @staticmethod
+ def update_json_file(json_dir, json_file, update_dict):
+ """Update a json file
+
+        By default it uses the dict.update function. If this is not suitable,
+        the update function might be passed via an update_func parameter.
+ """
+ json_path = os.path.join(json_dir, json_file)
+ logger.info("Updating IDE config file: %s (%s)" %
+ (json_file, json_path))
+ if not os.path.exists(json_dir):
+ os.makedirs(json_dir)
+ try:
+ with open(json_path) as f:
+ orig_dict = json.load(f)
+ except json.decoder.JSONDecodeError:
+ logger.info(
+ "Decoding %s failed. Probably because of comments in the json file" % json_path)
+ orig_dict = {}
+ except FileNotFoundError:
+ orig_dict = {}
+ orig_dict.update(update_dict)
+ with open(json_path, 'w') as f:
+ json.dump(orig_dict, f, indent=4)
+
+ @staticmethod
+ def symlink_force(tgt, dst):
+ try:
+ os.symlink(tgt, dst)
+ except OSError as err:
+ if err.errno == errno.EEXIST:
+ if os.readlink(dst) != tgt:
+ os.remove(dst)
+ os.symlink(tgt, dst)
+ else:
+ raise err
+
+
+def get_devtool_deploy_opts(args):
+ """Filter args for devtool deploy-target args"""
+ if not args.target:
+ return None
+ devtool_deploy_opts = [args.target]
+ if args.no_host_check:
+ devtool_deploy_opts += ["-c"]
+ if args.show_status:
+ devtool_deploy_opts += ["-s"]
+ if args.no_preserve:
+ devtool_deploy_opts += ["-p"]
+ if args.no_check_space:
+ devtool_deploy_opts += ["--no-check-space"]
+ if args.ssh_exec:
+ devtool_deploy_opts += ["-e", args.ssh.exec]
+ if args.port:
+ devtool_deploy_opts += ["-P", args.port]
+ if args.key:
+ devtool_deploy_opts += ["-I", args.key]
+ if args.strip is False:
+ devtool_deploy_opts += ["--no-strip"]
+ return devtool_deploy_opts
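A quick usage sketch of the option filter above, with an invented argparse namespace:

    from argparse import Namespace

    args = Namespace(target='root@192.168.7.2', no_host_check=True,
                     show_status=False, no_preserve=False, no_check_space=False,
                     ssh_exec=None, port='2222', key=None, strip=None)
    # target, then -c for the skipped host check, then the port pair
    assert get_devtool_deploy_opts(args) == ['root@192.168.7.2', '-c', '-P', '2222']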
diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py
new file mode 100644
index 0000000000..a62b93224e
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_code.py
@@ -0,0 +1,463 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk IDE plugin for VSCode and VSCodium"""
+
+import json
+import logging
+import os
+import shutil
+from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts
+
+logger = logging.getLogger('devtool')
+
+
+class GdbCrossConfigVSCode(GdbCrossConfig):
+ def __init__(self, image_recipe, modified_recipe, binary):
+ super().__init__(image_recipe, modified_recipe, binary, False)
+
+ def initialize(self):
+ self._gen_gdbserver_start_script()
+
+
+class IdeVSCode(IdeBase):
+ """Manage IDE configurations for VSCode
+
+ Modified recipe mode:
+ - cmake: use the cmake-preset generated by devtool ide-sdk
+ - meson: meson is called via a wrapper script generated by devtool ide-sdk
+
+ Shared sysroot mode:
+ In shared sysroot mode, the cross tool-chain is exported to the user's global configuration.
+ A workspace cannot be created because there is no recipe that defines how a workspace could
+ be set up.
+ - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json
+ The cmake-kit uses the environment script and the tool-chain file
+ generated by meta-ide-support.
+ - meson: Meson needs manual workspace configuration.
+ """
+
+ @classmethod
+ def ide_plugin_priority(cls):
+ """If --ide is not passed this is the default plugin"""
+ if shutil.which('code'):
+ return 100
+ return 0
+
+ def setup_shared_sysroots(self, shared_env):
+ """Expose the toolchain of the shared sysroots SDK"""
+ datadir = shared_env.ide_support.datadir
+ deploy_dir_image = shared_env.ide_support.deploy_dir_image
+ real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
+ standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native
+ vscode_ws_path = os.path.join(
+ os.environ['HOME'], '.local', 'share', 'CMakeTools')
+ cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json')
+ oecmake_generator = "Ninja"
+ env_script = os.path.join(
+ deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
+
+ if not os.path.isdir(vscode_ws_path):
+ os.makedirs(vscode_ws_path)
+ cmake_kits_old = []
+ if os.path.exists(cmake_kits_path):
+ with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file:
+ cmake_kits_old = json.load(cmake_kits_file)
+ cmake_kits = cmake_kits_old.copy()
+
+ cmake_kit_new = {
+ "name": "OE " + real_multimach_target_sys,
+ "environmentSetupScript": env_script,
+ "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake",
+ "preferredGenerator": {
+ "name": oecmake_generator
+ }
+ }
+
+ def merge_kit(cmake_kits, cmake_kit_new):
+ i = 0
+ while i < len(cmake_kits):
+ if 'environmentSetupScript' in cmake_kits[i] and \
+ cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']:
+ cmake_kits[i] = cmake_kit_new
+ return
+ i += 1
+ cmake_kits.append(cmake_kit_new)
+ merge_kit(cmake_kits, cmake_kit_new)
+
+ if cmake_kits != cmake_kits_old:
+ logger.info("Updating: %s" % cmake_kits_path)
+ with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file:
+ json.dump(cmake_kits, cmake_kits_file, indent=4)
+ else:
+ logger.info("Already up to date: %s" % cmake_kits_path)
+
+ cmake_native = os.path.join(
+ shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake')
+ if os.path.isfile(cmake_native):
+ logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native)
+ else:
+ logger.error("Cannot find cmake native at: %s" % cmake_native)
+
+ def dot_code_dir(self, modified_recipe):
+ return os.path.join(modified_recipe.srctree, '.vscode')
+
+ def __vscode_settings_meson(self, settings_dict, modified_recipe):
+ if modified_recipe.build_tool is not BuildTool.MESON:
+ return
+ settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper
+
+ confopts = modified_recipe.mesonopts.split()
+ confopts += modified_recipe.meson_cross_file.split()
+ confopts += modified_recipe.extra_oemeson.split()
+ settings_dict["mesonbuild.configureOptions"] = confopts
+ settings_dict["mesonbuild.buildFolder"] = modified_recipe.b
+
+ def __vscode_settings_cmake(self, settings_dict, modified_recipe):
+ """Add cmake specific settings to settings.json.
+
+ Note: most settings are passed to the cmake preset.
+ """
+ if modified_recipe.build_tool is not BuildTool.CMAKE:
+ return
+ settings_dict["cmake.configureOnOpen"] = True
+ settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree
+
+ def vscode_settings(self, modified_recipe, image_recipe):
+ files_excludes = {
+ "**/.git/**": True,
+ "**/oe-logs/**": True,
+ "**/oe-workdir/**": True,
+ "**/source-date-epoch/**": True
+ }
+ python_exclude = [
+ "**/.git/**",
+ "**/oe-logs/**",
+ "**/oe-workdir/**",
+ "**/source-date-epoch/**"
+ ]
+ files_readonly = {
+ modified_recipe.recipe_sysroot + '/**': True,
+ modified_recipe.recipe_sysroot_native + '/**': True,
+ }
+ if image_recipe.rootfs_dbg is not None:
+ files_readonly[image_recipe.rootfs_dbg + '/**'] = True
+ settings_dict = {
+ "files.watcherExclude": files_excludes,
+ "files.exclude": files_excludes,
+ "files.readonlyInclude": files_readonly,
+ "python.analysis.exclude": python_exclude
+ }
+ self.__vscode_settings_cmake(settings_dict, modified_recipe)
+ self.__vscode_settings_meson(settings_dict, modified_recipe)
+
+ settings_file = 'settings.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), settings_file, settings_dict)
+
+ def __vscode_extensions_cmake(self, modified_recipe, recommendations):
+ if modified_recipe.build_tool is not BuildTool.CMAKE:
+ return
+ recommendations += [
+ "twxs.cmake",
+ "ms-vscode.cmake-tools",
+ "ms-vscode.cpptools",
+ "ms-vscode.cpptools-extension-pack",
+ "ms-vscode.cpptools-themes"
+ ]
+
+ def __vscode_extensions_meson(self, modified_recipe, recommendations):
+ if modified_recipe.build_tool is not BuildTool.MESON:
+ return
+ recommendations += [
+ 'mesonbuild.mesonbuild',
+ "ms-vscode.cpptools",
+ "ms-vscode.cpptools-extension-pack",
+ "ms-vscode.cpptools-themes"
+ ]
+
+ def vscode_extensions(self, modified_recipe):
+ recommendations = []
+ self.__vscode_extensions_cmake(modified_recipe, recommendations)
+ self.__vscode_extensions_meson(modified_recipe, recommendations)
+ extensions_file = 'extensions.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations})
+
+ def vscode_c_cpp_properties(self, modified_recipe):
+ properties_dict = {
+ "name": modified_recipe.recipe_id_pretty,
+ }
+ if modified_recipe.build_tool is BuildTool.CMAKE:
+ properties_dict["configurationProvider"] = "ms-vscode.cmake-tools"
+ elif modified_recipe.build_tool is BuildTool.MESON:
+ properties_dict["configurationProvider"] = "mesonbuild.mesonbuild"
+ properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0])
+ else: # no C/C++ build
+ return
+
+ properties_dicts = {
+ "configurations": [
+ properties_dict
+ ],
+ "version": 4
+ }
+ prop_file = 'c_cpp_properties.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), prop_file, properties_dicts)
+
+ def vscode_launch_bin_dbg(self, gdb_cross_config):
+ modified_recipe = gdb_cross_config.modified_recipe
+
+ launch_config = {
+ "name": gdb_cross_config.id_pretty,
+ "type": "cppdbg",
+ "request": "launch",
+ "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')),
+ "stopAtEntry": True,
+ "cwd": "${workspaceFolder}",
+ "environment": [],
+ "externalConsole": False,
+ "MIMode": "gdb",
+ "preLaunchTask": gdb_cross_config.id_pretty,
+ "miDebuggerPath": modified_recipe.gdb_cross.gdb,
+ "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port)
+ }
+
+ # Search for header files in recipe-sysroot.
+ src_file_map = {
+ "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include")
+ }
+        # First of all, search for unstripped binaries in the image folder.
+ # These binaries are copied (and optionally stripped) by deploy-target
+ setup_commands = [
+ {
+ "description": "sysroot",
+ "text": "set sysroot " + modified_recipe.d
+ }
+ ]
+
+ if gdb_cross_config.image_recipe.rootfs_dbg:
+ launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str(
+ gdb_cross_config.image_recipe)
+ # First: Search for sources of this recipe in the workspace folder
+ if modified_recipe.pn in modified_recipe.target_dbgsrc_dir:
+ src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}"
+ else:
+ logger.error(
+ "TARGET_DBGSRC_DIR must contain the recipe name PN.")
+ # Second: Search for sources of other recipes in the rootfs-dbg
+ if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"):
+ src_file_map["/usr/src/debug"] = os.path.join(
+ gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug")
+ else:
+ logger.error(
+ "TARGET_DBGSRC_DIR must start with /usr/src/debug.")
+ else:
+ logger.warning(
+ "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.")
+
+ launch_config['sourceFileMap'] = src_file_map
+ launch_config['setupCommands'] = setup_commands
+ return launch_config
+
+ def vscode_launch(self, modified_recipe):
+ """GDB Launch configuration for binaries (elf files)"""
+
+ configurations = []
+ for gdb_cross_config in self.gdb_cross_configs:
+ if gdb_cross_config.modified_recipe is modified_recipe:
+ configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config))
+ launch_dict = {
+ "version": "0.2.0",
+ "configurations": configurations
+ }
+ launch_file = 'launch.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), launch_file, launch_dict)
+
+ def vscode_tasks_cpp(self, args, modified_recipe):
+ run_install_deploy = modified_recipe.gen_install_deploy_script(args)
+ install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty
+ tasks_dict = {
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": install_task_name,
+ "type": "shell",
+ "command": run_install_deploy,
+ "problemMatcher": []
+ }
+ ]
+ }
+ for gdb_cross_config in self.gdb_cross_configs:
+ if gdb_cross_config.modified_recipe is not modified_recipe:
+ continue
+ tasks_dict['tasks'].append(
+ {
+ "label": gdb_cross_config.id_pretty,
+ "type": "shell",
+ "isBackground": True,
+ "dependsOn": [
+ install_task_name
+ ],
+ "command": gdb_cross_config.gdbserver_script,
+ "problemMatcher": [
+ {
+ "pattern": [
+ {
+ "regexp": ".",
+ "file": 1,
+ "location": 2,
+ "message": 3
+ }
+ ],
+ "background": {
+ "activeOnStart": True,
+ "beginsPattern": ".",
+ "endsPattern": ".",
+ }
+ }
+ ]
+ })
+ tasks_file = 'tasks.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
+
+ def vscode_tasks_fallback(self, args, modified_recipe):
+ oe_init_dir = modified_recipe.oe_init_dir
+ oe_init = ". %s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir)
+ dt_build = "devtool build "
+ dt_build_label = dt_build + modified_recipe.recipe_id_pretty
+ dt_build_cmd = dt_build + modified_recipe.bpn
+ clean_opt = " --clean"
+ dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt
+ dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt
+ dt_deploy = "devtool deploy-target "
+ dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty
+ dt_deploy_cmd = dt_deploy + modified_recipe.bpn
+ dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty
+ deploy_opts = ' '.join(get_devtool_deploy_opts(args))
+ tasks_dict = {
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": dt_build_label,
+ "type": "shell",
+ "command": "bash",
+ "linux": {
+ "options": {
+ "cwd": oe_init_dir
+ }
+ },
+ "args": [
+ "--login",
+ "-c",
+ "%s%s" % (oe_init, dt_build_cmd)
+ ],
+ "problemMatcher": []
+ },
+ {
+ "label": dt_deploy_label,
+ "type": "shell",
+ "command": "bash",
+ "linux": {
+ "options": {
+ "cwd": oe_init_dir
+ }
+ },
+ "args": [
+ "--login",
+ "-c",
+ "%s%s %s" % (
+ oe_init, dt_deploy_cmd, deploy_opts)
+ ],
+ "problemMatcher": []
+ },
+ {
+ "label": dt_build_deploy_label,
+ "dependsOrder": "sequence",
+ "dependsOn": [
+ dt_build_label,
+ dt_deploy_label
+ ],
+ "problemMatcher": [],
+ "group": {
+ "kind": "build",
+ "isDefault": True
+ }
+ },
+ {
+ "label": dt_build_clean_label,
+ "type": "shell",
+ "command": "bash",
+ "linux": {
+ "options": {
+ "cwd": oe_init_dir
+ }
+ },
+ "args": [
+ "--login",
+ "-c",
+ "%s%s" % (oe_init, dt_build_clean_cmd)
+ ],
+ "problemMatcher": []
+ }
+ ]
+ }
+ if modified_recipe.gdb_cross:
+ for gdb_cross_config in self.gdb_cross_configs:
+ if gdb_cross_config.modified_recipe is not modified_recipe:
+ continue
+ tasks_dict['tasks'].append(
+ {
+ "label": gdb_cross_config.id_pretty,
+ "type": "shell",
+ "isBackground": True,
+ "dependsOn": [
+ dt_build_deploy_label
+ ],
+ "command": gdb_cross_config.gdbserver_script,
+ "problemMatcher": [
+ {
+ "pattern": [
+ {
+ "regexp": ".",
+ "file": 1,
+ "location": 2,
+ "message": 3
+ }
+ ],
+ "background": {
+ "activeOnStart": True,
+ "beginsPattern": ".",
+ "endsPattern": ".",
+ }
+ }
+ ]
+ })
+ tasks_file = 'tasks.json'
+ IdeBase.update_json_file(
+ self.dot_code_dir(modified_recipe), tasks_file, tasks_dict)
+
+ def vscode_tasks(self, args, modified_recipe):
+ if modified_recipe.build_tool.is_c_ccp:
+ self.vscode_tasks_cpp(args, modified_recipe)
+ else:
+ self.vscode_tasks_fallback(args, modified_recipe)
+
+ def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+ self.vscode_settings(modified_recipe, image_recipe)
+ self.vscode_extensions(modified_recipe)
+ self.vscode_c_cpp_properties(modified_recipe)
+ if args.target:
+ self.initialize_gdb_cross_configs(
+ image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode)
+ self.vscode_launch(modified_recipe)
+ self.vscode_tasks(args, modified_recipe)
+
+
+def register_ide_plugin(ide_plugins):
+ ide_plugins['code'] = IdeVSCode
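A minimal illustration of how the priority scheme is meant to resolve the default plugin when --ide is not passed (the selection helper is invented for this sketch; ide_sdk.py owns the real logic):

    def pick_default_ide(ide_plugins):
        # IdeVSCode reports 100 when the 'code' binary is on PATH and 0
        # otherwise; IdeBase subclasses default to 10, so VSCode wins
        # whenever it is installed.
        name = max(ide_plugins, key=lambda n: ide_plugins[n].ide_plugin_priority())
        return ide_plugins[name]()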
diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py
new file mode 100644
index 0000000000..f106c5a026
--- /dev/null
+++ b/scripts/lib/devtool/ide_plugins/ide_none.py
@@ -0,0 +1,53 @@
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk generic IDE plugin"""
+
+import os
+import logging
+from devtool.ide_plugins import IdeBase, GdbCrossConfig
+
+logger = logging.getLogger('devtool')
+
+
+class IdeNone(IdeBase):
+ """Generate some generic helpers for other IDEs
+
+ Modified recipe mode:
+ Generate some helper scripts for remote debugging with GDB
+
+ Shared sysroot mode:
+ A wrapper for bitbake meta-ide-support and bitbake build-sysroots
+ """
+
+ def __init__(self):
+ super().__init__()
+
+ def setup_shared_sysroots(self, shared_env):
+ real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys
+ deploy_dir_image = shared_env.ide_support.deploy_dir_image
+ env_script = os.path.join(
+ deploy_dir_image, 'environment-setup-' + real_multimach_target_sys)
+ logger.info(
+ "To use this SDK please source this: %s" % env_script)
+
+ def setup_modified_recipe(self, args, image_recipe, modified_recipe):
+ """generate some helper scripts and config files
+
+ - Execute the do_install task
+ - Execute devtool deploy-target
+ - Generate a gdbinit file per executable
+ - Generate the oe-scripts sym-link
+ """
+ script_path = modified_recipe.gen_install_deploy_script(args)
+ logger.info("Created: %s" % script_path)
+
+ self.initialize_gdb_cross_configs(image_recipe, modified_recipe)
+
+        IdeBase.gen_oe_scripts_sym_link(modified_recipe)
+
+
+def register_ide_plugin(ide_plugins):
+ ide_plugins['none'] = IdeNone
diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py
new file mode 100755
index 0000000000..7807b322b3
--- /dev/null
+++ b/scripts/lib/devtool/ide_sdk.py
@@ -0,0 +1,1070 @@
+# Development tool - ide-sdk command plugin
+#
+# Copyright (C) 2023-2024 Siemens AG
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+"""Devtool ide-sdk plugin"""
+
+import json
+import logging
+import os
+import re
+import shutil
+import stat
+import subprocess
+import sys
+from argparse import RawTextHelpFormatter
+from enum import Enum
+
+import scriptutils
+import bb
+from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe
+from devtool.standard import get_real_srctree
+from devtool.ide_plugins import BuildTool
+
+
+logger = logging.getLogger('devtool')
+
+# dict of classes derived from IdeBase
+ide_plugins = {}
+
+
+class DevtoolIdeMode(Enum):
+ """Different modes are supported by the ide-sdk plugin.
+
+ The enum might be extended by more advanced modes in the future. Some ideas:
+ - auto: modified if all recipes are modified, shared if none of the recipes is modified.
+ - mixed: modified mode for modified recipes, shared mode for all other recipes.
+ """
+
+ modified = 'modified'
+ shared = 'shared'
+
+
+class TargetDevice:
+ """SSH remote login parameters"""
+
+ def __init__(self, args):
+ self.extraoptions = ''
+ if args.no_host_check:
+ self.extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'
+ self.ssh_sshexec = 'ssh'
+ if args.ssh_exec:
+ self.ssh_sshexec = args.ssh_exec
+ self.ssh_port = ''
+ if args.port:
+ self.ssh_port = "-p %s" % args.port
+ if args.key:
+ self.extraoptions += ' -i %s' % args.key
+
+ self.target = args.target
+ target_sp = args.target.split('@')
+ if len(target_sp) == 1:
+ self.login = ""
+ self.host = target_sp[0]
+ elif len(target_sp) == 2:
+ self.login = target_sp[0]
+ self.host = target_sp[1]
+ else:
+ logger.error("Invalid target argument: %s" % args.target)
+
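The login/host split accepts both target forms; a tiny sketch with invented values:

    for target in ('root@192.168.7.2', '192.168.7.2'):  # invented examples
        parts = target.split('@')
        # 'root@192.168.7.2' -> login='root', host='192.168.7.2'
        # '192.168.7.2'      -> login='',     host='192.168.7.2'
        login, host = ('', parts[0]) if len(parts) == 1 else (parts[0], parts[1])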
+
+class RecipeNative:
+ """Base class for calling bitbake to provide a -native recipe"""
+
+ def __init__(self, name, target_arch=None):
+ self.name = name
+ self.target_arch = target_arch
+ self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot']
+ self.staging_bindir_native = None
+ self.target_sys = None
+ self.__native_bin = None
+
+ def _initialize(self, config, workspace, tinfoil):
+ """Get the parsed recipe"""
+ recipe_d = parse_recipe(
+ config, tinfoil, self.name, appends=True, filter_workspace=False)
+ if not recipe_d:
+ raise DevtoolError("Parsing %s recipe failed" % self.name)
+ self.staging_bindir_native = os.path.realpath(
+ recipe_d.getVar('STAGING_BINDIR_NATIVE'))
+ self.target_sys = recipe_d.getVar('TARGET_SYS')
+ return recipe_d
+
+ def initialize(self, config, workspace, tinfoil):
+ """Basic initialization that can be overridden by a derived class"""
+ self._initialize(config, workspace, tinfoil)
+
+ @property
+ def native_bin(self):
+ if not self.__native_bin:
+ raise DevtoolError("native binary name is not defined.")
+ return self.__native_bin
+
+
+class RecipeGdbCross(RecipeNative):
+ """Handle handle gdb-cross on the host and the gdbserver on the target device"""
+
+ def __init__(self, args, target_arch, target_device):
+ super().__init__('gdb-cross-' + target_arch, target_arch)
+ self.target_device = target_device
+ self.gdb = None
+ self.gdbserver_port_next = int(args.gdbserver_port_start)
+ self.config_db = {}
+
+ def __find_gdbserver(self, config, tinfoil):
+ """Absolute path of the gdbserver"""
+ recipe_d_gdb = parse_recipe(
+ config, tinfoil, 'gdb', appends=True, filter_workspace=False)
+ if not recipe_d_gdb:
+ raise DevtoolError("Parsing gdb recipe failed")
+ return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver')
+
+ def initialize(self, config, workspace, tinfoil):
+ super()._initialize(config, workspace, tinfoil)
+ gdb_bin = self.target_sys + '-gdb'
+ gdb_path = os.path.join(
+ self.staging_bindir_native, self.target_sys, gdb_bin)
+ self.gdb = gdb_path
+ self.gdbserver_path = self.__find_gdbserver(config, tinfoil)
+
+ @property
+ def host(self):
+ return self.target_device.host
+
+
+class RecipeImage:
+ """Handle some image recipe related properties
+
+ Most workflows require firmware that runs on the target device.
+ This firmware must be consistent with the setup of the host system.
+ In particular, the debug symbols must be compatible. For this, the
+ rootfs must be created as part of the SDK.
+ """
+
+ def __init__(self, name):
+ self.combine_dbg_image = False
+ self.gdbserver_missing = False
+ self.name = name
+ self.rootfs = None
+ self.__rootfs_dbg = None
+ self.bootstrap_tasks = [self.name + ':do_build']
+
+ def initialize(self, config, tinfoil):
+ image_d = parse_recipe(
+ config, tinfoil, self.name, appends=True, filter_workspace=False)
+ if not image_d:
+ raise DevtoolError(
+ "Parsing image recipe %s failed" % self.name)
+
+ self.combine_dbg_image = bb.data.inherits_class(
+ 'image-combined-dbg', image_d)
+
+ workdir = image_d.getVar('WORKDIR')
+ self.rootfs = os.path.join(workdir, 'rootfs')
+ if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1":
+ self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg')
+
+ self.gdbserver_missing = 'gdbserver' not in image_d.getVar(
+ 'IMAGE_INSTALL')
+
+ @property
+ def debug_support(self):
+ return bool(self.rootfs_dbg)
+
+ @property
+ def rootfs_dbg(self):
+ if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg):
+ return self.__rootfs_dbg
+ return None
+
+
+class RecipeMetaIdeSupport:
+ """For the shared sysroots mode meta-ide-support is needed
+
+ For use cases where just a cross tool-chain is required but
+ no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support
+    and bitbake build-sysroots. This also allows exposing the cross-toolchains
+    to IDEs. For example, VSCode supports different tool-chains via cmake-kits.
+ """
+
+ def __init__(self):
+ self.bootstrap_tasks = ['meta-ide-support:do_build']
+ self.topdir = None
+ self.datadir = None
+ self.deploy_dir_image = None
+ self.build_sys = None
+ # From toolchain-scripts
+ self.real_multimach_target_sys = None
+
+ def initialize(self, config, tinfoil):
+ meta_ide_support_d = parse_recipe(
+ config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False)
+ if not meta_ide_support_d:
+ raise DevtoolError("Parsing meta-ide-support recipe failed")
+
+ self.topdir = meta_ide_support_d.getVar('TOPDIR')
+ self.datadir = meta_ide_support_d.getVar('datadir')
+ self.deploy_dir_image = meta_ide_support_d.getVar(
+ 'DEPLOY_DIR_IMAGE')
+ self.build_sys = meta_ide_support_d.getVar('BUILD_SYS')
+ self.real_multimach_target_sys = meta_ide_support_d.getVar(
+ 'REAL_MULTIMACH_TARGET_SYS')
+
+
+class RecipeBuildSysroots:
+ """For the shared sysroots mode build-sysroots is needed"""
+
+ def __init__(self):
+ self.standalone_sysroot = None
+ self.standalone_sysroot_native = None
+ self.bootstrap_tasks = [
+ 'build-sysroots:do_build_target_sysroot',
+ 'build-sysroots:do_build_native_sysroot'
+ ]
+
+ def initialize(self, config, tinfoil):
+ build_sysroots_d = parse_recipe(
+ config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False)
+ if not build_sysroots_d:
+ raise DevtoolError("Parsing build-sysroots recipe failed")
+ self.standalone_sysroot = build_sysroots_d.getVar(
+ 'STANDALONE_SYSROOT')
+ self.standalone_sysroot_native = build_sysroots_d.getVar(
+ 'STANDALONE_SYSROOT_NATIVE')
+
+
+class SharedSysrootsEnv:
+ """Handle the shared sysroots based workflow
+
+ Support the workflow with just a tool-chain without a recipe.
+ It's basically like:
+ bitbake some-dependencies
+ bitbake meta-ide-support
+ bitbake build-sysroots
+ Use the environment-* file found in the deploy folder
+ """
+
+ def __init__(self):
+ self.ide_support = None
+ self.build_sysroots = None
+
+ def initialize(self, ide_support, build_sysroots):
+ self.ide_support = ide_support
+ self.build_sysroots = build_sysroots
+
+ def setup_ide(self, ide):
+ ide.setup(self)
+
+
+class RecipeNotModified:
+ """Handling of recipes added to the Direct DSK shared sysroots."""
+
+ def __init__(self, name):
+ self.name = name
+ self.bootstrap_tasks = [name + ':do_populate_sysroot']
+
+
+class RecipeModified:
+ """Handling of recipes in the workspace created by devtool modify"""
+ OE_INIT_BUILD_ENV = 'oe-init-build-env'
+
+ VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$")
+
+ def __init__(self, name):
+ self.name = name
+ self.bootstrap_tasks = [name + ':do_install']
+ self.gdb_cross = None
+ # workspace
+ self.real_srctree = None
+ self.srctree = None
+ self.ide_sdk_dir = None
+ self.ide_sdk_scripts_dir = None
+ self.bbappend = None
+ # recipe variables from d.getVar
+ self.b = None
+ self.base_libdir = None
+ self.bblayers = None
+ self.bpn = None
+ self.d = None
+ self.fakerootcmd = None
+ self.fakerootenv = None
+ self.libdir = None
+ self.max_process = None
+ self.package_arch = None
+ self.package_debug_split_style = None
+ self.path = None
+ self.pn = None
+ self.recipe_sysroot = None
+ self.recipe_sysroot_native = None
+ self.staging_incdir = None
+ self.strip_cmd = None
+ self.target_arch = None
+ self.target_dbgsrc_dir = None
+ self.topdir = None
+ self.workdir = None
+ self.recipe_id = None
+ # replicate bitbake build environment
+ self.exported_vars = None
+ self.cmd_compile = None
+ self.__oe_init_dir = None
+ # main build tool used by this recipe
+ self.build_tool = BuildTool.UNDEFINED
+ # build_tool = cmake
+ self.oecmake_generator = None
+ self.cmake_cache_vars = None
+ # build_tool = meson
+ self.meson_buildtype = None
+ self.meson_wrapper = None
+ self.mesonopts = None
+ self.extra_oemeson = None
+ self.meson_cross_file = None
+
+ def initialize(self, config, workspace, tinfoil):
+ recipe_d = parse_recipe(
+ config, tinfoil, self.name, appends=True, filter_workspace=False)
+ if not recipe_d:
+ raise DevtoolError("Parsing %s recipe failed" % self.name)
+
+ # Verify this recipe is built as externalsrc setup by devtool modify
+ workspacepn = check_workspace_recipe(
+ workspace, self.name, bbclassextend=True)
+ self.srctree = workspace[workspacepn]['srctree']
+ # Need to grab this here in case the source is within a subdirectory
+ self.real_srctree = get_real_srctree(
+ self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR'))
+ self.bbappend = workspace[workspacepn]['bbappend']
+
+ self.ide_sdk_dir = os.path.join(
+ config.workspace_path, 'ide-sdk', self.name)
+ if os.path.exists(self.ide_sdk_dir):
+ shutil.rmtree(self.ide_sdk_dir)
+ self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts')
+
+ self.b = recipe_d.getVar('B')
+ self.base_libdir = recipe_d.getVar('base_libdir')
+ self.bblayers = recipe_d.getVar('BBLAYERS').split()
+ self.bpn = recipe_d.getVar('BPN')
+ self.cxx = recipe_d.getVar('CXX')
+ self.d = recipe_d.getVar('D')
+ self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD')
+ self.fakerootenv = recipe_d.getVar('FAKEROOTENV')
+ self.libdir = recipe_d.getVar('libdir')
+ self.max_process = int(recipe_d.getVar(
+ "BB_NUMBER_THREADS") or os.cpu_count() or 1)
+ self.package_arch = recipe_d.getVar('PACKAGE_ARCH')
+ self.package_debug_split_style = recipe_d.getVar(
+ 'PACKAGE_DEBUG_SPLIT_STYLE')
+ self.path = recipe_d.getVar('PATH')
+ self.pn = recipe_d.getVar('PN')
+ self.recipe_sysroot = os.path.realpath(
+ recipe_d.getVar('RECIPE_SYSROOT'))
+ self.recipe_sysroot_native = os.path.realpath(
+ recipe_d.getVar('RECIPE_SYSROOT_NATIVE'))
+ self.staging_bindir_toolchain = os.path.realpath(
+ recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN'))
+ self.staging_incdir = os.path.realpath(
+ recipe_d.getVar('STAGING_INCDIR'))
+ self.strip_cmd = recipe_d.getVar('STRIP')
+ self.target_arch = recipe_d.getVar('TARGET_ARCH')
+ self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR')
+ self.topdir = recipe_d.getVar('TOPDIR')
+ self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR'))
+
+ self.__init_exported_variables(recipe_d)
+
+ if bb.data.inherits_class('cmake', recipe_d):
+ self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR')
+ self.__init_cmake_preset_cache(recipe_d)
+ self.build_tool = BuildTool.CMAKE
+ elif bb.data.inherits_class('meson', recipe_d):
+ self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE')
+ self.mesonopts = recipe_d.getVar('MESONOPTS')
+ self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON')
+ self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE')
+ self.build_tool = BuildTool.MESON
+
+ # Recipe ID is the identifier for IDE config sections
+ self.recipe_id = self.bpn + "-" + self.package_arch
+ self.recipe_id_pretty = self.bpn + ": " + self.package_arch
+
+ def append_to_bbappend(self, append_text):
+ with open(self.bbappend, 'a') as bbap:
+ bbap.write(append_text)
+
+ def remove_from_bbappend(self, append_text):
+ with open(self.bbappend, 'r') as bbap:
+ text = bbap.read()
+ new_text = text.replace(append_text, '')
+ with open(self.bbappend, 'w') as bbap:
+ bbap.write(new_text)
+
+ @staticmethod
+ def is_valid_shell_variable(var):
+ """Skip strange shell variables like systemd
+
+ prevent from strange bugs because of strange variables which
+ are not used in this context but break various tools.
+ """
+        if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var):
+            return True
+        bb.debug(1, "ignoring variable: %s" % var)
+        return False
+
+ def debug_build_config(self, args):
+ """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise"""
+ if self.build_tool is BuildTool.CMAKE:
+ append_text = os.linesep + \
+ 'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep
+            if args.debug_build_config and 'CMAKE_BUILD_TYPE' not in self.cmake_cache_vars:
+ self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = {
+ "type": "STRING",
+ "value": "Debug",
+ }
+ self.append_to_bbappend(append_text)
+ elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars:
+ del self.cmake_cache_vars['CMAKE_BUILD_TYPE']
+ self.remove_from_bbappend(append_text)
+ elif self.build_tool is BuildTool.MESON:
+ append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep
+ if args.debug_build_config and self.meson_buildtype != "debug":
+                self.mesonopts = self.mesonopts.replace(
+                    '--buildtype ' + self.meson_buildtype, '--buildtype debug')
+ self.append_to_bbappend(append_text)
+ elif self.meson_buildtype == "debug":
+                self.mesonopts = self.mesonopts.replace(
+                    '--buildtype debug', '--buildtype plain')
+ self.remove_from_bbappend(append_text)
+ elif args.debug_build_config:
+            logger.warning(
+ "--debug-build-config is not implemented for this build tool yet.")
+
+ def solib_search_path(self, image):
+ """Search for debug symbols in the rootfs and rootfs-dbg
+
+ The debug symbols of shared libraries which are provided by other packages
+ are grabbed from the -dbg packages in the rootfs-dbg.
+
+        But most cross debugging tools like gdb, perf, and systemtap need to
+        find the executable/library first and then locate the corresponding
+        symbols file through its debuglink note. Therefore the library paths
+        from the rootfs are added as well.
+
+ Note: For the devtool modified recipe compiled from the IDE, the debug
+ symbols are taken from the unstripped binaries in the image folder.
+ Also, devtool deploy-target takes the files from the image folder.
+ debug symbols in the image folder refer to the corresponding source files
+ with absolute paths of the build machine. Debug symbols found in the
+ rootfs-dbg are relocated and contain paths which refer to the source files
+ installed on the target device e.g. /usr/src/...
+ """
+ base_libdir = self.base_libdir.lstrip('/')
+ libdir = self.libdir.lstrip('/')
+ so_paths = [
+ # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug
+ os.path.join(image.rootfs_dbg, base_libdir, ".debug"),
+ os.path.join(image.rootfs_dbg, libdir, ".debug"),
+ # debug symbols for package_debug_split_style: debug-file-directory
+ os.path.join(image.rootfs_dbg, "usr", "lib", "debug"),
+
+ # The binaries are required as well, the debug packages are not enough
+ # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg
+ os.path.join(image.rootfs_dbg, base_libdir),
+ os.path.join(image.rootfs_dbg, libdir),
+ # Without image-combined-dbg.bbclass the binaries are only in rootfs.
+ # Note: Stepping into source files located in rootfs-dbg does not
+ # work without image-combined-dbg.bbclass yet.
+ os.path.join(image.rootfs, base_libdir),
+ os.path.join(image.rootfs, libdir)
+ ]
+ return so_paths
+
+ def solib_search_path_str(self, image):
+ """Return a : separated list of paths usable by GDB's set solib-search-path"""
+ return ':'.join(self.solib_search_path(image))
+
+ def __init_exported_variables(self, d):
+ """Find all variables with export flag set.
+
+        This allows generating IDE configurations which compile with the same
+ environment as bitbake does. That's at least a reasonable default behavior.
+ """
+ exported_vars = {}
+
+ vars = (key for key in d.keys() if not key.startswith(
+ "__") and not d.getVarFlag(key, "func", False))
+ for var in vars:
+ func = d.getVarFlag(var, "func", False)
+ if d.getVarFlag(var, 'python', False) and func:
+ continue
+ export = d.getVarFlag(var, "export", False)
+ unexport = d.getVarFlag(var, "unexport", False)
+ if not export and not unexport and not func:
+ continue
+ if unexport:
+ continue
+
+ val = d.getVar(var)
+ if val is None:
+ continue
+ if set(var) & set("-.{}+"):
+                logger.warning(
+                    "Found invalid character in variable name %s", str(var))
+ continue
+ varExpanded = d.expand(var)
+ val = str(val)
+
+ if not RecipeModified.is_valid_shell_variable(varExpanded):
+ continue
+
+ if func:
+ code_line = "line: {0}, file: {1}\n".format(
+ d.getVarFlag(var, "lineno", False),
+ d.getVarFlag(var, "filename", False))
+ val = val.rstrip('\n')
+ logger.warn("Warning: exported shell function %s() is not exported (%s)" %
+ (varExpanded, code_line))
+ continue
+
+ if export:
+ exported_vars[varExpanded] = val.strip()
+ continue
+
+ self.exported_vars = exported_vars
+
+ def __init_cmake_preset_cache(self, d):
+ """Get the arguments passed to cmake
+
+ Replicate the cmake configure arguments with all details to
+        share the build folder between bitbake and SDK.
+ """
+ site_file = os.path.join(self.workdir, 'site-file.cmake')
+ if os.path.exists(site_file):
+ print("Warning: site-file.cmake is not supported")
+
+ cache_vars = {}
+ oecmake_args = d.getVar('OECMAKE_ARGS').split()
+ extra_oecmake = d.getVar('EXTRA_OECMAKE').split()
+ for param in oecmake_args + extra_oecmake:
+ d_pref = "-D"
+ if param.startswith(d_pref):
+ param = param[len(d_pref):]
+ else:
+ print("Error: expected a -D")
+ param_s = param.split('=', 1)
+ param_nt = param_s[0].split(':', 1)
+
+ def handle_undefined_variable(var):
+ if var.startswith('${') and var.endswith('}'):
+ return ''
+ else:
+ return var
+ # Example: FOO=ON
+ if len(param_nt) == 1:
+ cache_vars[param_s[0]] = handle_undefined_variable(param_s[1])
+ # Example: FOO:PATH=/tmp
+ elif len(param_nt) == 2:
+ cache_vars[param_nt[0]] = {
+ "type": param_nt[1],
+ "value": handle_undefined_variable(param_s[1]),
+ }
+ else:
+ print("Error: cannot parse %s" % param)
+ self.cmake_cache_vars = cache_vars
+
+ def cmake_preset(self):
+ """Create a preset for cmake that mimics how bitbake calls cmake"""
+ toolchain_file = os.path.join(self.workdir, 'toolchain.cmake')
+ cmake_executable = os.path.join(
+ self.recipe_sysroot_native, 'usr', 'bin', 'cmake')
+ self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id
+
+ preset_dict_configure = {
+ "name": self.recipe_id,
+ "displayName": self.recipe_id_pretty,
+ "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
+ "binaryDir": self.b,
+ "generator": self.oecmake_generator,
+ "toolchainFile": toolchain_file,
+ "cacheVariables": self.cmake_cache_vars,
+ "environment": self.exported_vars,
+ "cmakeExecutable": cmake_executable
+ }
+
+ preset_dict_build = {
+ "name": self.recipe_id,
+ "displayName": self.recipe_id_pretty,
+ "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
+ "configurePreset": self.recipe_id,
+ "inheritConfigureEnvironment": True
+ }
+
+ preset_dict_test = {
+ "name": self.recipe_id,
+ "displayName": self.recipe_id_pretty,
+ "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch),
+ "configurePreset": self.recipe_id,
+ "inheritConfigureEnvironment": True
+ }
+
+ preset_dict = {
+ "version": 3, # cmake 3.21, backward compatible with kirkstone
+ "configurePresets": [preset_dict_configure],
+ "buildPresets": [preset_dict_build],
+ "testPresets": [preset_dict_test]
+ }
+
+ # Finally write the json file
+ json_file = 'CMakeUserPresets.json'
+ json_path = os.path.join(self.real_srctree, json_file)
+ logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path))
+ if not os.path.exists(self.real_srctree):
+ os.makedirs(self.real_srctree)
+ try:
+ with open(json_path) as f:
+ orig_dict = json.load(f)
+ except json.decoder.JSONDecodeError:
+ logger.info(
+ "Decoding %s failed. Probably because of comments in the json file" % json_path)
+ orig_dict = {}
+ except FileNotFoundError:
+ orig_dict = {}
+
+ # Add or update the presets for the recipe and keep other presets
+ for k, v in preset_dict.items():
+ if isinstance(v, list):
+ update_preset = v[0]
+ preset_added = False
+ if k in orig_dict:
+ for index, orig_preset in enumerate(orig_dict[k]):
+ if 'name' in orig_preset:
+ if orig_preset['name'] == update_preset['name']:
+ logger.debug("Updating preset: %s" %
+ orig_preset['name'])
+ orig_dict[k][index] = update_preset
+ preset_added = True
+ break
+ else:
+ logger.debug("keeping preset: %s" %
+ orig_preset['name'])
+ else:
+ logger.warn("preset without a name found")
+ if not preset_added:
+                    if k not in orig_dict:
+ orig_dict[k] = []
+ orig_dict[k].append(update_preset)
+ logger.debug("Added preset: %s" %
+ update_preset['name'])
+ else:
+ orig_dict[k] = v
+
+ with open(json_path, 'w') as f:
+ json.dump(orig_dict, f, indent=4)
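+        # The presets written here can be consumed from the source tree with a
+        # preset-aware cmake (>= 3.21), roughly:
+        #   cmake --preset <recipe_id> && cmake --build --preset <recipe_id>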
+
+ def gen_meson_wrapper(self):
+ """Generate a wrapper script to call meson with the cross environment"""
+ bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
+ meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson')
+ meson_real = os.path.join(
+ self.recipe_sysroot_native, 'usr', 'bin', 'meson.real')
+ with open(meson_wrapper, 'w') as mwrap:
+ mwrap.write("#!/bin/sh" + os.linesep)
+ for var, val in self.exported_vars.items():
+ mwrap.write('export %s="%s"' % (var, val) + os.linesep)
+ mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep)
+ private_temp = os.path.join(self.b, "meson-private", "tmp")
+ mwrap.write('mkdir -p "%s"' % private_temp + os.linesep)
+ mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep)
+ mwrap.write('exec "%s" "$@"' % meson_real + os.linesep)
+ st = os.stat(meson_wrapper)
+ os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC)
+ self.meson_wrapper = meson_wrapper
+ self.cmd_compile = meson_wrapper + " compile -C " + self.b
+
+ def which(self, executable):
+ bin_path = shutil.which(executable, path=self.path)
+ if not bin_path:
+ raise DevtoolError(
+ 'Cannot find %s. Probably the recipe %s is not built yet.' % (executable, self.bpn))
+ return bin_path
+
+ @staticmethod
+ def is_elf_file(file_path):
+ with open(file_path, "rb") as f:
+ data = f.read(4)
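+            # An ELF file starts with the 4-byte magic 0x7f 'E' 'L' 'F'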
+ if data == b'\x7fELF':
+ return True
+ return False
+
+ def find_installed_binaries(self):
+ """find all executable elf files in the image directory"""
+ binaries = []
+ d_len = len(self.d)
+ re_so = re.compile(r'.*\.so[.0-9]*$')
+ for root, _, files in os.walk(self.d, followlinks=False):
+ for file in files:
+                abs_name = os.path.join(root, file)
+                if os.path.islink(abs_name):
+                    continue
+                if re_so.match(file):
+                    continue
+ if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name):
+ binaries.append(abs_name[d_len:])
+ return sorted(binaries)
+
+ def gen_delete_package_dirs(self):
+ """delete folders of package tasks
+
+        This is a workaround for an issue with recipes having their sources
+        downloaded as file://
+        This likely breaks pseudo, for example:
+ path mismatch [3 links]: ino 79147802 db
+ .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/
+ cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp
+ .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp
+        Since the files are outdated anyway, delete them (also from pseudo's db) to work around this issue.
+ """
+ cmd_lines = ['#!/bin/sh']
+
+ # Set up the appropriate environment
+ newenv = dict(os.environ)
+ for varvalue in self.fakerootenv.split():
+ if '=' in varvalue:
+ splitval = varvalue.split('=', 1)
+ newenv[splitval[0]] = splitval[1]
+
+ # Replicate the environment variables from bitbake
+ for var, val in newenv.items():
+ if not RecipeModified.is_valid_shell_variable(var):
+ continue
+ cmd_lines.append('%s="%s"' % (var, val))
+ cmd_lines.append('export %s' % var)
+
+ # Delete the folders
+ pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [
+ "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]])
+ cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs)
+ cmd_lines.append('%s || { "%s failed"; exit 1; }' % (cmd, cmd))
+
+ return self.write_script(cmd_lines, 'delete_package_dirs')
+
+ def gen_deploy_target_script(self, args):
+ """Generate a script which does what devtool deploy-target does
+
+        This script is much quicker than devtool deploy-target because it
+ does not need to start a bitbake server. All information from tinfoil
+ is hard-coded in the generated script.
+ """
+ cmd_lines = ['#!%s' % str(sys.executable)]
+ cmd_lines.append('import sys')
+ cmd_lines.append('devtool_sys_path = %s' % str(sys.path))
+ cmd_lines.append('devtool_sys_path.reverse()')
+ cmd_lines.append('for p in devtool_sys_path:')
+ cmd_lines.append(' if p not in sys.path:')
+ cmd_lines.append(' sys.path.insert(0, p)')
+ cmd_lines.append('from devtool.deploy import deploy_no_d')
+ args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check',
+ 'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target']
+ filtered_args_dict = {key: value for key, value in vars(
+ args).items() if key in args_filter}
+ cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict))
+ cmd_lines.append('class Dict2Class(object):')
+ cmd_lines.append(' def __init__(self, my_dict):')
+ cmd_lines.append(' for key in my_dict:')
+ cmd_lines.append(' setattr(self, key, my_dict[key])')
+ cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)')
+ cmd_lines.append(
+ 'setattr(filtered_args, "recipename", "%s")' % self.bpn)
+ cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' %
+ (self.d, self.workdir, self.path, self.strip_cmd,
+ self.libdir, self.base_libdir, self.max_process,
+ self.fakerootcmd, self.fakerootenv))
+ return self.write_script(cmd_lines, 'deploy_target')
+
+ def gen_install_deploy_script(self, args):
+ """Generate a script which does install and deploy"""
+ cmd_lines = ['#!/bin/bash']
+
+ cmd_lines.append(self.gen_delete_package_dirs())
+
+ # . oe-init-build-env $BUILDDIR
+ # Note: Sourcing scripts with arguments requires bash
+ cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % (
+ self.oe_init_dir, self.oe_init_dir))
+ cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % (
+ self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir))
+
+ # bitbake -c install
+ cmd_lines.append(
+ 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn))
+
+ # Self contained devtool deploy-target
+ cmd_lines.append(self.gen_deploy_target_script(args))
+
+ return self.write_script(cmd_lines, 'install_and_deploy')
+
+ def write_script(self, cmd_lines, script_name):
+ bb.utils.mkdirhier(self.ide_sdk_scripts_dir)
+ script_name_arch = script_name + '_' + self.recipe_id
+ script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch)
+ with open(script_file, 'w') as script_f:
+ script_f.write(os.linesep.join(cmd_lines))
+ st = os.stat(script_file)
+ os.chmod(script_file, st.st_mode | stat.S_IEXEC)
+ return script_file
+
+ @property
+ def oe_init_build_env(self):
+ """Find the oe-init-build-env used for this setup"""
+ oe_init_dir = self.oe_init_dir
+ if oe_init_dir:
+ return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
+ return None
+
+ @property
+ def oe_init_dir(self):
+ """Find the directory where the oe-init-build-env is located
+
+ Assumption: There might be a layer with higher priority than poky
+        which provides the oe-init-build-env script in the layer's toplevel folder.
+ """
+ if not self.__oe_init_dir:
+ for layer in reversed(self.bblayers):
+ result = subprocess.run(
+ ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True)
+ if result.returncode == 0:
+ oe_init_dir = result.stdout.decode('utf-8').strip()
+ oe_init_path = os.path.join(
+ oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
+ if os.path.exists(oe_init_path):
+ logger.debug("Using %s from: %s" % (
+ RecipeModified.OE_INIT_BUILD_ENV, oe_init_path))
+ self.__oe_init_dir = oe_init_dir
+ break
+ if not self.__oe_init_dir:
+ logger.error("Cannot find the bitbake top level folder")
+ return self.__oe_init_dir
+
+
+def ide_setup(args, config, basepath, workspace):
+ """Generate the IDE configuration for the workspace"""
+
+    # Explicitly passing some special recipes does not make sense
+ for recipe in args.recipenames:
+ if recipe in ['meta-ide-support', 'build-sysroots']:
+ raise DevtoolError("Invalid recipe: %s." % recipe)
+
+ # Collect information about tasks which need to be bitbaked
+ bootstrap_tasks = []
+ bootstrap_tasks_late = []
+ tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+ try:
+ # define mode depending on recipes which need to be processed
+ recipes_image_names = []
+ recipes_modified_names = []
+ recipes_other_names = []
+ for recipe in args.recipenames:
+ try:
+ check_workspace_recipe(
+ workspace, recipe, bbclassextend=True)
+ recipes_modified_names.append(recipe)
+ except DevtoolError:
+ recipe_d = parse_recipe(
+ config, tinfoil, recipe, appends=True, filter_workspace=False)
+ if not recipe_d:
+ raise DevtoolError("Parsing recipe %s failed" % recipe)
+ if bb.data.inherits_class('image', recipe_d):
+ recipes_image_names.append(recipe)
+ else:
+ recipes_other_names.append(recipe)
+
+ invalid_params = False
+ if args.mode == DevtoolIdeMode.shared:
+ if len(recipes_modified_names):
+ logger.error("In shared sysroots mode modified recipes %s cannot be handled." % str(
+ recipes_modified_names))
+ invalid_params = True
+ if args.mode == DevtoolIdeMode.modified:
+ if len(recipes_other_names):
+ logger.error("Only in shared sysroots mode not modified recipes %s can be handled." % str(
+ recipes_other_names))
+ invalid_params = True
+ if len(recipes_image_names) != 1:
+            logger.error(
+                "Exactly one image recipe is required as the rootfs for remote development.")
+ invalid_params = True
+ for modified_recipe_name in recipes_modified_names:
+ if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'):
+ logger.error(
+ "Only cross compiled recipes are support. %s is not cross." % modified_recipe_name)
+ invalid_params = True
+
+ if invalid_params:
+ raise DevtoolError("Invalid parameters are passed.")
+
+ # For the shared sysroots mode, add all dependencies of all the images to the sysroots
+ # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg
+ recipes_images = []
+ for recipes_image_name in recipes_image_names:
+ logger.info("Using image: %s" % recipes_image_name)
+ recipe_image = RecipeImage(recipes_image_name)
+ recipe_image.initialize(config, tinfoil)
+ bootstrap_tasks += recipe_image.bootstrap_tasks
+ recipes_images.append(recipe_image)
+
+ # Provide a Direct SDK with shared sysroots
+ recipes_not_modified = []
+ if args.mode == DevtoolIdeMode.shared:
+ ide_support = RecipeMetaIdeSupport()
+ ide_support.initialize(config, tinfoil)
+ bootstrap_tasks += ide_support.bootstrap_tasks
+
+ logger.info("Adding %s to the Direct SDK sysroots." %
+ str(recipes_other_names))
+ for recipe_name in recipes_other_names:
+ recipe_not_modified = RecipeNotModified(recipe_name)
+ bootstrap_tasks += recipe_not_modified.bootstrap_tasks
+ recipes_not_modified.append(recipe_not_modified)
+
+ build_sysroots = RecipeBuildSysroots()
+ build_sysroots.initialize(config, tinfoil)
+ bootstrap_tasks_late += build_sysroots.bootstrap_tasks
+ shared_env = SharedSysrootsEnv()
+ shared_env.initialize(ide_support, build_sysroots)
+
+ recipes_modified = []
+ if args.mode == DevtoolIdeMode.modified:
+ logger.info("Setting up workspaces for modified recipe: %s" %
+ str(recipes_modified_names))
+ gdbs_cross = {}
+ for recipe_name in recipes_modified_names:
+ recipe_modified = RecipeModified(recipe_name)
+ recipe_modified.initialize(config, workspace, tinfoil)
+ bootstrap_tasks += recipe_modified.bootstrap_tasks
+ recipes_modified.append(recipe_modified)
+
+ if recipe_modified.target_arch not in gdbs_cross:
+ target_device = TargetDevice(args)
+ gdb_cross = RecipeGdbCross(
+ args, recipe_modified.target_arch, target_device)
+ gdb_cross.initialize(config, workspace, tinfoil)
+ bootstrap_tasks += gdb_cross.bootstrap_tasks
+ gdbs_cross[recipe_modified.target_arch] = gdb_cross
+ recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch]
+
+ finally:
+ tinfoil.shutdown()
+
+ if not args.skip_bitbake:
+ bb_cmd = 'bitbake '
+ if args.bitbake_k:
+ bb_cmd += "-k "
+ bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks)
+ exec_build_env_command(
+ config.init_path, basepath, bb_cmd_early, watch=True)
+ if bootstrap_tasks_late:
+ bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late)
+ exec_build_env_command(
+ config.init_path, basepath, bb_cmd_late, watch=True)
+
+ for recipe_image in recipes_images:
+        if recipe_image.gdbserver_missing:
+ logger.warning(
+ "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image)
+
+ if recipe_image.combine_dbg_image is False:
+ logger.warning(
+ 'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image)
+
+ # Instantiate the active IDE plugin
+ ide = ide_plugins[args.ide]()
+ if args.mode == DevtoolIdeMode.shared:
+ ide.setup_shared_sysroots(shared_env)
+ elif args.mode == DevtoolIdeMode.modified:
+ for recipe_modified in recipes_modified:
+ if recipe_modified.build_tool is BuildTool.CMAKE:
+ recipe_modified.cmake_preset()
+ if recipe_modified.build_tool is BuildTool.MESON:
+ recipe_modified.gen_meson_wrapper()
+ ide.setup_modified_recipe(
+ args, recipe_image, recipe_modified)
+ else:
+ raise DevtoolError("Must not end up here.")
+
+
+def register_commands(subparsers, context):
+ """Register devtool subcommands from this plugin"""
+
+ global ide_plugins
+
+    # Search for IDE plugins in all sub-folders named ide_plugins where devtool searches for plugins.
+ pluginpaths = [os.path.join(path, 'ide_plugins')
+ for path in context.pluginpaths]
+ ide_plugin_modules = []
+ for pluginpath in pluginpaths:
+ scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath)
+
+ for ide_plugin_module in ide_plugin_modules:
+ if hasattr(ide_plugin_module, 'register_ide_plugin'):
+ ide_plugin_module.register_ide_plugin(ide_plugins)
+ # Sort plugins according to their priority. The first entry is the default IDE plugin.
+ ide_plugins = dict(sorted(ide_plugins.items(),
+ key=lambda p: p[1].ide_plugin_priority(), reverse=True))
+
+ parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter,
+ help='Setup the SDK and configure the IDE')
+ parser_ide_sdk.add_argument(
+ 'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
+        'Depending on the --mode parameter, different types of SDKs and IDE configurations are generated.')
+ parser_ide_sdk.add_argument(
+ '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
+ help='Different SDK types are supported:\n'
+ '- "' + DevtoolIdeMode.modified.name + '" (default):\n'
+ ' devtool modify creates a workspace to work on the source code of a recipe.\n'
+        '  devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directory(ies)\n'
+ ' Usage example:\n'
+ ' devtool modify cmake-example\n'
+ ' devtool ide-sdk cmake-example core-image-minimal\n'
+ ' Start the IDE in the workspace folder\n'
+ ' At least one devtool modified recipe plus one image recipe are required:\n'
+ ' The image recipe is used to generate the target image and the remote debug configuration.\n'
+ '- "' + DevtoolIdeMode.shared.name + '":\n'
+ ' Usage example:\n'
+ ' devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n'
+ ' This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n'
+        '  To use this tool-chain the environment-* file found in the deploy folder (DEPLOY_DIR_IMAGE) needs to be sourced into a shell.\n'
+ ' In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit')
+ default_ide = list(ide_plugins.keys())[0]
+ parser_ide_sdk.add_argument(
+ '-i', '--ide', choices=ide_plugins.keys(), default=default_ide,
+ help='Setup the configuration for this IDE (default: %s)' % default_ide)
+ parser_ide_sdk.add_argument(
+ '-t', '--target', default='root@192.168.7.2',
+ help='Live target machine running an ssh server: user@hostname.')
+ parser_ide_sdk.add_argument(
+        '-G', '--gdbserver-port-start', default="1234", help='Port where gdbserver is listening.')
+ parser_ide_sdk.add_argument(
+ '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
+ parser_ide_sdk.add_argument(
+ '-e', '--ssh-exec', help='Executable to use in place of ssh')
+ parser_ide_sdk.add_argument(
+ '-P', '--port', help='Specify ssh port to use for connection to the target')
+ parser_ide_sdk.add_argument(
+ '-I', '--key', help='Specify ssh private key for connection to the target')
+ parser_ide_sdk.add_argument(
+        '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK.', action='store_true')
+ parser_ide_sdk.add_argument(
+ '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
+ parser_ide_sdk.add_argument(
+ '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
+ parser_ide_sdk.add_argument(
+        '-n', '--dry-run', help='List files to be deployed only', action='store_true')
+ parser_ide_sdk.add_argument(
+ '-s', '--show-status', help='Show progress/status output', action='store_true')
+ parser_ide_sdk.add_argument(
+ '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
+ parser_ide_sdk.add_argument(
+ '--no-check-space', help='Do not check for available space before deploying', action='store_true')
+ parser_ide_sdk.add_argument(
+ '--debug-build-config', help='Use debug build flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true')
+ parser_ide_sdk.set_defaults(func=ide_setup)
diff --git a/scripts/lib/devtool/import.py b/scripts/lib/devtool/import.py
index c13a180d14..6829851669 100644
--- a/scripts/lib/devtool/import.py
+++ b/scripts/lib/devtool/import.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool import plugin"""
import os
@@ -81,7 +71,7 @@ def devimport(args, config, basepath, workspace):
break
else:
non_importables.append(fn)
- logger.warn('No recipe to append %s.bbapppend, skipping' % fn)
+            logger.warning('No recipe to append %s.bbappend, skipping' % fn)
# Extract
imported = []
@@ -104,9 +94,9 @@ def devimport(args, config, basepath, workspace):
try:
tar.extract(member, path=config.workspace_path)
except PermissionError as pe:
- logger.warn(pe)
+ logger.warning(pe)
else:
- logger.warn('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
+ logger.warning('File already present. Use --overwrite/-o to overwrite it: %s' % member.name)
continue
else:
tar.extract(member, path=config.workspace_path)
@@ -129,7 +119,7 @@ def devimport(args, config, basepath, workspace):
if imported:
logger.info('Imported recipes into workspace %s: %s' % (config.workspace_path, ', '.join(imported)))
else:
- logger.warn('No recipes imported into the workspace')
+ logger.warning('No recipes imported into the workspace')
return 0
diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py
new file mode 100644
index 0000000000..18daef30c3
--- /dev/null
+++ b/scripts/lib/devtool/menuconfig.py
@@ -0,0 +1,81 @@
+# OpenEmbedded Development tool - menuconfig command plugin
+#
+# Copyright (C) 2018 Xilinx
+# Written by: Chandana Kalluri <ckalluri@xilinx.com>
+#
+# SPDX-License-Identifier: MIT
+#
+
+"""Devtool menuconfig plugin"""
+
+import os
+import bb
+import logging
+import argparse
+import re
+import glob
+from devtool import setup_tinfoil, parse_recipe, DevtoolError, standard, exec_build_env_command
+from devtool import check_workspace_recipe
+logger = logging.getLogger('devtool')
+
+def menuconfig(args, config, basepath, workspace):
+ """Entry point for the devtool 'menuconfig' subcommand"""
+
+ rd = ""
+ kconfigpath = ""
+ pn_src = ""
+ localfilesdir = ""
+ workspace_dir = ""
+ tinfoil = setup_tinfoil(basepath=basepath)
+ try:
+ rd = parse_recipe(config, tinfoil, args.component, appends=True, filter_workspace=False)
+ if not rd:
+ return 1
+
+ check_workspace_recipe(workspace, args.component)
+ pn = rd.getVar('PN')
+
+ if not rd.getVarFlag('do_menuconfig','task'):
+ raise DevtoolError("This recipe does not support menuconfig option")
+
+ workspace_dir = os.path.join(config.workspace_path,'sources')
+ kconfigpath = rd.getVar('B')
+ pn_src = os.path.join(workspace_dir,pn)
+
+        # Create the oe-local-files directory if it does not exist
+ localfilesdir = os.path.join(pn_src,'oe-local-files')
+ if not os.path.exists(localfilesdir):
+ bb.utils.mkdirhier(localfilesdir)
+ # Add gitignore to ensure source tree is clean
+ gitignorefile = os.path.join(localfilesdir,'.gitignore')
+ with open(gitignorefile, 'w') as f:
+ f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n')
+ f.write('*\n')
+
+ finally:
+ tinfoil.shutdown()
+
+ logger.info('Launching menuconfig')
+ exec_build_env_command(config.init_path, basepath, 'bitbake -c menuconfig %s' % pn, watch=True)
+ fragment = os.path.join(localfilesdir, 'devtool-fragment.cfg')
+ res = standard._create_kconfig_diff(pn_src,rd,fragment)
+
+ return 0
+
+def register_commands(subparsers, context):
+ """register devtool subcommands from this plugin"""
+ parser_menuconfig = subparsers.add_parser('menuconfig',help='Alter build-time configuration for a recipe', description='Launches the make menuconfig command (for recipes where do_menuconfig is available), allowing users to make changes to the build-time configuration. Creates a config fragment corresponding to changes made.', group='advanced')
+    parser_menuconfig.add_argument('component', help='component to alter config')
+ parser_menuconfig.set_defaults(func=menuconfig,fixed_setup=context.fixed_setup)
diff --git a/scripts/lib/devtool/package.py b/scripts/lib/devtool/package.py
index af9e8f15f5..c2367342c3 100644
--- a/scripts/lib/devtool/package.py
+++ b/scripts/lib/devtool/package.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool plugin containing the package subcommands"""
import os
diff --git a/scripts/lib/devtool/runqemu.py b/scripts/lib/devtool/runqemu.py
index e26cf28c2f..ead978aabc 100644
--- a/scripts/lib/devtool/runqemu.py
+++ b/scripts/lib/devtool/runqemu.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool runqemu plugin"""
diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py
index f46577c2ab..9aefd7e354 100644
--- a/scripts/lib/devtool/sdk.py
+++ b/scripts/lib/devtool/sdk.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015-2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import os
import subprocess
@@ -145,6 +135,9 @@ def sdk_update(args, config, basepath, workspace):
# Fetch manifest from server
tmpmanifest = os.path.join(tmpsdk_dir, 'conf', 'sdk-conf-manifest')
ret = subprocess.call("wget -q -O %s %s/conf/sdk-conf-manifest" % (tmpmanifest, updateserver), shell=True)
+ if ret != 0:
+ logger.error("Cannot dowload files from %s" % updateserver)
+ return ret
changedfiles = check_manifest(tmpmanifest, basepath)
if not changedfiles:
logger.info("Already up-to-date")
@@ -214,7 +207,7 @@ def sdk_update(args, config, basepath, workspace):
if not sstate_mirrors:
with open(os.path.join(conf_dir, 'site.conf'), 'a') as f:
f.write('SCONF_VERSION = "%s"\n' % site_conf_version)
- f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver)
+ f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver)
finally:
shutil.rmtree(tmpsdk_dir)
@@ -307,7 +300,8 @@ def sdk_install(args, config, basepath, workspace):
return 2
try:
- exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True)
+ exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True)
+ exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True)
except bb.process.ExecutionError as e:
raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e)))
diff --git a/scripts/lib/devtool/search.py b/scripts/lib/devtool/search.py
index 054985b85d..70b81cac5e 100644
--- a/scripts/lib/devtool/search.py
+++ b/scripts/lib/devtool/search.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool search plugin"""
@@ -36,44 +26,75 @@ def search(args, config, basepath, workspace):
keyword_rc = re.compile(args.keyword)
- for fn in os.listdir(pkgdata_dir):
- pfn = os.path.join(pkgdata_dir, fn)
- if not os.path.isfile(pfn):
+ def print_match(pn):
+ rd = parse_recipe(config, tinfoil, pn, True)
+ if not rd:
+ return
+ summary = rd.getVar('SUMMARY')
+ if summary == rd.expand(defsummary):
+ summary = ''
+ print("%s %s" % (pn.ljust(20), summary))
+
+
+ matches = []
+ if os.path.exists(pkgdata_dir):
+ for fn in os.listdir(pkgdata_dir):
+ pfn = os.path.join(pkgdata_dir, fn)
+ if not os.path.isfile(pfn):
+ continue
+
+ packages = []
+ match = False
+ if keyword_rc.search(fn):
+ match = True
+
+ if not match:
+ with open(pfn, 'r') as f:
+ for line in f:
+ if line.startswith('PACKAGES:'):
+ packages = line.split(':', 1)[1].strip().split()
+
+ for pkg in packages:
+ if keyword_rc.search(pkg):
+ match = True
+ break
+ if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
+ with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
+ for line in f:
+ if ': ' in line:
+ splitline = line.split(': ', 1)
+ key = splitline[0]
+ value = splitline[1].strip()
+ key = key.replace(":" + pkg, "")
+ if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']:
+ if keyword_rc.search(value):
+ match = True
+ break
+ if match:
+ print_match(fn)
+ matches.append(fn)
+ else:
+ logger.warning('Package data is not available, results may be limited')
+
+ for recipe in tinfoil.all_recipes():
+ if args.fixed_setup and 'nativesdk' in recipe.inherits():
continue
- packages = []
match = False
- if keyword_rc.search(fn):
+ if keyword_rc.search(recipe.pn):
match = True
-
- if not match:
- with open(pfn, 'r') as f:
- for line in f:
- if line.startswith('PACKAGES:'):
- packages = line.split(':', 1)[1].strip().split()
-
- for pkg in packages:
- if keyword_rc.search(pkg):
+ else:
+ for prov in recipe.provides:
+ if keyword_rc.search(prov):
match = True
break
- if os.path.exists(os.path.join(pkgdata_dir, 'runtime', pkg + '.packaged')):
- with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f:
- for line in f:
- if ': ' in line:
- splitline = line.split(':', 1)
- key = splitline[0]
- value = splitline[1].strip()
- if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'):
- if keyword_rc.search(value):
- match = True
- break
-
- if match:
- rd = parse_recipe(config, tinfoil, fn, True)
- summary = rd.getVar('SUMMARY')
- if summary == rd.expand(defsummary):
- summary = ''
- print("%s %s" % (fn.ljust(20), summary))
+ if not match:
+ for rprov in recipe.rprovides:
+ if keyword_rc.search(rprov):
+ match = True
+ break
+            if match and recipe.pn not in matches:
+ print_match(recipe.pn)
finally:
tinfoil.shutdown()
@@ -82,7 +103,7 @@ def search(args, config, basepath, workspace):
def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
parser_search = subparsers.add_parser('search', help='Search available recipes',
- description='Searches for available target recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name on match.',
+ description='Searches for available recipes. Matches on recipe name, package name, description and installed files, and prints the recipe name and summary on match.',
group='info')
- parser_search.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed)')
- parser_search.set_defaults(func=search, no_workspace=True)
+ parser_search.add_argument('keyword', help='Keyword to search for (regular expression syntax allowed, use quotes to avoid shell expansion)')
+ parser_search.set_defaults(func=search, no_workspace=True, fixed_setup=context.fixed_setup)
diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py
index 26187a0c41..bd009f44b1 100644
--- a/scripts/lib/devtool/standard.py
+++ b/scripts/lib/devtool/standard.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool standard plugins"""
import os
@@ -66,7 +56,7 @@ def add(args, config, basepath, workspace):
args.srctree = args.recipename
args.recipename = None
elif os.path.isdir(args.recipename):
- logger.warn('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
+ logger.warning('Ambiguous argument "%s" - assuming you mean it to be the recipe name' % args.recipename)
if not args.fetchuri:
if args.srcrev:
@@ -82,7 +72,7 @@ def add(args, config, basepath, workspace):
if args.fetchuri:
raise DevtoolError('URI specified as positional argument as well as -f/--fetch')
else:
- logger.warn('-f/--fetch option is deprecated - you can now simply specify the URL to fetch as a positional argument instead')
+ logger.warning('-f/--fetch option is deprecated - you can now simply specify the URL to fetch as a positional argument instead')
args.fetchuri = args.fetch
if args.recipename:
@@ -155,8 +145,10 @@ def add(args, config, basepath, workspace):
extracmdopts += ' --src-subdir "%s"' % args.src_subdir
if args.autorev:
extracmdopts += ' -a'
- if args.fetch_dev:
- extracmdopts += ' --fetch-dev'
+ if args.npm_dev:
+ extracmdopts += ' --npm-dev'
+ if args.no_pypi:
+ extracmdopts += ' --no-pypi'
if args.mirrors:
extracmdopts += ' --mirrors'
if args.srcrev:
@@ -217,7 +209,7 @@ def add(args, config, basepath, workspace):
raise DevtoolError('Command \'%s\' did not create any recipe file:\n%s' % (e.command, e.stdout))
attic_recipe = os.path.join(config.workspace_path, 'attic', recipename, os.path.basename(recipefile))
if os.path.exists(attic_recipe):
- logger.warn('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
+ logger.warning('A modified recipe from a previous invocation exists in %s - you may wish to move this over the top of the new recipe if you had changes in it that you want to continue with' % attic_recipe)
finally:
if tmpsrcdir and os.path.exists(tmpsrcdir):
shutil.rmtree(tmpsrcdir)
@@ -244,10 +236,14 @@ def add(args, config, basepath, workspace):
if args.fetchuri and not args.no_git:
setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data)
- initial_rev = None
+ initial_rev = {}
if os.path.exists(os.path.join(srctree, '.git')):
(stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
- initial_rev = stdout.rstrip()
+ initial_rev["."] = stdout.rstrip()
+ (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree)
+ for line in stdout.splitlines():
+ (rev, submodule) = line.split()
+ initial_rev[os.path.relpath(submodule, srctree)] = rev
if args.src_subdir:
srctree = os.path.join(srctree, args.src_subdir)
@@ -261,23 +257,20 @@ def add(args, config, basepath, workspace):
if b_is_s:
f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree)
if initial_rev:
- f.write('\n# initial_rev: %s\n' % initial_rev)
+ for key, value in initial_rev.items():
+ f.write('\n# initial_rev %s: %s\n' % (key, value))
if args.binary:
- f.write('do_install_append() {\n')
+ f.write('do_install:append() {\n')
f.write(' rm -rf ${D}/.git\n')
f.write(' rm -f ${D}/singletask.lock\n')
f.write('}\n')
if bb.data.inherits_class('npm', rd):
- f.write('do_install_append() {\n')
- f.write(' # Remove files added to source dir by devtool/externalsrc\n')
- f.write(' rm -f ${NPM_INSTALLDIR}/singletask.lock\n')
- f.write(' rm -rf ${NPM_INSTALLDIR}/.git\n')
- f.write(' rm -rf ${NPM_INSTALLDIR}/oe-local-files\n')
- f.write(' for symlink in ${EXTERNALSRC_SYMLINKS} ; do\n')
- f.write(' rm -f ${NPM_INSTALLDIR}/${symlink%%:*}\n')
- f.write(' done\n')
+ f.write('python do_configure:append() {\n')
+ f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n')
+ f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n')
+ f.write(' bb.utils.remove(lockfile)\n')
f.write('}\n')
# Check if the new layer provides recipes whose priorities have been
@@ -295,7 +288,7 @@ def add(args, config, basepath, workspace):
with open(layerconf_file, 'a') as f:
f.write('%s = "%s"\n' % (preferred_provider, recipe_name))
else:
- logger.warn('Set \'%s\' in order to use the recipe' % preferred_provider)
+ logger.warning('Set \'%s\' in order to use the recipe' % preferred_provider)
break
_add_md5(config, recipename, appendfile)
@@ -332,10 +325,6 @@ def _check_compatible_recipe(pn, d):
raise DevtoolError("The %s recipe is a packagegroup, and therefore is "
"not supported by this tool" % pn, 4)
- if bb.data.inherits_class('meta', d):
- raise DevtoolError("The %s recipe is a meta-recipe, and therefore is "
- "not supported by this tool" % pn, 4)
-
if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'):
# Not an incompatibility error per se, so we don't pass the error code
raise DevtoolError("externalsrc is currently enabled for the %s "
@@ -371,7 +360,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None):
bb.utils.mkdirhier(dst_d)
shutil.move(src, dst)
-def _copy_file(src, dst, dry_run_outdir=None):
+def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None):
"""Copy a file. Creates all the directory components of destination path."""
dry_run_suffix = ' (dry-run)' if dry_run_outdir else ''
logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix))
@@ -471,11 +460,37 @@ def sync(args, config, basepath, workspace):
finally:
tinfoil.shutdown()
+def symlink_oelocal_files_srctree(rd, srctree):
+ import oe.patch
+ if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')):
+ # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
+ # (otherwise the recipe won't build as expected)
+ local_files_dir = os.path.join(srctree, 'oe-local-files')
+ addfiles = []
+ for root, _, files in os.walk(local_files_dir):
+ relpth = os.path.relpath(root, local_files_dir)
+ if relpth != '.':
+ bb.utils.mkdirhier(os.path.join(srctree, relpth))
+ for fn in files:
+ if fn == '.gitignore':
+ continue
+ destpth = os.path.join(srctree, relpth, fn)
+ if os.path.exists(destpth):
+ os.unlink(destpth)
+ if relpth != '.':
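+ # Walk back up from the subdirectory to srctree (e.g. '../..') so the
+ # symlink target resolves relative to the symlink's own location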
+ back_relpth = os.path.relpath(local_files_dir, root)
+ os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth)
+ else:
+ os.symlink('oe-local-files/%s' % fn, destpth)
+ addfiles.append(os.path.join(relpth, fn))
+ if addfiles:
+ oe.patch.GitApplyTree.commitIgnored("Add local file symlinks", dir=srctree, files=addfiles, d=rd)
def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False):
"""Extract sources of a recipe"""
import oe.recipeutils
import oe.patch
+ import oe.path
pn = d.getVar('PN')
@@ -507,14 +522,20 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
history = d.varhistory.variable('SRC_URI')
for event in history:
if not 'flag' in event:
- if event['op'].startswith(('_append[', '_prepend[')):
- extra_overrides.append(event['op'].split('[')[1].split(']')[0])
+ if event['op'].startswith((':append[', ':prepend[')):
+ override = event['op'].split('[')[1].split(']')[0]
+ if not override.startswith('pn-'):
+ extra_overrides.append(override)
+ # We want to remove duplicate overrides. If a recipe had multiple
+ # SRC_URI_override += values it would cause multiple instances of
+ # overrides. This doesn't play nicely with things like creating a
+ # branch for every instance of DEVTOOL_EXTRA_OVERRIDES.
+ extra_overrides = list(set(extra_overrides))
if extra_overrides:
logger.info('SRC_URI contains some conditional appends/prepends - will create branches to represent these')
initial_rev = None
- appendexisted = False
recipefile = d.getVar('FILE')
appendfile = recipe_to_append(recipefile, config)
is_kernel_yocto = bb.data.inherits_class('kernel-yocto', d)
@@ -549,6 +570,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
logger.debug('writing append file %s' % appendfile)
with open(appendfile, 'a') as f:
f.write('###--- _extract_source\n')
+ f.write('deltask do_recipe_qa\n')
+ f.write('deltask do_recipe_qa_setscene\n')
+ f.write('ERROR_QA:remove = "patch-fuzz"\n')
f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir)
f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch)
if not is_kernel_yocto:
@@ -566,13 +590,24 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps')
with open(preservestampfile, 'w') as f:
f.write(d.getVar('STAMP'))
+ tinfoil.modified_files()
try:
- if bb.data.inherits_class('kernel-yocto', d):
+ if is_kernel_yocto:
# We need to generate the kernel config
task = 'do_configure'
else:
task = 'do_patch'
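+ # The task is unusable if it is flagged 'noexec' or lacks a 'task' flag entirely (i.e. it was deleted)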
+ if 'noexec' in (d.getVarFlags(task, False) or []) or 'task' not in (d.getVarFlags(task, False) or []):
+ logger.info('The %s recipe has %s disabled. Running only '
+ 'do_configure task dependencies' % (pn, task))
+
+ if 'depends' in d.getVarFlags('do_configure', False):
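+ # Fall back to building do_configure's dependencies as targets instead of running the disabled task itself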
+ pn = d.getVarFlags('do_configure', False)['depends']
+ pn = pn.replace('${PV}', d.getVar('PV'))
+ pn = pn.replace('${COMPILERDEP}', d.getVar('COMPILERDEP'))
+ task = None
+
# Run the fetch + unpack tasks
res = tinfoil.build_targets(pn,
task,
@@ -584,6 +619,17 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
if not res:
raise DevtoolError('Extracting source for %s failed' % pn)
+ if not is_kernel_yocto and ('noexec' in (d.getVarFlags('do_patch', False) or []) or 'task' not in (d.getVarFlags('do_patch', False) or [])):
+ workshareddir = d.getVar('S')
+ if os.path.islink(srctree):
+ os.unlink(srctree)
+
+ os.symlink(workshareddir, srctree)
+
+ # The initial_rev file is created by the devtool_post_unpack function, which will not be executed if
+ # the do_unpack/do_patch tasks are disabled, so we have to report source extraction as successful directly
+ return True, True
+
try:
with open(os.path.join(tempdir, 'initial_rev'), 'r') as f:
initial_rev = f.read()
@@ -594,19 +640,47 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
raise DevtoolError('Something went wrong with source extraction - the devtool-source class was not active or did not function correctly:\n%s' % str(e))
srcsubdir_rel = os.path.relpath(srcsubdir, os.path.join(tempdir, 'workdir'))
+ # Check if work-shared is empty; if so,
+ # find the source and copy it to work-shared
+ if is_kernel_yocto:
+ workshareddir = d.getVar('STAGING_KERNEL_DIR')
+ staging_kerVer = get_staging_kver(workshareddir)
+ kernelVersion = d.getVar('LINUX_VERSION')
+
+ # handle dangling symbolic link in work-shared:
+ if os.path.islink(workshareddir):
+ os.unlink(workshareddir)
+
+ if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer):
+ shutil.rmtree(workshareddir)
+ oe.path.copyhardlinktree(srcsubdir, workshareddir)
+ elif not os.path.exists(workshareddir):
+ oe.path.copyhardlinktree(srcsubdir, workshareddir)
+
tempdir_localdir = os.path.join(tempdir, 'oe-local-files')
srctree_localdir = os.path.join(srctree, 'oe-local-files')
if sync:
- bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
-
- # Move oe-local-files directory to srctree
- # As the oe-local-files is not part of the constructed git tree,
- # remove them directly during the synchrounizating might surprise
- # the users. Instead, we move it to oe-local-files.bak and remind
- # user in the log message.
+ try:
+ logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch))
+ bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree)
+
+ # Use git fetch to update the source with the current recipe
+ # To be able to update the currently checked out branch with
+ # possibly new history (no fast-forward), git needs to be told
+ # that's ok
+ logger.info('Syncing source files including patches to git branch: %s' % devbranch)
+ bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree)
+ except bb.process.ExecutionError as e:
+ raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e))
+
+ # Move the oe-local-files directory to srctree.
+ # As oe-local-files is not part of the constructed git tree,
+ # removing it directly during the synchronization might surprise
+ # the user. Instead, we move it to oe-local-files.bak and remind
+ # the user in the log message.
if os.path.exists(srctree_localdir + '.bak'):
- shutil.rmtree(srctree_localdir, srctree_localdir + '.bak')
+ shutil.rmtree(srctree_localdir + '.bak')
if os.path.exists(srctree_localdir):
logger.info('Backing up current local file directory %s' % srctree_localdir)
@@ -622,29 +696,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works
shutil.move(tempdir_localdir, srcsubdir)
shutil.move(srcsubdir, srctree)
-
- if os.path.abspath(d.getVar('S')) == os.path.abspath(d.getVar('WORKDIR')):
- # If recipe extracts to ${WORKDIR}, symlink the files into the srctree
- # (otherwise the recipe won't build as expected)
- local_files_dir = os.path.join(srctree, 'oe-local-files')
- addfiles = []
- for root, _, files in os.walk(local_files_dir):
- relpth = os.path.relpath(root, local_files_dir)
- if relpth != '.':
- bb.utils.mkdirhier(os.path.join(srctree, relpth))
- for fn in files:
- if fn == '.gitignore':
- continue
- destpth = os.path.join(srctree, relpth, fn)
- if os.path.exists(destpth):
- os.unlink(destpth)
- os.symlink('oe-local-files/%s' % fn, destpth)
- addfiles.append(os.path.join(relpth, fn))
- if addfiles:
- bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree)
- useroptions = []
- oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=d)
- bb.process.run('git %s commit -a -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree)
+ symlink_oelocal_files_srctree(d, srctree)
if is_kernel_yocto:
logger.info('Copying kernel config to srctree')
@@ -704,19 +756,49 @@ def _check_preserve(config, recipename):
if splitline[2] != md5:
bb.utils.mkdirhier(preservepath)
preservefile = os.path.basename(removefile)
- logger.warn('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
+ logger.warning('File %s modified since it was written, preserving in %s' % (preservefile, preservepath))
shutil.move(removefile, os.path.join(preservepath, preservefile))
else:
os.remove(removefile)
else:
tf.write(line)
- os.rename(newfile, origfile)
+ bb.utils.rename(newfile, origfile)
+
+def get_staging_kver(srcdir):
+ # Kernel version from work-shared
+ kerver = []
+ staging_kerVer=""
+ if os.path.exists(srcdir) and os.listdir(srcdir):
+ with open(os.path.join(srcdir, "Makefile")) as f:
+ version = [next(f) for x in range(5)][1:4]
+ for word in version:
+ kerver.append(word.split('= ')[1].split('\n')[0])
+ staging_kerVer = ".".join(kerver)
+ return staging_kerVer
+
+def get_staging_kbranch(srcdir):
+ staging_kbranch = ""
+ if os.path.exists(srcdir) and os.listdir(srcdir):
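+ # Parse the currently checked out branch name from 'git branch' output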
+ (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir)
+ staging_kbranch = "".join(branch.split('\n')[0])
+ return staging_kbranch
+
+def get_real_srctree(srctree, s, workdir):
+ # Check that recipe isn't using a shared workdir
+ s = os.path.abspath(s)
+ workdir = os.path.abspath(workdir)
+ if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
+ # Handle if S is set to a subdirectory of the source
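+ # e.g. WORKDIR=/wd, S=/wd/pkg-1.0/src gives srcsubdir 'src', which is appended to srctree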
+ srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
+ srctree = os.path.join(srctree, srcsubdir)
+ return srctree
def modify(args, config, basepath, workspace):
"""Entry point for the devtool 'modify' subcommand"""
import bb
import oe.recipeutils
import oe.patch
+ import oe.path
if args.recipename in workspace:
raise DevtoolError("recipe %s is already in your workspace" %
@@ -755,23 +837,89 @@ def modify(args, config, basepath, workspace):
_check_compatible_recipe(pn, rd)
- initial_rev = None
- commits = []
+ initial_revs = {}
+ commits = {}
check_commits = False
+
+ if bb.data.inherits_class('kernel-yocto', rd):
+ # Current set kernel version
+ kernelVersion = rd.getVar('LINUX_VERSION')
+ srcdir = rd.getVar('STAGING_KERNEL_DIR')
+ kbranch = rd.getVar('KBRANCH')
+
+ staging_kerVer = get_staging_kver(srcdir)
+ staging_kbranch = get_staging_kbranch(srcdir)
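+ # Reuse the work-shared kernel source only if it matches the recipe's kernel version and branch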
+ if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch):
+ oe.path.copyhardlinktree(srcdir, srctree)
+ workdir = rd.getVar('WORKDIR')
+ srcsubdir = rd.getVar('S')
+ localfilesdir = os.path.join(srctree, 'oe-local-files')
+ # Move local source files into separate subdir
+ recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)]
+ local_files = oe.recipeutils.get_recipe_local_files(rd)
+
+ for key in local_files.copy():
+ if key.endswith('scc'):
+ sccfile = open(local_files[key], 'r')
+ for l in sccfile:
+ line = l.split()
+ if line and line[0] in ('kconf', 'patch'):
+ cfg = os.path.join(os.path.dirname(local_files[key]), line[-1])
+ if not cfg in local_files.values():
+ local_files[line[-1]] = cfg
+ shutil.copy2(cfg, workdir)
+ sccfile.close()
+
+ # Ignore local files with subdir={BP}
+ srcabspath = os.path.abspath(srcsubdir)
+ local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))]
+ if local_files:
+ for fname in local_files:
+ _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname))
+ with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f:
+ f.write('# Ignore local files, by default. Remove this file if you want to commit the directory to Git\n*\n')
+
+ symlink_oelocal_files_srctree(rd, srctree)
+
+ task = 'do_configure'
+ res = tinfoil.build_targets(pn, task, handle_events=True)
+
+ # Copy .config to workspace
+ kconfpath = rd.getVar('B')
+ logger.info('Copying kernel config to workspace')
+ shutil.copy2(os.path.join(kconfpath, '.config'), srctree)
+
+ # Set this to true; we still need to get initial_rev
+ # by parsing the git repo
+ args.no_extract = True
+
if not args.no_extract:
- initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
- if not initial_rev:
+ initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
+ if not initial_revs["."]:
return 1
logger.info('Source tree extracted to %s' % srctree)
- # Get list of commits since this revision
- (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree)
- commits = stdout.split()
- check_commits = True
+
+ if os.path.exists(os.path.join(srctree, '.git')):
+ # Get list of commits since this revision
+ (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree)
+ commits["."] = stdout.split()
+ check_commits = True
+ (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree)
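+ # Each line is '<devtool-base sha1> <absolute submodule path>'; record the initial rev and commits per submodule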
+ for line in stdout.splitlines():
+ (rev, submodule_path) = line.split()
+ submodule = os.path.relpath(submodule_path, srctree)
+ initial_revs[submodule] = rev
+ (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path)
+ commits[submodule] = stdout.split()
else:
if os.path.exists(os.path.join(srctree, '.git')):
- # Check if it's a tree previously extracted by us
+ # Check if it's a tree previously extracted by us. This is done
+ # by ensuring that devtool-base and args.branch (devtool) exist.
+ # The check_commits logic will cause an exception if either one
+ # of these doesn't exist
try:
(stdout, _) = bb.process.run('git branch --contains devtool-base', cwd=srctree)
+ bb.process.run('git rev-parse %s' % args.branch, cwd=srctree)
except bb.process.ExecutionError:
stdout = ''
if stdout:
@@ -779,11 +927,11 @@ def modify(args, config, basepath, workspace):
for line in stdout.splitlines():
if line.startswith('*'):
(stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree)
- initial_rev = stdout.rstrip()
- if not initial_rev:
+ initial_revs["."] = stdout.rstrip()
+ if "." not in initial_revs:
# Otherwise, just grab the head revision
(stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree)
- initial_rev = stdout.rstrip()
+ initial_revs["."] = stdout.rstrip()
branch_patches = {}
if check_commits:
@@ -795,62 +943,86 @@ def modify(args, config, basepath, workspace):
if branchname.startswith(override_branch_prefix):
branches.append(branchname)
if branches:
- logger.warn('SRC_URI is conditionally overridden in this recipe, thus several %s* branches have been created, one for each override that makes changes to SRC_URI. It is recommended that you make changes to the %s branch first, then checkout and rebase each %s* branch and update any unique patches there (duplicates on those branches will be ignored by devtool finish/update-recipe)' % (override_branch_prefix, args.branch, override_branch_prefix))
+ logger.warning('SRC_URI is conditionally overridden in this recipe, thus several %s* branches have been created, one for each override that makes changes to SRC_URI. It is recommended that you make changes to the %s branch first, then checkout and rebase each %s* branch and update any unique patches there (duplicates on those branches will be ignored by devtool finish/update-recipe)' % (override_branch_prefix, args.branch, override_branch_prefix))
branches.insert(0, args.branch)
seen_patches = []
for branch in branches:
branch_patches[branch] = []
- (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree)
- for line in stdout.splitlines():
- line = line.strip()
- if line.startswith(oe.patch.GitApplyTree.patch_line_prefix):
- origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip()
- if not origpatch in seen_patches:
- seen_patches.append(origpatch)
- branch_patches[branch].append(origpatch)
+ (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree)
+ for sha1 in stdout.splitlines():
+ notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip())
+ origpatch = notes.get(oe.patch.GitApplyTree.original_patch)
+ if origpatch and origpatch not in seen_patches:
+ seen_patches.append(origpatch)
+ branch_patches[branch].append(origpatch)
# Need to grab this here in case the source is within a subdirectory
srctreebase = srctree
-
- # Check that recipe isn't using a shared workdir
- s = os.path.abspath(rd.getVar('S'))
- workdir = os.path.abspath(rd.getVar('WORKDIR'))
- if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir:
- # Handle if S is set to a subdirectory of the source
- srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1]
- srctree = os.path.join(srctree, srcsubdir)
+ srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
bb.utils.mkdirhier(os.path.dirname(appendfile))
with open(appendfile, 'w') as f:
- f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n')
+ # if not present, add type=git-dependency to the secondary sources
+ # (non local files) so they can be extracted correctly when building a recipe after
+ # doing a devtool modify on it
+ src_uri = rd.getVar('SRC_URI').split()
+ src_uri_append = []
+ src_uri_remove = []
+
+ # Assume first entry is main source extracted in ${S} so skip it
+ src_uri = src_uri[1:]
+
+ # Add "type=git-dependency" to all non local sources
+ for url in src_uri:
+ if not url.startswith('file://') and not 'type=' in url:
+ src_uri_remove.append(url)
+ src_uri_append.append('%s;type=git-dependency' % url)
+
+ if src_uri_remove:
+ f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove))
+ f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append))
+
+ f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n')
# Local files can be modified/tracked in separate subdir under srctree
# Mostly useful for packages with S != WORKDIR
- f.write('FILESPATH_prepend := "%s:"\n' %
+ f.write('FILESPATH:prepend := "%s:"\n' %
os.path.join(srctreebase, 'oe-local-files'))
f.write('# srctreebase: %s\n' % srctreebase)
f.write('\ninherit externalsrc\n')
f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n')
- f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))
+ f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd)
if b_is_s:
- f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
+ f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
if bb.data.inherits_class('kernel', rd):
f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout '
- 'do_fetch do_unpack do_kernel_configme do_kernel_configcheck"\n')
- f.write('\ndo_patch() {\n'
- ' :\n'
- '}\n')
- f.write('\ndo_configure_append() {\n'
+ 'do_fetch do_unpack do_kernel_configcheck"\n')
+ f.write('\ndo_patch[noexec] = "1"\n')
+ f.write('\ndo_configure:append() {\n'
' cp ${B}/.config ${S}/.config.baseline\n'
' ln -sfT ${B}/.config ${S}/.config.new\n'
'}\n')
- if initial_rev:
- f.write('\n# initial_rev: %s\n' % initial_rev)
- for commit in commits:
- f.write('# commit: %s\n' % commit)
+ f.write('\ndo_kernel_configme:prepend() {\n'
+ ' if [ -e ${S}/.config ]; then\n'
+ ' mv ${S}/.config ${S}/.config.old\n'
+ ' fi\n'
+ '}\n')
+ if rd.getVarFlag('do_menuconfig', 'task'):
+ f.write('\ndo_configure:append() {\n'
+ ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n'
+ ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n'
+ ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n'
+ ' fi\n'
+ '}\n')
+ if initial_revs:
+ for name, rev in initial_revs.items():
+ f.write('\n# initial_rev %s: %s\n' % (name, rev))
+ if name in commits:
+ for commit in commits[name]:
+ f.write('# commit %s: %s\n' % (name, commit))
if branch_patches:
for branch in branch_patches:
if branch == args.branch:
@@ -970,10 +1142,10 @@ def rename(args, config, basepath, workspace):
# Rename bbappend
logger.info('Renaming %s to %s' % (append, newappend))
- os.rename(append, newappend)
+ bb.utils.rename(append, newappend)
# Rename recipe file
logger.info('Renaming %s to %s' % (recipefile, newfile))
- os.rename(recipefile, newfile)
+ bb.utils.rename(recipefile, newfile)
# Rename source tree if it's the default path
appendmd5 = None
@@ -1061,7 +1233,7 @@ def rename(args, config, basepath, workspace):
return 0
-def _get_patchset_revs(srctree, recipe_path, initial_rev=None):
+def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refresh=False):
"""Get initial and update rev of a recipe. These are the start point of the
whole patchset and start point for the patches to be re-generated/updated.
"""
@@ -1073,44 +1245,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None):
branchname = stdout.rstrip()
# Parse initial rev from recipe if not specified
- commits = []
+ commits = {}
patches = []
+ initial_revs = {}
with open(recipe_path, 'r') as f:
for line in f:
- if line.startswith('# initial_rev:'):
- if not initial_rev:
- initial_rev = line.split(':')[-1].strip()
- elif line.startswith('# commit:'):
- commits.append(line.split(':')[-1].strip())
- elif line.startswith('# patches_%s:' % branchname):
- patches = line.split(':')[-1].strip().split(',')
-
- update_rev = initial_rev
- changed_revs = None
- if initial_rev:
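+ # Match bookkeeping comments of the form '# initial_rev <name>: <rev>' or '# commit <name>: <rev>'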
+ pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$'
+ match = re.search(pattern, line)
+ if match:
+ name = match.group(1)
+ rev = match.group(2)
+ if line.startswith('# initial_rev'):
+ if not (name == "." and initial_rev):
+ initial_revs[name] = rev
+ elif line.startswith('# commit') and not force_patch_refresh:
+ if name not in commits:
+ commits[name] = [rev]
+ else:
+ commits[name].append(rev)
+ elif line.startswith('# patches_%s:' % branchname):
+ patches = line.split(':')[-1].strip().split(',')
+
+ update_revs = dict(initial_revs)
+ changed_revs = {}
+ for name, rev in initial_revs.items():
# Find first actually changed revision
stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
- initial_rev, cwd=srctree)
+ rev, cwd=os.path.join(srctree, name))
newcommits = stdout.split()
- for i in range(min(len(commits), len(newcommits))):
- if newcommits[i] == commits[i]:
- update_rev = commits[i]
+ if name in commits:
+ for i in range(min(len(commits[name]), len(newcommits))):
+ if newcommits[i] == commits[name][i]:
+ update_revs[name] = commits[name][i]
try:
stdout, _ = bb.process.run('git cherry devtool-patched',
- cwd=srctree)
+ cwd=os.path.join(srctree, name))
except bb.process.ExecutionError as err:
stdout = None
- if stdout is not None:
- changed_revs = []
+ if stdout is not None and not force_patch_refresh:
for line in stdout.splitlines():
if line.startswith('+ '):
rev = line.split()[1]
if rev in newcommits:
- changed_revs.append(rev)
+ if name not in changed_revs:
+ changed_revs[name] = [rev]
+ else:
+ changed_revs[name].append(rev)
- return initial_rev, update_rev, changed_revs, patches
+ return initial_revs, update_revs, changed_revs, patches
def _remove_file_entries(srcuri, filelist):
"""Remove file:// entries from SRC_URI"""
@@ -1165,14 +1349,17 @@ def _remove_source_files(append, files, destpath, no_report_remove=False, dry_ru
raise
-def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
+def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
"""Export patches from srctree to given location.
Returns three-tuple of dicts:
1. updated - patches that already exist in SRCURI
2. added - new patches that don't exist in SRCURI
 3. removed - patches that exist in SRCURI but not in exported patches
- In each dict the key is the 'basepath' of the URI and value is the
- absolute path to the existing file in recipe space (if any).
+ In each dict the key is the 'basepath' of the URI and value is:
+ - for updated and added dicts, a dict with 2 optional keys:
+ - 'path': the absolute path to the existing file in recipe space (if any)
+ - 'patchdir': the directory in which the patch should be applied (if any)
+ - for removed dict, the absolute path to the existing file in recipe space
"""
import oe.recipeutils
from oe.patch import GitApplyTree
@@ -1186,54 +1373,60 @@ def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
# Generate patches from Git, exclude local files directory
patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
- GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec)
-
- new_patches = sorted(os.listdir(destdir))
- for new_patch in new_patches:
- # Strip numbering from patch names. If it's a git sequence named patch,
- # the numbers might not match up since we are starting from a different
- # revision This does assume that people are using unique shortlog
- # values, but they ought to be anyway...
- new_basename = seqpatch_re.match(new_patch).group(2)
- match_name = None
- for old_patch in existing_patches:
- old_basename = seqpatch_re.match(old_patch).group(2)
- old_basename_splitext = os.path.splitext(old_basename)
- if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
- old_patch_noext = os.path.splitext(old_patch)[0]
- match_name = old_patch_noext
- break
- elif new_basename == old_basename:
- match_name = old_patch
- break
- if match_name:
- # Rename patch files
- if new_patch != match_name:
- os.rename(os.path.join(destdir, new_patch),
- os.path.join(destdir, match_name))
- # Need to pop it off the list now before checking changed_revs
- oldpath = existing_patches.pop(old_patch)
- if changed_revs is not None:
- # Avoid updating patches that have not actually changed
- with open(os.path.join(destdir, match_name), 'r') as f:
- firstlineitems = f.readline().split()
- # Looking for "From <hash>" line
- if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
- if not firstlineitems[1] in changed_revs:
- continue
- # Recompress if necessary
- if oldpath.endswith(('.gz', '.Z')):
- bb.process.run(['gzip', match_name], cwd=destdir)
- if oldpath.endswith('.gz'):
- match_name += '.gz'
- else:
- match_name += '.Z'
- elif oldpath.endswith('.bz2'):
- bb.process.run(['bzip2', match_name], cwd=destdir)
- match_name += '.bz2'
- updated[match_name] = oldpath
- else:
- added[new_patch] = None
+ GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec)
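+ # Patches may have been exported into subdirectories of destdir (one per patchdir/submodule), so walk the whole tree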
+ for dirpath, dirnames, filenames in os.walk(destdir):
+ new_patches = filenames
+ reldirpath = os.path.relpath(dirpath, destdir)
+ for new_patch in new_patches:
+ # Strip numbering from patch names. If it's a git sequence named patch,
+ # the numbers might not match up since we are starting from a different
+ # revision This does assume that people are using unique shortlog
+ # values, but they ought to be anyway...
+ new_basename = seqpatch_re.match(new_patch).group(2)
+ match_name = None
+ for old_patch in existing_patches:
+ old_basename = seqpatch_re.match(old_patch).group(2)
+ old_basename_splitext = os.path.splitext(old_basename)
+ if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
+ old_patch_noext = os.path.splitext(old_patch)[0]
+ match_name = old_patch_noext
+ break
+ elif new_basename == old_basename:
+ match_name = old_patch
+ break
+ if match_name:
+ # Rename patch files
+ if new_patch != match_name:
+ bb.utils.rename(os.path.join(destdir, new_patch),
+ os.path.join(destdir, match_name))
+ # Need to pop it off the list now before checking changed_revs
+ oldpath = existing_patches.pop(old_patch)
+ if changed_revs is not None and dirpath in changed_revs:
+ # Avoid updating patches that have not actually changed
+ with open(os.path.join(dirpath, match_name), 'r') as f:
+ firstlineitems = f.readline().split()
+ # Looking for "From <hash>" line
+ if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
+ if not firstlineitems[1] in changed_revs[dirpath]:
+ continue
+ # Recompress if necessary
+ if oldpath.endswith(('.gz', '.Z')):
+ bb.process.run(['gzip', match_name], cwd=destdir)
+ if oldpath.endswith('.gz'):
+ match_name += '.gz'
+ else:
+ match_name += '.Z'
+ elif oldpath.endswith('.bz2'):
+ bb.process.run(['bzip2', match_name], cwd=destdir)
+ match_name += '.bz2'
+ updated[match_name] = {'path' : oldpath}
+ if reldirpath != ".":
+ updated[match_name]['patchdir'] = reldirpath
+ else:
+ added[new_patch] = {}
+ if reldirpath != ".":
+ added[new_patch]['patchdir'] = reldirpath
+
return (updated, added, existing_patches)
@@ -1270,8 +1463,10 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
1. updated - files that already exist in SRCURI
 2. added - new files that don't exist in SRCURI
 3. removed - files that exist in SRCURI but not in exported files
- In each dict the key is the 'basepath' of the URI and value is the
- absolute path to the existing file in recipe space (if any).
+ In each dict the key is the 'basepath' of the URI and value is:
+ - for updated and added dicts, a dict with 1 optional key:
+ - 'path': the absolute path to the existing file in recipe space (if any)
+ - for removed dict, the absolute path to the existing file in recipe space
"""
import oe.recipeutils
@@ -1284,6 +1479,18 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
updated = OrderedDict()
added = OrderedDict()
removed = OrderedDict()
+
+ # Get current branch and return early with empty lists
+ # if on one of the override branches
+ # (local files are provided only for the main branch and processing
+ # them against lists from recipe overrides will result in mismatches
+ # and broken modifications to recipes).
+ stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
+ cwd=srctree)
+ branchname = stdout.rstrip()
+ if branchname.startswith(override_branch_prefix):
+ return (updated, added, removed)
+
local_files_dir = os.path.join(srctreebase, 'oe-local-files')
git_files = _git_ls_tree(srctree)
if 'oe-local-files' in git_files:
@@ -1321,15 +1528,29 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
if os.path.exists(os.path.join(local_files_dir, fragment_fn)):
os.unlink(os.path.join(local_files_dir, fragment_fn))
+ # Special handling for cml1, ccmake, etc. bbclasses that generate
+ # configuration fragment files that are consumed as source files
+ for frag_class, frag_name in [("cml1", "fragment.cfg"), ("ccmake", "site-file.cmake")]:
+ if bb.data.inherits_class(frag_class, rd):
+ srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name)
+ if os.path.exists(srcpath):
+ if frag_name not in new_set:
+ new_set.append(frag_name)
+ # copy fragment into destdir
+ shutil.copy2(srcpath, destdir)
+ # copy fragment into local files dir if it exists
+ if os.path.isdir(local_files_dir):
+ shutil.copy2(srcpath, local_files_dir)
+
if new_set is not None:
for fname in new_set:
if fname in existing_files:
origpath = existing_files.pop(fname)
workpath = os.path.join(local_files_dir, fname)
if not filecmp.cmp(origpath, workpath):
- updated[fname] = origpath
+ updated[fname] = {'path' : origpath}
elif fname != '.gitignore':
- added[fname] = None
+ added[fname] = {}
workdir = rd.getVar('WORKDIR')
s = rd.getVar('S')
@@ -1346,7 +1567,7 @@ def _export_local_files(srctree, rd, destdir, srctreebase):
if os.path.exists(fpath):
origpath = existing_files.pop(fname)
if not filecmp.cmp(origpath, fpath):
- updated[fpath] = origpath
+ updated[fpath] = {'path' : origpath}
removed = existing_files
return (updated, added, removed)
@@ -1375,6 +1596,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
recipedir = os.path.basename(recipefile)
logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix))
+ # Get original SRCREV
+ old_srcrev = rd.getVar('SRCREV') or ''
+ if old_srcrev == "INVALID":
+ raise DevtoolError('Update mode srcrev is only valid for a recipe fetched from an SCM repository')
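+ # Key by path relative to srctree ('.' = top level) to match what _export_patches expects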
+ old_srcrev = {'.': old_srcrev}
+
# Get HEAD revision
try:
stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
@@ -1401,13 +1628,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
if not no_remove:
# Find list of existing patches in recipe file
patches_dir = tempfile.mkdtemp(dir=tempdir)
- old_srcrev = rd.getVar('SRCREV') or ''
upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
patches_dir)
logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))
# Remove deleted local files and "overlapping" patches
- remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values())
+ remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value]
if remove_files:
removedentries = _remove_file_entries(srcuri, remove_files)[0]
update_srcuri = True
@@ -1421,14 +1647,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
patchfields['SRC_URI'] = '\\\n '.join(srcuri)
if dry_run_outdir:
logger.info('Creating bbappend (dry-run)')
- else:
- appendfile, destpath = oe.recipeutils.bbappend_recipe(
- rd, appendlayerdir, files, wildcardver=wildcard_version,
- extralines=patchfields, removevalues=removevalues,
- redirect_output=dry_run_outdir)
+ appendfile, destpath = oe.recipeutils.bbappend_recipe(
+ rd, appendlayerdir, files, wildcardver=wildcard_version,
+ extralines=patchfields, removevalues=removevalues,
+ redirect_output=dry_run_outdir)
else:
files_dir = _determine_files_dir(rd)
- for basepath, path in upd_f.items():
+ for basepath, param in upd_f.items():
+ path = param['path']
logger.info('Updating file %s%s' % (basepath, dry_run_suffix))
if os.path.isabs(basepath):
# Original file (probably with subdir pointing inside source tree)
@@ -1438,7 +1664,8 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
_move_file(os.path.join(local_files_dir, basepath), path,
dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
update_srcuri= True
- for basepath, path in new_f.items():
+ for basepath, param in new_f.items():
+ path = param['path']
logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
_move_file(os.path.join(local_files_dir, basepath),
os.path.join(files_dir, basepath),
@@ -1459,7 +1686,7 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
_remove_source_files(appendlayerdir, remove_files, destpath, no_report_remove, dry_run=dry_run_outdir)
return True, appendfile, remove_files
-def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None):
+def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir=None, force_patch_refresh=False):
"""Implement the 'patch' mode of update-recipe"""
import bb
import oe.recipeutils
@@ -1470,9 +1697,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
if not os.path.exists(append):
raise DevtoolError('unable to find workspace bbappend for recipe %s' %
recipename)
+ srctreebase = workspace[recipename]['srctreebase']
+ relpatchdir = os.path.relpath(srctreebase, srctree)
+ if relpatchdir == '.':
+ patchdir_params = {}
+ else:
+ patchdir_params = {'patchdir': relpatchdir}
- initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev)
- if not initial_rev:
+ def srcuri_entry(basepath, patchdir_params):
+ if patchdir_params:
+ paramstr = ';' + ';'.join('%s=%s' % (k,v) for k,v in patchdir_params.items())
+ else:
+ paramstr = ''
+ return 'file://%s%s' % (basepath, paramstr)
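+ # e.g. srcuri_entry('fix.patch', {'patchdir': '..'}) -> 'file://fix.patch;patchdir=..'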
+
+ initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
+ if not initial_revs:
raise DevtoolError('Unable to find initial revision - please specify '
'it with --initial-rev')
@@ -1486,61 +1726,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
tempdir = tempfile.mkdtemp(prefix='devtool')
try:
local_files_dir = tempfile.mkdtemp(dir=tempdir)
- if filter_patches:
- upd_f = {}
- new_f = {}
- del_f = {}
- else:
- srctreebase = workspace[recipename]['srctreebase']
- upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
-
- remove_files = []
- if not no_remove:
- # Get all patches from source tree and check if any should be removed
- all_patches_dir = tempfile.mkdtemp(dir=tempdir)
- _, _, del_p = _export_patches(srctree, rd, initial_rev,
- all_patches_dir)
- # Remove deleted local files and patches
- remove_files = list(del_f.values()) + list(del_p.values())
+ upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
# Get updated patches from source tree
patches_dir = tempfile.mkdtemp(dir=tempdir)
- upd_p, new_p, _ = _export_patches(srctree, rd, update_rev,
+ upd_p, new_p, _ = _export_patches(srctree, rd, update_revs,
patches_dir, changed_revs)
+ # Get all patches from source tree and check if any should be removed
+ all_patches_dir = tempfile.mkdtemp(dir=tempdir)
+ _, _, del_p = _export_patches(srctree, rd, initial_revs,
+ all_patches_dir)
logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
if filter_patches:
- new_p = {}
- upd_p = {k:v for k,v in upd_p.items() if k in filter_patches}
- remove_files = [f for f in remove_files if f in filter_patches]
+ new_p = OrderedDict()
+ upd_p = OrderedDict((k,v) for k,v in upd_p.items() if k in filter_patches)
+ del_p = OrderedDict((k,v) for k,v in del_p.items() if k in filter_patches)
+ remove_files = []
+ if not no_remove:
+ # Remove deleted local files and patches
+ remove_files = list(del_f.values()) + list(del_p.values())
updatefiles = False
updaterecipe = False
destpath = None
srcuri = (rd.getVar('SRC_URI', False) or '').split()
+
if appendlayerdir:
- files = dict((os.path.join(local_files_dir, key), val) for
+ files = OrderedDict((os.path.join(local_files_dir, key), val) for
key, val in list(upd_f.items()) + list(new_f.items()))
- files.update(dict((os.path.join(patches_dir, key), val) for
+ files.update(OrderedDict((os.path.join(patches_dir, key), val) for
key, val in list(upd_p.items()) + list(new_p.items())))
+
+ params = []
+ for file, param in files.items():
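+ # Merge the workspace-level patchdir prefix with any per-patch subdirectory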
+ patchdir_param = dict(patchdir_params)
+ patchdir = param.get('patchdir', ".")
+ if patchdir != "." :
+ if patchdir_param:
+ patchdir_param['patchdir'] += patchdir
+ else:
+ patchdir_param['patchdir'] = patchdir
+ params.append(patchdir_param)
+
if files or remove_files:
removevalues = None
if remove_files:
removedentries, remaining = _remove_file_entries(
srcuri, remove_files)
if removedentries or remaining:
- remaining = ['file://' + os.path.basename(item) for
+ remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for
item in remaining]
removevalues = {'SRC_URI': removedentries + remaining}
appendfile, destpath = oe.recipeutils.bbappend_recipe(
rd, appendlayerdir, files,
wildcardver=wildcard_version,
removevalues=removevalues,
- redirect_output=dry_run_outdir)
+ redirect_output=dry_run_outdir,
+ params=params)
else:
logger.info('No patches or local source files needed updating')
else:
# Update existing files
files_dir = _determine_files_dir(rd)
- for basepath, path in upd_f.items():
+ for basepath, param in upd_f.items():
+ path = param['path']
logger.info('Updating file %s' % basepath)
if os.path.isabs(basepath):
# Original file (probably with subdir pointing inside source tree)
@@ -1551,14 +1799,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
_move_file(os.path.join(local_files_dir, basepath), path,
dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
updatefiles = True
- for basepath, path in upd_p.items():
- patchfn = os.path.join(patches_dir, basepath)
+ for basepath, param in upd_p.items():
+ path = param['path']
+ patchdir = param.get('patchdir', ".")
+ if patchdir != "." :
+ patchdir_param = dict(patchdir_params)
+ if patchdir_param:
+ patchdir_param['patchdir'] += patchdir
+ else:
+ patchdir_param['patchdir'] = patchdir
+ patchfn = os.path.join(patches_dir, patchdir, basepath)
if os.path.dirname(path) + '/' == dl_dir:
# This is a downloaded patch file - we now need to
# replace the entry in SRC_URI with our local version
logger.info('Replacing remote patch %s with updated local version' % basepath)
path = os.path.join(files_dir, basepath)
- _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath)
+ _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param))
updaterecipe = True
else:
logger.info('Updating patch %s%s' % (basepath, dry_run_suffix))
@@ -1566,21 +1822,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
updatefiles = True
# Add any new files
- for basepath, path in new_f.items():
+ for basepath, param in new_f.items():
logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
_move_file(os.path.join(local_files_dir, basepath),
os.path.join(files_dir, basepath),
dry_run_outdir=dry_run_outdir,
base_outdir=recipedir)
- srcuri.append('file://%s' % basepath)
+ srcuri.append(srcuri_entry(basepath, patchdir_params))
updaterecipe = True
- for basepath, path in new_p.items():
+ for basepath, param in new_p.items():
+ patchdir = param.get('patchdir', ".")
logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix))
- _move_file(os.path.join(patches_dir, basepath),
+ _move_file(os.path.join(patches_dir, patchdir, basepath),
os.path.join(files_dir, basepath),
dry_run_outdir=dry_run_outdir,
base_outdir=recipedir)
- srcuri.append('file://%s' % basepath)
+ params = dict(patchdir_params)
+ if patchdir != "." :
+ if params:
+ params['patchdir'] += patchdir
+ else:
+ params['patchdir'] = patchdir
+
+ srcuri.append(srcuri_entry(basepath, params))
updaterecipe = True
# Update recipe, if needed
if _remove_file_entries(srcuri, remove_files)[0]:
@@ -1603,7 +1867,7 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
def _guess_recipe_update_mode(srctree, rdata):
"""Guess the recipe update mode to use"""
- src_uri = (rdata.getVar('SRC_URI', False) or '').split()
+ src_uri = (rdata.getVar('SRC_URI') or '').split()
git_uris = [uri for uri in src_uri if uri.startswith('git://')]
if not git_uris:
return 'patch'
@@ -1623,7 +1887,7 @@ def _guess_recipe_update_mode(srctree, rdata):
return 'patch'
-def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False):
+def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_version, no_remove, initial_rev, no_report_remove=False, dry_run_outdir=None, no_overrides=False, force_patch_refresh=False):
srctree = workspace[recipename]['srctree']
if mode == 'auto':
mode = _guess_recipe_update_mode(srctree, rd)
@@ -1637,6 +1901,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver
for line in stdout.splitlines():
branchname = line[2:]
if line.startswith('* '):
+ if 'HEAD' in line:
+ raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"')
startbranch = branchname
if branchname.startswith(override_branch_prefix):
override_branches.append(branchname)
@@ -1677,7 +1943,7 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver
if mode == 'srcrev':
updated, appendf, removed = _update_recipe_srcrev(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, dry_run_outdir)
elif mode == 'patch':
- updated, appendf, removed = _update_recipe_patch(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir)
+ updated, appendf, removed = _update_recipe_patch(recipename, workspace, srctree, crd, appendlayerdir, wildcard_version, no_remove, no_report_remove, initial_rev, dry_run_outdir, force_patch_refresh)
else:
raise DevtoolError('update_recipe: invalid mode %s' % mode)
if updated:
@@ -1715,12 +1981,12 @@ def update_recipe(args, config, basepath, workspace):
if args.dry_run:
dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
dry_run_outdir = dry_run_output.name
- updated, _, _ = _update_recipe(args.recipename, workspace, rd, args.mode, args.append, args.wildcard_version, args.no_remove, args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides)
+ updated, _, _ = _update_recipe(args.recipename, workspace, rd, args.mode, args.append, args.wildcard_version, args.no_remove, args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides, force_patch_refresh=args.force_patch_refresh)
if updated:
rf = rd.getVar('FILE')
if rf.startswith(config.workspace_path):
- logger.warn('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
+ logger.warning('Recipe file %s has been updated but is inside the workspace - you will need to move it (and any associated files next to it) out to the desired layer before using "devtool reset" in order to keep any changes' % rf)
finally:
tinfoil.shutdown()
@@ -1742,8 +2008,9 @@ def status(args, config, basepath, workspace):
return 0
-def _reset(recipes, no_clean, config, basepath, workspace):
+def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
"""Reset one or more recipes"""
+ import oe.path
def clean_preferred_provider(pn, layerconf_path):
"""Remove PREFERRED_PROVIDER from layer.conf'"""
@@ -1790,29 +2057,54 @@ def _reset(recipes, no_clean, config, basepath, workspace):
for pn in recipes:
_check_preserve(config, pn)
+ appendfile = workspace[pn]['bbappend']
+ if os.path.exists(appendfile):
+ # This shouldn't happen, but is possible if devtool errored out prior to
+ # writing the md5 file. We need to delete this here or the recipe won't
+ # actually be reset
+ os.remove(appendfile)
+
preservepath = os.path.join(config.workspace_path, 'attic', pn, pn)
def preservedir(origdir):
if os.path.exists(origdir):
for root, dirs, files in os.walk(origdir):
for fn in files:
- logger.warn('Preserving %s in %s' % (fn, preservepath))
+ logger.warning('Preserving %s in %s' % (fn, preservepath))
_move_file(os.path.join(origdir, fn),
os.path.join(preservepath, fn))
for dn in dirs:
preservedir(os.path.join(root, dn))
os.rmdir(origdir)
- preservedir(os.path.join(config.workspace_path, 'recipes', pn))
+ recipefile = workspace[pn]['recipefile']
+ if recipefile and oe.path.is_path_parent(config.workspace_path, recipefile):
+ # This should always be true if recipefile is set, but just in case
+ preservedir(os.path.dirname(recipefile))
# We don't automatically create this dir next to appends, but the user can
preservedir(os.path.join(config.workspace_path, 'appends', pn))
srctreebase = workspace[pn]['srctreebase']
if os.path.isdir(srctreebase):
if os.listdir(srctreebase):
- # We don't want to risk wiping out any work in progress
- logger.info('Leaving source tree %s as-is; if you no '
- 'longer need it then please delete it manually'
- % srctreebase)
+ if remove_work:
+ logger.info('-r argument used on %s, removing source tree.'
+ ' You will lose any unsaved work' % pn)
+ shutil.rmtree(srctreebase)
+ else:
+ # We don't want to risk wiping out any work in progress
+ if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')):
+ from datetime import datetime
+ preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
+ logger.info('Preserving source tree in %s\nIf you no '
+ 'longer need it then please delete it manually.\n'
+ 'It is also possible to reuse it via the devtool source tree argument.'
+ % preservesrc)
+ bb.utils.mkdirhier(os.path.dirname(preservesrc))
+ shutil.move(srctreebase, preservesrc)
+ else:
+ logger.info('Leaving source tree %s as-is; if you no '
+ 'longer need it then please delete it manually'
+ % srctreebase)
else:
# This is unlikely, but if it's empty we can just remove it
os.rmdir(srctreebase)
@@ -1822,6 +2114,10 @@ def _reset(recipes, no_clean, config, basepath, workspace):
def reset(args, config, basepath, workspace):
"""Entry point for the devtool 'reset' subcommand"""
import bb
+ import shutil
+
+ recipes = ""
+
if args.recipename:
if args.all:
raise DevtoolError("Recipe cannot be specified if -a/--all is used")
@@ -1836,7 +2132,7 @@ def reset(args, config, basepath, workspace):
else:
recipes = args.recipename
- _reset(recipes, args.no_clean, config, basepath, workspace)
+ _reset(recipes, args.no_clean, args.remove_work, config, basepath, workspace)
return 0
@@ -1844,15 +2140,27 @@ def reset(args, config, basepath, workspace):
def _get_layer(layername, d):
"""Determine the base layer path for the specified layer name/path"""
layerdirs = d.getVar('BBLAYERS').split()
- layers = {os.path.basename(p): p for p in layerdirs}
+ layers = {} # {basename: layer_paths}
+ for p in layerdirs:
+ bn = os.path.basename(p)
+ if bn not in layers:
+ layers[bn] = [p]
+ else:
+ layers[bn].append(p)
# Provide some shortcuts
if layername.lower() in ['oe-core', 'openembedded-core']:
- layerdir = layers.get('meta', None)
+ layername = 'meta'
+ layer_paths = layers.get(layername, None)
+ if not layer_paths:
+ return os.path.abspath(layername)
+ elif len(layer_paths) == 1:
+ return os.path.abspath(layer_paths[0])
else:
- layerdir = layers.get(layername, None)
- if layerdir:
- layerdir = os.path.abspath(layerdir)
- return layerdir or layername
+ # multiple layers having the same base name
+ logger.warning("Multiple layers have the same base name '%s', use the first one '%s'." % (layername, layer_paths[0]))
+ logger.warning("Consider using path instead of base name to specify layer:\n\t\t%s" % '\n\t\t'.join(layer_paths))
+ return os.path.abspath(layer_paths[0])
+
def finish(args, config, basepath, workspace):
"""Entry point for the devtool 'finish' subcommand"""
@@ -1875,7 +2183,8 @@ def finish(args, config, basepath, workspace):
else:
raise DevtoolError('Source tree is not clean:\n\n%s\nEnsure you have committed your changes or use -f/--force if you are sure there\'s nothing that needs to be committed' % dirty)
- no_clean = False
+ no_clean = args.no_clean
remove_work = args.remove_work
tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
try:
rd = parse_recipe(config, tinfoil, args.recipename, True)
@@ -1938,7 +2247,7 @@ def finish(args, config, basepath, workspace):
if args.dry_run:
dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
dry_run_outdir = dry_run_output.name
- updated, appendfile, removed = _update_recipe(args.recipename, workspace, rd, args.mode, appendlayerdir, wildcard_version=True, no_remove=False, no_report_remove=removing_original, initial_rev=args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides)
+ updated, appendfile, removed = _update_recipe(args.recipename, workspace, rd, args.mode, appendlayerdir, wildcard_version=True, no_remove=False, no_report_remove=removing_original, initial_rev=args.initial_rev, dry_run_outdir=dry_run_outdir, no_overrides=args.no_overrides, force_patch_refresh=args.force_patch_refresh)
removed = [os.path.relpath(pth, recipedir) for pth in removed]
# Remove any old files in the case of an upgrade
@@ -2027,7 +2336,7 @@ def finish(args, config, basepath, workspace):
if args.dry_run:
logger.info('Resetting recipe (dry-run)')
else:
- _reset([args.recipename], no_clean=no_clean, config=config, basepath=basepath, workspace=workspace)
+ _reset([args.recipename], no_clean=no_clean, remove_work=remove_work, config=config, basepath=basepath, workspace=workspace)
return 0
@@ -2054,7 +2363,8 @@ def register_commands(subparsers, context):
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
- parser_add.add_argument('--fetch-dev', help='For npm, also fetch devDependencies', action="store_true")
+ parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
+ parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true")
parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
group = parser_add.add_mutually_exclusive_group()
@@ -2125,6 +2435,7 @@ def register_commands(subparsers, context):
parser_update_recipe.add_argument('--no-remove', '-n', action="store_true", help='Don\'t remove patches, only add or update')
parser_update_recipe.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
parser_update_recipe.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
+ parser_update_recipe.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
parser_update_recipe.set_defaults(func=update_recipe)
parser_status = subparsers.add_parser('status', help='Show workspace status',
@@ -2138,6 +2449,7 @@ def register_commands(subparsers, context):
parser_reset.add_argument('recipename', nargs='*', help='Recipe to reset')
parser_reset.add_argument('--all', '-a', action="store_true", help='Reset all recipes (clear workspace)')
parser_reset.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
+ parser_reset.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory along with append')
parser_reset.set_defaults(func=reset)
parser_finish = subparsers.add_parser('finish', help='Finish working on a recipe in your workspace',
@@ -2148,6 +2460,9 @@ def register_commands(subparsers, context):
parser_finish.add_argument('--mode', '-m', choices=['patch', 'srcrev', 'auto'], default='auto', help='Update mode (where %(metavar)s is %(choices)s; default is %(default)s)', metavar='MODE')
parser_finish.add_argument('--initial-rev', help='Override starting revision for patches')
parser_finish.add_argument('--force', '-f', action="store_true", help='Force continuing even if there are uncommitted changes in the source tree repository')
+ parser_finish.add_argument('--remove-work', '-r', action="store_true", help='Clean the sources directory under workspace')
+ parser_finish.add_argument('--no-clean', '-n', action="store_true", help='Don\'t clean the sysroot to remove recipe output')
parser_finish.add_argument('--no-overrides', '-O', action="store_true", help='Do not handle other override branches (if they exist)')
parser_finish.add_argument('--dry-run', '-N', action="store_true", help='Dry-run (just report changes instead of writing them)')
+ parser_finish.add_argument('--force-patch-refresh', action="store_true", help='Update patches in the layer even if they have not been modified (useful for refreshing patch context)')
parser_finish.set_defaults(func=finish)
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
index f6141bfdc3..fa5b8ef3c7 100644
--- a/scripts/lib/devtool/upgrade.py
+++ b/scripts/lib/devtool/upgrade.py
@@ -2,18 +2,7 @@
#
# Copyright (C) 2014-2017 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
+# SPDX-License-Identifier: GPL-2.0-only
#
"""Devtool upgrade plugin"""
@@ -43,9 +32,11 @@ def _run(cmd, cwd=''):
def _get_srctree(tmpdir):
srctree = tmpdir
- dirs = os.listdir(tmpdir)
+ dirs = scriptutils.filter_src_subdirs(tmpdir)
if len(dirs) == 1:
srctree = os.path.join(tmpdir, dirs[0])
+ else:
+ raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir,dirs))
return srctree
def _copy_source_code(orig, dest):
@@ -82,7 +73,8 @@ def _rename_recipe_dirs(oldpv, newpv, path):
if oldfile.find(oldpv) != -1:
newfile = oldfile.replace(oldpv, newpv)
if oldfile != newfile:
- os.rename(os.path.join(path, oldfile), os.path.join(path, newfile))
+ bb.utils.rename(os.path.join(path, oldfile),
+ os.path.join(path, newfile))
def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
oldrecipe = os.path.basename(oldrecipe)
@@ -98,7 +90,7 @@ def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
_rename_recipe_dirs(oldpv, newpv, path)
return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
-def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d):
+def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
"""Writes an append file"""
if not os.path.exists(rc):
raise DevtoolError("bbappend not created because %s does not exist" % rc)
@@ -113,38 +105,44 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
pn = d.getVar('PN')
af = os.path.join(appendpath, '%s.bbappend' % brf)
with open(af, 'w') as f:
- f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')
+ f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
+ # Local files can be modified/tracked in separate subdir under srctree
+ # Mostly useful for packages with S != WORKDIR
+ f.write('FILESPATH:prepend := "%s:"\n' %
+ os.path.join(srctreebase, 'oe-local-files'))
+ f.write('# srctreebase: %s\n' % srctreebase)
f.write('inherit externalsrc\n')
f.write(('# NOTE: We use pn- overrides here to avoid affecting '
'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
- f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))
+ f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
b_is_s = use_external_build(same_dir, no_same_dir, d)
if b_is_s:
- f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
+ f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
f.write('\n')
- if rev:
- f.write('# initial_rev: %s\n' % rev)
+ if revs:
+ for name, rev in revs.items():
+ f.write('# initial_rev %s: %s\n' % (name, rev))
if copied:
f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
f.write('# original_files: %s\n' % ' '.join(copied))
return af
-def _cleanup_on_error(rf, srctree):
- rfp = os.path.split(rf)[0] # recipe folder
- rfpp = os.path.split(rfp)[0] # recipes folder
- if os.path.exists(rfp):
- shutil.rmtree(b)
- if not len(os.listdir(rfpp)):
- os.rmdir(rfpp)
+def _cleanup_on_error(rd, srctree):
+ if os.path.exists(rd):
+ shutil.rmtree(rd)
srctree = os.path.abspath(srctree)
if os.path.exists(srctree):
shutil.rmtree(srctree)
-def _upgrade_error(e, rf, srctree):
- if rf:
- cleanup_on_error(rf, srctree)
+def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
+ if not keep_failure:
+ _cleanup_on_error(rd, srctree)
logger.error(e)
- raise DevtoolError(e)
+ if extramsg:
+ logger.error(extramsg)
+ if keep_failure:
+ logger.info('Preserving failed upgrade files (--keep-failure)')
+ sys.exit(1)
def _get_uri(rd):
srcuris = rd.getVar('SRC_URI').split()
@@ -185,12 +183,16 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
uri, rev = _get_uri(crd)
if srcrev:
rev = srcrev
- if uri.startswith('git://'):
+ paths = [srctree]
+ if uri.startswith('git://') or uri.startswith('gitsm://'):
__run('git fetch')
__run('git checkout %s' % rev)
__run('git tag -f devtool-base-new')
- md5 = None
- sha256 = None
+ __run('git submodule update --recursive')
+ __run('git submodule foreach \'git tag -f devtool-base-new\'')
+ (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
+ paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
+ checksums = {}
_, _, _, _, _, params = bb.fetch2.decodeurl(uri)
srcsubdir_rel = params.get('destsuffix', 'git')
if not srcbranch:
@@ -198,14 +200,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
get_branch = [x.strip() for x in check_branch.splitlines()]
# Remove HEAD reference point and drop remote prefix
get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
- if 'master' in get_branch:
- # If it is master, we do not need to append 'branch=master' as this is default.
- # Even with the case where get_branch has multiple objects, if 'master' is one
- # of them, we should default take from 'master'
- srcbranch = ''
- elif len(get_branch) == 1:
- # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
+ if len(get_branch) == 1:
+ # If srcrev is on only ONE branch, then use that branch
srcbranch = get_branch[0]
+ elif 'main' in get_branch:
+ # If srcrev is on multiple branches, then choose 'main' if it is one of them
+ srcbranch = 'main'
+ elif 'master' in get_branch:
+ # Otherwise choose 'master' if it is one of the branches
+ srcbranch = 'master'
else:
# If get_branch contains more than one object, display an error and exit.
mbrch = '\n ' + '\n '.join(get_branch)
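
The new selection order above (a unique containing branch first, then 'main', then 'master') can be restated in isolation; a hedged sketch, where pick_srcbranch is an invented helper name:

def pick_srcbranch(branches):
    # branches: remote branches containing the requested srcrev, with the
    # 'origin/' prefix and the HEAD reference already stripped, as above.
    if len(branches) == 1:
        return branches[0]
    for preferred in ('main', 'master'):
        if preferred in branches:
            return preferred
    raise ValueError('srcrev is on multiple branches: %s' % ', '.join(branches))

assert pick_srcbranch(['release-1.2']) == 'release-1.2'
assert pick_srcbranch(['develop', 'main']) == 'main'
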
@@ -222,9 +225,6 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
if ftmpdir and keep_temp:
logger.info('Fetch temp directory is %s' % ftmpdir)
- md5 = checksums['md5sum']
- sha256 = checksums['sha256sum']
-
tmpsrctree = _get_srctree(tmpdir)
srctree = os.path.abspath(srctree)
srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)
@@ -242,14 +242,14 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
# Copy in new ones
_copy_source_code(tmpsrctree, srctree)
- (stdout,_) = __run('git ls-files --modified --others --exclude-standard')
+ (stdout,_) = __run('git ls-files --modified --others')
filelist = stdout.splitlines()
pbar = bb.ui.knotty.BBProgress('Adding changed files', len(filelist))
pbar.start()
batchsize = 100
for i in range(0, len(filelist), batchsize):
batch = filelist[i:i+batchsize]
- __run('git add -A %s' % ' '.join(['"%s"' % item for item in batch]))
+ __run('git add -f -A %s' % ' '.join(['"%s"' % item for item in batch]))
pbar.update(i)
pbar.finish()
@@ -258,40 +258,81 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
__run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
__run('git tag -f devtool-base-%s' % newpv)
- (stdout, _) = __run('git rev-parse HEAD')
- rev = stdout.rstrip()
+ revs = {}
+ for path in paths:
+ (stdout, _) = _run('git rev-parse HEAD', cwd=path)
+ revs[os.path.relpath(path, srctree)] = stdout.rstrip()
if no_patch:
patches = oe.recipeutils.get_recipe_patches(crd)
if patches:
- logger.warn('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
+ logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n %s' % '\n '.join([os.path.basename(patch) for patch in patches]))
else:
- __run('git checkout devtool-patched -b %s' % branch)
- skiptag = False
- try:
- __run('git rebase %s' % rev)
- except bb.process.ExecutionError as e:
- skiptag = True
- if 'conflict' in e.stdout:
- logger.warn('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
- else:
- logger.warn('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
- if not skiptag:
- if uri.startswith('git://'):
- suffix = 'new'
- else:
- suffix = newpv
- __run('git tag -f devtool-patched-%s' % suffix)
+ for path in paths:
+ _run('git checkout devtool-patched -b %s' % branch, cwd=path)
+ (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
+ branches_to_rebase = [branch] + stdout.split()
+ target_branch = revs[os.path.relpath(path, srctree)]
+
+ # There is a bug (or feature?) in git rebase where if a commit with
+ # a note is fully rebased away by being part of an old commit, the
+ # note is still attached to the old commit. Avoid this by making
+ # sure all old devtool related commits have a note attached to them
+ # (this assumes git config notes.rewriteMode is set to ignore).
+ (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
+ for rev in stdout.splitlines():
+ if not oe.patch.GitApplyTree.getNotes(path, rev):
+ oe.patch.GitApplyTree.addNote(path, rev, "dummy")
+
+ for b in branches_to_rebase:
+ logger.info("Rebasing {} onto {}".format(b, target_branch))
+ _run('git checkout %s' % b, cwd=path)
+ try:
+ _run('git rebase %s' % target_branch, cwd=path)
+ except bb.process.ExecutionError as e:
+ if 'conflict' in e.stdout:
+ logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
+ _run('git rebase --abort', cwd=path)
+ else:
+ logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
+
+ # Remove any dummy notes added above.
+ (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
+ for rev in stdout.splitlines():
+ oe.patch.GitApplyTree.removeNote(path, rev, "dummy")
+
+ _run('git checkout %s' % branch, cwd=path)
if tmpsrctree:
if keep_temp:
logger.info('Preserving temporary directory %s' % tmpsrctree)
else:
shutil.rmtree(tmpsrctree)
+ if tmpdir != tmpsrctree:
+ shutil.rmtree(tmpdir)
- return (rev, md5, sha256, srcbranch, srcsubdir_rel)
+ return (revs, checksums, srcbranch, srcsubdir_rel)
-def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd):
+def _add_license_diff_to_recipe(path, diff):
+ notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
+# The following is the difference between the old and the new license text.
+# Please update the LICENSE value if needed, and summarize the changes in
+# the commit message via 'License-Update:' tag.
+# (example: 'License-Update: copyright years updated.')
+#
+# The changes:
+#
+"""
+ commented_diff = "\n".join(["# {}".format(l) for l in diff.split('\n')])
+ with open(path, 'rb') as f:
+ orig_content = f.read()
+ with open(path, 'wb') as f:
+ f.write(notice_text.encode())
+ f.write(commented_diff.encode())
+ f.write("\n#\n\n".encode())
+ f.write(orig_content)
+
+def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
"""Creates the new recipe under workspace"""
bpn = rd.getVar('BPN')
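
One detail worth noting from the hunk above: _add_license_diff_to_recipe prepends in place by reading the whole file as bytes and rewriting it with the commented diff in front. A minimal standalone sketch of that read-then-rewrite pattern (prepend_comment is an illustrative name, not devtool API):

def prepend_comment(path, text):
    # Turn each line of 'text' into a '# ' comment, then splice the result
    # in front of the file's existing bytes, mirroring the approach above.
    commented = ''.join('# %s\n' % line for line in text.splitlines())
    with open(path, 'rb') as f:
        orig_content = f.read()
    with open(path, 'wb') as f:
        f.write(commented.encode())
        f.write(orig_content)
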
@@ -322,7 +363,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
replacing = True
new_src_uri = []
for entry in src_uri:
- scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
+ try:
+ scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
+ except bb.fetch2.MalformedUrl as e:
+ raise DevtoolError("Could not decode SRC_URI: {}".format(e))
if replacing and scheme in ['git', 'gitsm']:
branch = params.get('branch', 'master')
if rd.expand(branch) != srcbranch:
@@ -360,30 +404,39 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
addnames.append(params['name'])
# Find what's been set in the original recipe
oldnames = []
+ oldsums = []
noname = False
for varflag in rd.getVarFlags('SRC_URI'):
- if varflag.endswith(('.md5sum', '.sha256sum')):
- name = varflag.rsplit('.', 1)[0]
- if name not in oldnames:
- oldnames.append(name)
- elif varflag in ['md5sum', 'sha256sum']:
- noname = True
+ for checksum in checksums:
+ if varflag.endswith('.' + checksum):
+ name = varflag.rsplit('.', 1)[0]
+ if name not in oldnames:
+ oldnames.append(name)
+ oldsums.append(checksum)
+ elif varflag == checksum:
+ noname = True
+ oldsums.append(checksum)
# Even if SRC_URI has named entries it doesn't have to actually use the name
if noname and addnames and addnames[0] not in oldnames:
addnames = []
# Drop any old names (the name actually might include ${PV})
for name in oldnames:
if name not in newnames:
- newvalues['SRC_URI[%s.md5sum]' % name] = None
- newvalues['SRC_URI[%s.sha256sum]' % name] = None
+ for checksum in oldsums:
+ newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None
- if md5 and sha256:
- if addnames:
- nameprefix = '%s.' % addnames[0]
- else:
- nameprefix = ''
- newvalues['SRC_URI[%smd5sum]' % nameprefix] = md5
- newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256
+ nameprefix = '%s.' % addnames[0] if addnames else ''
+
+ # md5sum is deprecated, remove any traces of it. If it was the only old
+ # checksum, then replace it with the default checksums.
+ if 'md5sum' in oldsums:
+ newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
+ oldsums.remove('md5sum')
+ if not oldsums:
+ oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
+
+ for checksum in oldsums:
+ newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]
if srcsubdir_new != srcsubdir_old:
s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
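
The checksum handling above replaces the fixed md5/sha256 pair with a dict keyed by checksum name, and composes the SRC_URI varflag keys from an optional name prefix plus that checksum name. An illustrative-only composition (the values and the 'tarball' name are invented):

checksums = {'sha256sum': '0' * 64, 'sha1sum': '0' * 40}
nameprefix = 'tarball.'  # empty string when the SRC_URI entry is unnamed
newvalues = {'SRC_URI[%s%s]' % (nameprefix, cks): val
             for cks, val in checksums.items()}
# -> keys 'SRC_URI[tarball.sha256sum]' and 'SRC_URI[tarball.sha1sum]'
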
@@ -400,7 +453,19 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
else:
logger.info('Source subdirectory has changed, updating S value')
- rd = tinfoil.parse_recipe_file(fullpath, False)
+ if license_diff:
+ newlicchksum = " ".join(["file://{}".format(l['path']) +
+ (";beginline={}".format(l['beginline']) if l['beginline'] else "") +
+ (";endline={}".format(l['endline']) if l['endline'] else "") +
+ (";md5={}".format(l['actual_md5'])) for l in new_licenses])
+ newvalues["LIC_FILES_CHKSUM"] = newlicchksum
+ _add_license_diff_to_recipe(fullpath, license_diff)
+
+ tinfoil.modified_files()
+ try:
+ rd = tinfoil.parse_recipe_file(fullpath, False)
+ except bb.tinfoil.TinfoilCommandFailed as e:
+ _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
return fullpath, copied
@@ -409,7 +474,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
def _check_git_config():
def getconfig(name):
try:
- value = bb.process.run('git config --global %s' % name)[0].strip()
+ value = bb.process.run('git config %s' % name)[0].strip()
except bb.process.ExecutionError as e:
if e.exitcode == 1:
value = None
@@ -427,14 +492,53 @@ def _check_git_config():
if configerr:
raise DevtoolError('Your git configuration is incomplete which will prevent rebases from working:\n' + '\n'.join(configerr))
+def _extract_licenses(srcpath, recipe_licenses):
+ licenses = []
+ for url in recipe_licenses.split():
+ license = {}
+ (type, host, path, user, pswd, parm) = bb.fetch.decodeurl(url)
+ license['path'] = path
+ license['md5'] = parm.get('md5', '')
+ license['beginline'], license['endline'] = 0, 0
+ if 'beginline' in parm:
+ license['beginline'] = int(parm['beginline'])
+ if 'endline' in parm:
+ license['endline'] = int(parm['endline'])
+ license['text'] = []
+ with open(os.path.join(srcpath, path), 'rb') as f:
+ import hashlib
+ actual_md5 = hashlib.md5()
+ lineno = 0
+ for line in f:
+ lineno += 1
+ if (lineno >= license['beginline']) and ((lineno <= license['endline']) or not license['endline']):
+ license['text'].append(line.decode(errors='ignore'))
+ actual_md5.update(line)
+ license['actual_md5'] = actual_md5.hexdigest()
+ licenses.append(license)
+ return licenses
+
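
_extract_licenses above hashes only the requested line window of each license file; a minimal standalone restatement of that windowed-md5 logic (md5_of_range is an invented name):

import hashlib

def md5_of_range(path, beginline=0, endline=0):
    # Hash lines beginline..endline (1-based); endline of 0 means "to EOF",
    # matching the beginline/endline semantics of LIC_FILES_CHKSUM above.
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for lineno, line in enumerate(f, start=1):
            if lineno >= beginline and (not endline or lineno <= endline):
                md5.update(line)
    return md5.hexdigest()
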
+def _generate_license_diff(old_licenses, new_licenses):
+ need_diff = False
+ for l in new_licenses:
+ if l['md5'] != l['actual_md5']:
+ need_diff = True
+ break
+ if not need_diff:
+ return None
+
+ import difflib
+ diff = ''
+ for old, new in zip(old_licenses, new_licenses):
+ for line in difflib.unified_diff(old['text'], new['text'], old['path'], new['path']):
+ diff = diff + line
+ return diff
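
For reference, difflib.unified_diff (used by _generate_license_diff above) takes two lists of lines plus from/to labels and yields unified-diff lines; a self-contained example with invented inputs:

import difflib

old = ['Copyright (C) 2019\n', 'All rights reserved.\n']
new = ['Copyright (C) 2024\n', 'All rights reserved.\n']
print(''.join(difflib.unified_diff(old, new, 'COPYING (old)', 'COPYING (new)')))
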
def upgrade(args, config, basepath, workspace):
"""Entry point for the devtool 'upgrade' subcommand"""
if args.recipename in workspace:
raise DevtoolError("recipe %s is already in your workspace" % args.recipename)
- if not args.version and not args.srcrev:
- raise DevtoolError("You must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option")
if args.srcbranch and not args.srcrev:
raise DevtoolError("If you specify --srcbranch/-B then you must use --srcrev/-S to specify the revision" % args.recipename)
@@ -457,6 +561,18 @@ def upgrade(args, config, basepath, workspace):
else:
srctree = standard.get_default_srctree(config, pn)
+ srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
+
+ # try to automatically discover latest version and revision if not provided on command line
+ if not args.version and not args.srcrev:
+ version_info = oe.recipeutils.get_recipe_upstream_version(rd)
+ if version_info['version'] and not version_info['version'].endswith("new-commits-available"):
+ args.version = version_info['version']
+ if version_info['revision']:
+ args.srcrev = version_info['revision']
+ if not args.version and not args.srcrev:
+ raise DevtoolError("Automatic discovery of latest version/revision failed - you must provide a version using the --version/-V option, or for recipes that fetch from an SCM such as git, the --srcrev/-S option.")
+
standard._check_compatible_recipe(pn, rd)
old_srcrev = rd.getVar('SRCREV')
if old_srcrev == 'INVALID':
@@ -472,21 +588,24 @@ def upgrade(args, config, basepath, workspace):
check_prerelease_version(args.version, 'devtool upgrade')
rf = None
+ license_diff = None
try:
logger.info('Extracting current version source...')
rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
+ old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
logger.info('Extracting upgraded version source...')
- rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
+ rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
args.srcrev, args.srcbranch, args.branch, args.keep_temp,
tinfoil, rd)
- rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd)
- except bb.process.CmdError as e:
- _upgrade_error(e, rf, srctree)
- except DevtoolError as e:
- _upgrade_error(e, rf, srctree)
+ new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
+ license_diff = _generate_license_diff(old_licenses, new_licenses)
+ rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
+ except (bb.process.CmdError, DevtoolError) as e:
+ recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('BPN'))
+ _upgrade_error(e, recipedir, srctree, args.keep_failure)
standard._add_md5(config, pn, os.path.dirname(rf))
- af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2,
+ af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
copied, config.workspace_path, rd)
standard._add_md5(config, pn, af)
@@ -494,10 +613,49 @@ def upgrade(args, config, basepath, workspace):
logger.info('Upgraded source extracted to %s' % srctree)
logger.info('New recipe is %s' % rf)
+ if license_diff:
+ logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
+ preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
+ if preferred_version:
+ logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
finally:
tinfoil.shutdown()
return 0
+def latest_version(args, config, basepath, workspace):
+ """Entry point for the devtool 'latest_version' subcommand"""
+ tinfoil = setup_tinfoil(basepath=basepath, tracking=True)
+ try:
+ rd = parse_recipe(config, tinfoil, args.recipename, True)
+ if not rd:
+ return 1
+ version_info = oe.recipeutils.get_recipe_upstream_version(rd)
+ # "new-commits-available" is an indication that upstream never issues version tags
+ if not version_info['version'].endswith("new-commits-available"):
+ logger.info("Current version: {}".format(version_info['current_version']))
+ logger.info("Latest version: {}".format(version_info['version']))
+ if version_info['revision']:
+ logger.info("Latest version's commit: {}".format(version_info['revision']))
+ else:
+ logger.info("Latest commit: {}".format(version_info['revision']))
+ finally:
+ tinfoil.shutdown()
+ return 0
+
+def check_upgrade_status(args, config, basepath, workspace):
+ if not args.recipe:
+ logger.info("Checking the upstream status for all recipes may take a few minutes")
+ results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
+ for result in results:
+ # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason
+ if args.all or result[1] != 'MATCH':
+ print("{:25} {:15} {:15} {} {} {}".format( result[0],
+ result[2],
+ result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"),
+ result[4],
+ result[5] if result[5] != 'N/A' else "",
+ "cannot be updated due to: %s" %(result[6]) if result[6] else ""))
+
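
Each row printed above is a 7-tuple, per the inline comment; unpacking one invented row shows the column layout the formatting relies on:

# (pn, update_status, current, latest, maintainer, latest_commit, no_update_reason)
row = ('zlib', 'UPDATE', '1.2.11', '1.2.13',
       'Jane Doe <jane@example.com>', 'N/A', '')
pn, status, current, latest, maintainer, commit, reason = row
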
def register_commands(subparsers, context):
"""Register devtool subcommands from this plugin"""
@@ -508,8 +666,8 @@ def register_commands(subparsers, context):
group='starting')
parser_upgrade.add_argument('recipename', help='Name of recipe to upgrade (just name - no version, path or extension)')
parser_upgrade.add_argument('srctree', nargs='?', help='Path to where to extract the source tree. If not specified, a subdirectory of %s will be used.' % defsrctree)
- parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV)')
- parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (required if fetching from an SCM such as git)')
+ parser_upgrade.add_argument('--version', '-V', help='Version to upgrade to (PV). If omitted, latest upstream version will be determined and used, if possible.')
+ parser_upgrade.add_argument('--srcrev', '-S', help='Source revision to upgrade to (useful when fetching from an SCM such as git)')
parser_upgrade.add_argument('--srcbranch', '-B', help='Branch in source repository containing the revision to use (if fetching from an SCM such as git)')
parser_upgrade.add_argument('--branch', '-b', default="devtool", help='Name for new development branch to checkout (default "%(default)s")')
parser_upgrade.add_argument('--no-patch', action="store_true", help='Do not apply patches from the recipe to the new source code')
@@ -518,4 +676,18 @@ def register_commands(subparsers, context):
group.add_argument('--same-dir', '-s', help='Build in same directory as source', action="store_true")
group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
parser_upgrade.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
+ parser_upgrade.add_argument('--keep-failure', action="store_true", help='Keep failed upgrade recipe and associated files (for debugging)')
parser_upgrade.set_defaults(func=upgrade, fixed_setup=context.fixed_setup)
+
+ parser_latest_version = subparsers.add_parser('latest-version', help='Report the latest version of an existing recipe',
+ description='Queries the upstream server for the latest upstream release (for git, tags are checked; for tarballs, a list of them is obtained and the one with the highest version number is reported)',
+ group='info')
+ parser_latest_version.add_argument('recipename', help='Name of recipe to query (just name - no version, path or extension)')
+ parser_latest_version.set_defaults(func=latest_version)
+
+ parser_check_upgrade_status = subparsers.add_parser('check-upgrade-status', help="Report upgradability for multiple (or all) recipes",
+ description="Prints a table of recipes together with versions currently provided by recipes, and latest upstream versions, when there is a later version available",
+ group='info')
+ parser_check_upgrade_status.add_argument('recipe', help='Name of the recipe to report (omit to report upgrade info for all recipes)', nargs='*')
+ parser_check_upgrade_status.add_argument('--all', '-a', help='Show all recipes, not just recipes needing upgrade', action="store_true")
+ parser_check_upgrade_status.set_defaults(func=check_upgrade_status)
diff --git a/scripts/lib/devtool/utilcmds.py b/scripts/lib/devtool/utilcmds.py
index 7cd139fb8b..964817766b 100644
--- a/scripts/lib/devtool/utilcmds.py
+++ b/scripts/lib/devtool/utilcmds.py
@@ -2,18 +2,8 @@
#
# Copyright (C) 2015-2016 Intel Corporation
#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
+# SPDX-License-Identifier: GPL-2.0-only
#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""Devtool utility plugins"""