Diffstat (limited to 'scripts/lib/devtool')
-rw-r--r--  scripts/lib/devtool/__init__.py                 27
-rw-r--r--  scripts/lib/devtool/build_image.py               2
-rw-r--r--  scripts/lib/devtool/build_sdk.py                 2
-rw-r--r--  scripts/lib/devtool/deploy.py                  240
-rw-r--r--  scripts/lib/devtool/ide_plugins/__init__.py    282
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_code.py    463
-rw-r--r--  scripts/lib/devtool/ide_plugins/ide_none.py     53
-rwxr-xr-x  scripts/lib/devtool/ide_sdk.py                1070
-rw-r--r--  scripts/lib/devtool/menuconfig.py                4
-rw-r--r--  scripts/lib/devtool/sdk.py                       5
-rw-r--r--  scripts/lib/devtool/search.py                    5
-rw-r--r--  scripts/lib/devtool/standard.py                703
-rw-r--r--  scripts/lib/devtool/upgrade.py                 248
13 files changed, 2568 insertions, 536 deletions
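The most invasive change below is the deploy.py rework: deploy() now only gathers the variables it needs through tinfoil and then delegates to a new deploy_no_d() entry point, so a caller such as the new ide-sdk plugin can deploy without holding a bitbake datastore. A minimal sketch of such a caller, assuming the variable values were cached earlier while a datastore was available (the `recipe` object and `args` namespace are hypothetical stand-ins, not part of the diff):

    # Hedged sketch: drive the refactored deploy path without tinfoil.
    # 'recipe' mimics the RecipeModified cache introduced in ide_sdk.py below;
    # 'args' is assumed to carry the usual deploy-target argparse options.
    from devtool.deploy import deploy_no_d

    ret = deploy_no_d(recipe.d, recipe.workdir, recipe.path, recipe.strip_cmd,
                      recipe.libdir, recipe.base_libdir, recipe.max_process,
                      recipe.fakerootcmd, recipe.fakerootenv, args)
    if ret != 0:
        raise RuntimeError('deploy-target failed')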
diff --git a/scripts/lib/devtool/__init__.py b/scripts/lib/devtool/__init__.py
index 702db669de..6133c1c5b4 100644
--- a/scripts/lib/devtool/__init__.py
+++ b/scripts/lib/devtool/__init__.py
@@ -78,12 +78,15 @@ def exec_fakeroot(d, cmd, **kwargs):
     """Run a command under fakeroot (pseudo, in fact) so that it picks up the appropriate file permissions"""
     # Grab the command and check it actually exists
     fakerootcmd = d.getVar('FAKEROOTCMD')
+    fakerootenv = d.getVar('FAKEROOTENV')
+    return exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs)
+
+def exec_fakeroot_no_d(fakerootcmd, fakerootenv, cmd, **kwargs):
     if not os.path.exists(fakerootcmd):
         logger.error('pseudo executable %s could not be found - have you run a build yet? pseudo-native should install this and if you have run any build then that should have been built' % fakerootcmd)
         return 2
     # Set up the appropriate environment
     newenv = dict(os.environ)
-    fakerootenv = d.getVar('FAKEROOTENV')
     for varvalue in fakerootenv.split():
         if '=' in varvalue:
             splitval = varvalue.split('=', 1)
@@ -233,6 +236,28 @@ def setup_git_repo(repodir, version, devbranch, basetag='devtool-base', d=None):
     bb.process.run('git checkout -b %s' % devbranch, cwd=repodir)
     bb.process.run('git tag -f %s' % basetag, cwd=repodir)
 
+    # If the recipe unpacks another git repo inside S, we need to declare it as a regular git submodule now,
+    # so we will be able to tag branches on it and extract patches when doing finish/update on the recipe
+    stdout, _ = bb.process.run("git status --porcelain", cwd=repodir)
+    found = False
+    for line in stdout.splitlines():
+        if line.endswith("/"):
+            new_dir = line.split()[1]
+            for root, dirs, files in os.walk(os.path.join(repodir, new_dir)):
+                if ".git" in dirs + files:
+                    (stdout, _) = bb.process.run('git remote', cwd=root)
+                    remote = stdout.splitlines()[0]
+                    (stdout, _) = bb.process.run('git remote get-url %s' % remote, cwd=root)
+                    remote_url = stdout.splitlines()[0]
+                    logger.error(os.path.relpath(os.path.join(root, ".."), root))
+                    bb.process.run('git submodule add %s %s' % (remote_url, os.path.relpath(root, os.path.join(root, ".."))), cwd=os.path.join(root, ".."))
+                    found = True
+                if found:
+                    oe.patch.GitApplyTree.commitIgnored("Add additional submodule from SRC_URI", dir=os.path.join(root, ".."), d=d)
+                    found = False
+
+    if os.path.exists(os.path.join(repodir, '.gitmodules')):
+        bb.process.run('git submodule foreach --recursive "git tag -f %s"' % basetag, cwd=repodir)
 
 def recipe_to_append(recipefile, config, wildcard=False):
     """
     Convert a recipe file to a bbappend file path within the workspace.
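The exec_fakeroot()/exec_fakeroot_no_d() split above follows the same pattern: the _no_d variant takes the two pseudo-related values as plain strings, so it can run without a datastore. A minimal usage sketch, assuming FAKEROOTCMD and FAKEROOTENV were captured from a parsed recipe earlier (both values below are made-up examples, not real build output):

    # Hedged sketch: run a command under pseudo with pre-captured values.
    from devtool import exec_fakeroot_no_d

    fakerootcmd = '/build/tmp/sysroots-components/x86_64/pseudo-native/usr/bin/pseudo'
    fakerootenv = 'PSEUDO_PREFIX=/build/tmp/sysroots-components/x86_64/pseudo-native/usr PSEUDO_NOSYMLINKEXP=1'
    # Extra keyword arguments (cwd, shell, ...) are forwarded to the
    # underlying subprocess call, as in the deploy code above.
    ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - .',
                             cwd='/build/tmp/work/example/image', shell=True)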
diff --git a/scripts/lib/devtool/build_image.py b/scripts/lib/devtool/build_image.py index 9388abbacf..980f90ddd6 100644 --- a/scripts/lib/devtool/build_image.py +++ b/scripts/lib/devtool/build_image.py @@ -113,7 +113,7 @@ def build_image_task(config, basepath, workspace, image, add_packages=None, task with open(appendfile, 'w') as afile: if packages: # include packages from workspace recipes into the image - afile.write('IMAGE_INSTALL_append = " %s"\n' % ' '.join(packages)) + afile.write('IMAGE_INSTALL:append = " %s"\n' % ' '.join(packages)) if not task: logger.info('Building image %s with the following ' 'additional packages: %s', image, ' '.join(packages)) diff --git a/scripts/lib/devtool/build_sdk.py b/scripts/lib/devtool/build_sdk.py index 6fe02fff2a..1cd4831d2b 100644 --- a/scripts/lib/devtool/build_sdk.py +++ b/scripts/lib/devtool/build_sdk.py @@ -13,7 +13,7 @@ import shutil import errno import sys import tempfile -from devtool import exec_build_env_command, setup_tinfoil, parse_recipe, DevtoolError +from devtool import DevtoolError from devtool import build_image logger = logging.getLogger('devtool') diff --git a/scripts/lib/devtool/deploy.py b/scripts/lib/devtool/deploy.py index e5af2c95ae..b5ca8f2c2f 100644 --- a/scripts/lib/devtool/deploy.py +++ b/scripts/lib/devtool/deploy.py @@ -16,7 +16,7 @@ import bb.utils import argparse_oe import oe.types -from devtool import exec_fakeroot, setup_tinfoil, check_workspace_recipe, DevtoolError +from devtool import exec_fakeroot_no_d, setup_tinfoil, check_workspace_recipe, DevtoolError logger = logging.getLogger('devtool') @@ -133,16 +133,38 @@ def _prepare_remote_script(deploy, verbose=False, dryrun=False, undeployall=Fals return '\n'.join(lines) - - def deploy(args, config, basepath, workspace): """Entry point for the devtool 'deploy' subcommand""" - import math - import oe.recipeutils - import oe.package + import oe.utils check_workspace_recipe(workspace, args.recipename, checksrc=False) + tinfoil = setup_tinfoil(basepath=basepath) + try: + try: + rd = tinfoil.parse_recipe(args.recipename) + except Exception as e: + raise DevtoolError('Exception parsing recipe %s: %s' % + (args.recipename, e)) + + srcdir = rd.getVar('D') + workdir = rd.getVar('WORKDIR') + path = rd.getVar('PATH') + strip_cmd = rd.getVar('STRIP') + libdir = rd.getVar('libdir') + base_libdir = rd.getVar('base_libdir') + max_process = oe.utils.get_bb_number_threads(rd) + fakerootcmd = rd.getVar('FAKEROOTCMD') + fakerootenv = rd.getVar('FAKEROOTENV') + finally: + tinfoil.shutdown() + + return deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args) + +def deploy_no_d(srcdir, workdir, path, strip_cmd, libdir, base_libdir, max_process, fakerootcmd, fakerootenv, args): + import math + import oe.package + try: host, destdir = args.target.split(':') except ValueError: @@ -152,118 +174,108 @@ def deploy(args, config, basepath, workspace): if not destdir.endswith('/'): destdir += '/' - tinfoil = setup_tinfoil(basepath=basepath) - try: - try: - rd = tinfoil.parse_recipe(args.recipename) - except Exception as e: - raise DevtoolError('Exception parsing recipe %s: %s' % - (args.recipename, e)) - recipe_outdir = rd.getVar('D') - if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir): - raise DevtoolError('No files to deploy - have you built the %s ' - 'recipe? If so, the install step has not installed ' - 'any files.' 
% args.recipename) - - if args.strip and not args.dry_run: - # Fakeroot copy to new destination - srcdir = recipe_outdir - recipe_outdir = os.path.join(rd.getVar('WORKDIR'), 'deploy-target-stripped') - if os.path.isdir(recipe_outdir): - bb.utils.remove(recipe_outdir, True) - exec_fakeroot(rd, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) - os.environ['PATH'] = ':'.join([os.environ['PATH'], rd.getVar('PATH') or '']) - oe.package.strip_execs(args.recipename, recipe_outdir, rd.getVar('STRIP'), rd.getVar('libdir'), - rd.getVar('base_libdir'), rd) - - filelist = [] - inodes = set({}) - ftotalsize = 0 - for root, _, files in os.walk(recipe_outdir): - for fn in files: - fstat = os.lstat(os.path.join(root, fn)) - # Get the size in kiB (since we'll be comparing it to the output of du -k) - # MUST use lstat() here not stat() or getfilesize() since we don't want to - # dereference symlinks - if fstat.st_ino in inodes: - fsize = 0 - else: - fsize = int(math.ceil(float(fstat.st_size)/1024)) - inodes.add(fstat.st_ino) - ftotalsize += fsize - # The path as it would appear on the target - fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn) - filelist.append((fpath, fsize)) - - if args.dry_run: - print('Files to be deployed for %s on target %s:' % (args.recipename, args.target)) - for item, _ in filelist: - print(' %s' % item) - return 0 - - extraoptions = '' - if args.no_host_check: - extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' - if not args.show_status: - extraoptions += ' -q' - - scp_sshexec = '' - ssh_sshexec = 'ssh' - if args.ssh_exec: - scp_sshexec = "-S %s" % args.ssh_exec - ssh_sshexec = args.ssh_exec - scp_port = '' - ssh_port = '' - if args.port: - scp_port = "-P %s" % args.port - ssh_port = "-p %s" % args.port - - if args.key: - extraoptions += ' -i %s' % args.key - - # In order to delete previously deployed files and have the manifest file on - # the target, we write out a shell script and then copy it to the target - # so we can then run it (piping tar output to it). - # (We cannot use scp here, because it doesn't preserve symlinks.) - tmpdir = tempfile.mkdtemp(prefix='devtool') - try: - tmpscript = '/tmp/devtool_deploy.sh' - tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list') - shellscript = _prepare_remote_script(deploy=True, - verbose=args.show_status, - nopreserve=args.no_preserve, - nocheckspace=args.no_check_space) - # Write out the script to a file - with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f: - f.write(shellscript) - # Write out the file list - with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f: - f.write('%d\n' % ftotalsize) - for fpath, fsize in filelist: - f.write('%s %d\n' % (fpath, fsize)) - # Copy them to the target - ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) - if ret != 0: - raise DevtoolError('Failed to copy script to %s - rerun with -s to ' - 'get a complete error message' % args.target) - finally: - shutil.rmtree(tmpdir) + recipe_outdir = srcdir + if not os.path.exists(recipe_outdir) or not os.listdir(recipe_outdir): + raise DevtoolError('No files to deploy - have you built the %s ' + 'recipe? If so, the install step has not installed ' + 'any files.' 
% args.recipename) + + if args.strip and not args.dry_run: + # Fakeroot copy to new destination + srcdir = recipe_outdir + recipe_outdir = os.path.join(workdir, 'devtool-deploy-target-stripped') + if os.path.isdir(recipe_outdir): + exec_fakeroot_no_d(fakerootcmd, fakerootenv, "rm -rf %s" % recipe_outdir, shell=True) + exec_fakeroot_no_d(fakerootcmd, fakerootenv, "cp -af %s %s" % (os.path.join(srcdir, '.'), recipe_outdir), shell=True) + os.environ['PATH'] = ':'.join([os.environ['PATH'], path or '']) + oe.package.strip_execs(args.recipename, recipe_outdir, strip_cmd, libdir, base_libdir, max_process) + + filelist = [] + inodes = set({}) + ftotalsize = 0 + for root, _, files in os.walk(recipe_outdir): + for fn in files: + fstat = os.lstat(os.path.join(root, fn)) + # Get the size in kiB (since we'll be comparing it to the output of du -k) + # MUST use lstat() here not stat() or getfilesize() since we don't want to + # dereference symlinks + if fstat.st_ino in inodes: + fsize = 0 + else: + fsize = int(math.ceil(float(fstat.st_size)/1024)) + inodes.add(fstat.st_ino) + ftotalsize += fsize + # The path as it would appear on the target + fpath = os.path.join(destdir, os.path.relpath(root, recipe_outdir), fn) + filelist.append((fpath, fsize)) + + if args.dry_run: + print('Files to be deployed for %s on target %s:' % (args.recipename, args.target)) + for item, _ in filelist: + print(' %s' % item) + return 0 - # Now run the script - ret = exec_fakeroot(rd, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True) - if ret != 0: - raise DevtoolError('Deploy failed - rerun with -s to get a complete ' - 'error message') + extraoptions = '' + if args.no_host_check: + extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' + if not args.show_status: + extraoptions += ' -q' - logger.info('Successfully deployed %s' % recipe_outdir) + scp_sshexec = '' + ssh_sshexec = 'ssh' + if args.ssh_exec: + scp_sshexec = "-S %s" % args.ssh_exec + ssh_sshexec = args.ssh_exec + scp_port = '' + ssh_port = '' + if args.port: + scp_port = "-P %s" % args.port + ssh_port = "-p %s" % args.port + + if args.key: + extraoptions += ' -i %s' % args.key - files_list = [] - for root, _, files in os.walk(recipe_outdir): - for filename in files: - filename = os.path.relpath(os.path.join(root, filename), recipe_outdir) - files_list.append(os.path.join(destdir, filename)) + # In order to delete previously deployed files and have the manifest file on + # the target, we write out a shell script and then copy it to the target + # so we can then run it (piping tar output to it). + # (We cannot use scp here, because it doesn't preserve symlinks.) 
+ tmpdir = tempfile.mkdtemp(prefix='devtool') + try: + tmpscript = '/tmp/devtool_deploy.sh' + tmpfilelist = os.path.join(os.path.dirname(tmpscript), 'devtool_deploy.list') + shellscript = _prepare_remote_script(deploy=True, + verbose=args.show_status, + nopreserve=args.no_preserve, + nocheckspace=args.no_check_space) + # Write out the script to a file + with open(os.path.join(tmpdir, os.path.basename(tmpscript)), 'w') as f: + f.write(shellscript) + # Write out the file list + with open(os.path.join(tmpdir, os.path.basename(tmpfilelist)), 'w') as f: + f.write('%d\n' % ftotalsize) + for fpath, fsize in filelist: + f.write('%s %d\n' % (fpath, fsize)) + # Copy them to the target + ret = subprocess.call("scp %s %s %s %s/* %s:%s" % (scp_sshexec, scp_port, extraoptions, tmpdir, args.target, os.path.dirname(tmpscript)), shell=True) + if ret != 0: + raise DevtoolError('Failed to copy script to %s - rerun with -s to ' + 'get a complete error message' % args.target) finally: - tinfoil.shutdown() + shutil.rmtree(tmpdir) + + # Now run the script + ret = exec_fakeroot_no_d(fakerootcmd, fakerootenv, 'tar cf - . | %s %s %s %s \'sh %s %s %s %s\'' % (ssh_sshexec, ssh_port, extraoptions, args.target, tmpscript, args.recipename, destdir, tmpfilelist), cwd=recipe_outdir, shell=True) + if ret != 0: + raise DevtoolError('Deploy failed - rerun with -s to get a complete ' + 'error message') + + logger.info('Successfully deployed %s' % recipe_outdir) + + files_list = [] + for root, _, files in os.walk(recipe_outdir): + for filename in files: + filename = os.path.relpath(os.path.join(root, filename), recipe_outdir) + files_list.append(os.path.join(destdir, filename)) return 0 diff --git a/scripts/lib/devtool/ide_plugins/__init__.py b/scripts/lib/devtool/ide_plugins/__init__.py new file mode 100644 index 0000000000..19c2f61c5f --- /dev/null +++ b/scripts/lib/devtool/ide_plugins/__init__.py @@ -0,0 +1,282 @@ +# +# Copyright (C) 2023-2024 Siemens AG +# +# SPDX-License-Identifier: GPL-2.0-only +# +"""Devtool ide-sdk IDE plugin interface definition and helper functions""" + +import errno +import json +import logging +import os +import stat +from enum import Enum, auto +from devtool import DevtoolError +from bb.utils import mkdirhier + +logger = logging.getLogger('devtool') + + +class BuildTool(Enum): + UNDEFINED = auto() + CMAKE = auto() + MESON = auto() + + @property + def is_c_ccp(self): + if self is BuildTool.CMAKE: + return True + if self is BuildTool.MESON: + return True + return False + + +class GdbCrossConfig: + """Base class defining the GDB configuration generator interface + + Generate a GDB configuration for a binary on the target device. + Only one instance per binary is allowed. This allows to assign unique port + numbers for all gdbserver instances. 
+ """ + _gdbserver_port_next = 1234 + _binaries = [] + + def __init__(self, image_recipe, modified_recipe, binary, gdbserver_multi=True): + self.image_recipe = image_recipe + self.modified_recipe = modified_recipe + self.gdb_cross = modified_recipe.gdb_cross + self.binary = binary + if binary in GdbCrossConfig._binaries: + raise DevtoolError( + "gdbserver config for binary %s is already generated" % binary) + GdbCrossConfig._binaries.append(binary) + self.script_dir = modified_recipe.ide_sdk_scripts_dir + self.gdbinit_dir = os.path.join(self.script_dir, 'gdbinit') + self.gdbserver_multi = gdbserver_multi + self.binary_pretty = self.binary.replace(os.sep, '-').lstrip('-') + self.gdbserver_port = GdbCrossConfig._gdbserver_port_next + GdbCrossConfig._gdbserver_port_next += 1 + self.id_pretty = "%d_%s" % (self.gdbserver_port, self.binary_pretty) + # gdbserver start script + gdbserver_script_file = 'gdbserver_' + self.id_pretty + if self.gdbserver_multi: + gdbserver_script_file += "_m" + self.gdbserver_script = os.path.join( + self.script_dir, gdbserver_script_file) + # gdbinit file + self.gdbinit = os.path.join( + self.gdbinit_dir, 'gdbinit_' + self.id_pretty) + # gdb start script + self.gdb_script = os.path.join( + self.script_dir, 'gdb_' + self.id_pretty) + + def _gen_gdbserver_start_script(self): + """Generate a shell command starting the gdbserver on the remote device via ssh + + GDB supports two modes: + multi: gdbserver remains running over several debug sessions + once: gdbserver terminates after the debugged process terminates + """ + cmd_lines = ['#!/bin/sh'] + if self.gdbserver_multi: + temp_dir = "TEMP_DIR=/tmp/gdbserver_%s; " % self.id_pretty + gdbserver_cmd_start = temp_dir + gdbserver_cmd_start += "test -f \\$TEMP_DIR/pid && exit 0; " + gdbserver_cmd_start += "mkdir -p \\$TEMP_DIR; " + gdbserver_cmd_start += "%s --multi :%s > \\$TEMP_DIR/log 2>&1 & " % ( + self.gdb_cross.gdbserver_path, self.gdbserver_port) + gdbserver_cmd_start += "echo \\$! 
> \\$TEMP_DIR/pid;" + + gdbserver_cmd_stop = temp_dir + gdbserver_cmd_stop += "test -f \\$TEMP_DIR/pid && kill \\$(cat \\$TEMP_DIR/pid); " + gdbserver_cmd_stop += "rm -rf \\$TEMP_DIR; " + + gdbserver_cmd_l = [] + gdbserver_cmd_l.append('if [ "$1" = "stop" ]; then') + gdbserver_cmd_l.append(' shift') + gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % ( + self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_stop)) + gdbserver_cmd_l.append('else') + gdbserver_cmd_l.append(" %s %s %s %s 'sh -c \"%s\"'" % ( + self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start)) + gdbserver_cmd_l.append('fi') + gdbserver_cmd = os.linesep.join(gdbserver_cmd_l) + else: + gdbserver_cmd_start = "%s --once :%s %s" % ( + self.gdb_cross.gdbserver_path, self.gdbserver_port, self.binary) + gdbserver_cmd = "%s %s %s %s 'sh -c \"%s\"'" % ( + self.gdb_cross.target_device.ssh_sshexec, self.gdb_cross.target_device.ssh_port, self.gdb_cross.target_device.extraoptions, self.gdb_cross.target_device.target, gdbserver_cmd_start) + cmd_lines.append(gdbserver_cmd) + GdbCrossConfig.write_file(self.gdbserver_script, cmd_lines, True) + + def _gen_gdbinit_config(self): + """Generate a gdbinit file for this binary and the corresponding gdbserver configuration""" + gdbinit_lines = ['# This file is generated by devtool ide-sdk'] + if self.gdbserver_multi: + target_help = '# gdbserver --multi :%d' % self.gdbserver_port + remote_cmd = 'target extended-remote' + else: + target_help = '# gdbserver :%d %s' % ( + self.gdbserver_port, self.binary) + remote_cmd = 'target remote' + gdbinit_lines.append('# On the remote target:') + gdbinit_lines.append(target_help) + gdbinit_lines.append('# On the build machine:') + gdbinit_lines.append('# cd ' + self.modified_recipe.real_srctree) + gdbinit_lines.append( + '# ' + self.gdb_cross.gdb + ' -ix ' + self.gdbinit) + + gdbinit_lines.append('set sysroot ' + self.modified_recipe.d) + gdbinit_lines.append('set substitute-path "/usr/include" "' + + os.path.join(self.modified_recipe.recipe_sysroot, 'usr', 'include') + '"') + # Disable debuginfod for now, the IDE configuration uses rootfs-dbg from the image workdir. + gdbinit_lines.append('set debuginfod enabled off') + if self.image_recipe.rootfs_dbg: + gdbinit_lines.append( + 'set solib-search-path "' + self.modified_recipe.solib_search_path_str(self.image_recipe) + '"') + # First: Search for sources of this recipe in the workspace folder + if self.modified_recipe.pn in self.modified_recipe.target_dbgsrc_dir: + gdbinit_lines.append('set substitute-path "%s" "%s"' % + (self.modified_recipe.target_dbgsrc_dir, self.modified_recipe.real_srctree)) + else: + logger.error( + "TARGET_DBGSRC_DIR must contain the recipe name PN.") + # Second: Search for sources of other recipes in the rootfs-dbg + if self.modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"): + gdbinit_lines.append('set substitute-path "/usr/src/debug" "%s"' % os.path.join( + self.image_recipe.rootfs_dbg, "usr", "src", "debug")) + else: + logger.error( + "TARGET_DBGSRC_DIR must start with /usr/src/debug.") + else: + logger.warning( + "Cannot setup debug symbols configuration for GDB. 
IMAGE_GEN_DEBUGFS is not enabled.") + gdbinit_lines.append( + '%s %s:%d' % (remote_cmd, self.gdb_cross.host, self.gdbserver_port)) + gdbinit_lines.append('set remote exec-file ' + self.binary) + gdbinit_lines.append( + 'run ' + os.path.join(self.modified_recipe.d, self.binary)) + + GdbCrossConfig.write_file(self.gdbinit, gdbinit_lines) + + def _gen_gdb_start_script(self): + """Generate a script starting GDB with the corresponding gdbinit configuration.""" + cmd_lines = ['#!/bin/sh'] + cmd_lines.append('cd ' + self.modified_recipe.real_srctree) + cmd_lines.append(self.gdb_cross.gdb + ' -ix ' + + self.gdbinit + ' "$@"') + GdbCrossConfig.write_file(self.gdb_script, cmd_lines, True) + + def initialize(self): + self._gen_gdbserver_start_script() + self._gen_gdbinit_config() + self._gen_gdb_start_script() + + @staticmethod + def write_file(script_file, cmd_lines, executable=False): + script_dir = os.path.dirname(script_file) + mkdirhier(script_dir) + with open(script_file, 'w') as script_f: + script_f.write(os.linesep.join(cmd_lines)) + script_f.write(os.linesep) + if executable: + st = os.stat(script_file) + os.chmod(script_file, st.st_mode | stat.S_IEXEC) + logger.info("Created: %s" % script_file) + + +class IdeBase: + """Base class defining the interface for IDE plugins""" + + def __init__(self): + self.ide_name = 'undefined' + self.gdb_cross_configs = [] + + @classmethod + def ide_plugin_priority(cls): + """Used to find the default ide handler if --ide is not passed""" + return 10 + + def setup_shared_sysroots(self, shared_env): + logger.warn("Shared sysroot mode is not supported for IDE %s" % + self.ide_name) + + def setup_modified_recipe(self, args, image_recipe, modified_recipe): + logger.warn("Modified recipe mode is not supported for IDE %s" % + self.ide_name) + + def initialize_gdb_cross_configs(self, image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfig): + binaries = modified_recipe.find_installed_binaries() + for binary in binaries: + gdb_cross_config = gdb_cross_config_class( + image_recipe, modified_recipe, binary) + gdb_cross_config.initialize() + self.gdb_cross_configs.append(gdb_cross_config) + + @staticmethod + def gen_oe_scrtips_sym_link(modified_recipe): + # create a sym-link from sources to the scripts directory + if os.path.isdir(modified_recipe.ide_sdk_scripts_dir): + IdeBase.symlink_force(modified_recipe.ide_sdk_scripts_dir, + os.path.join(modified_recipe.real_srctree, 'oe-scripts')) + + @staticmethod + def update_json_file(json_dir, json_file, update_dict): + """Update a json file + + By default it uses the dict.update function. If this is not sutiable + the update function might be passed via update_func parameter. + """ + json_path = os.path.join(json_dir, json_file) + logger.info("Updating IDE config file: %s (%s)" % + (json_file, json_path)) + if not os.path.exists(json_dir): + os.makedirs(json_dir) + try: + with open(json_path) as f: + orig_dict = json.load(f) + except json.decoder.JSONDecodeError: + logger.info( + "Decoding %s failed. 
Probably because of comments in the json file" % json_path) + orig_dict = {} + except FileNotFoundError: + orig_dict = {} + orig_dict.update(update_dict) + with open(json_path, 'w') as f: + json.dump(orig_dict, f, indent=4) + + @staticmethod + def symlink_force(tgt, dst): + try: + os.symlink(tgt, dst) + except OSError as err: + if err.errno == errno.EEXIST: + if os.readlink(dst) != tgt: + os.remove(dst) + os.symlink(tgt, dst) + else: + raise err + + +def get_devtool_deploy_opts(args): + """Filter args for devtool deploy-target args""" + if not args.target: + return None + devtool_deploy_opts = [args.target] + if args.no_host_check: + devtool_deploy_opts += ["-c"] + if args.show_status: + devtool_deploy_opts += ["-s"] + if args.no_preserve: + devtool_deploy_opts += ["-p"] + if args.no_check_space: + devtool_deploy_opts += ["--no-check-space"] + if args.ssh_exec: + devtool_deploy_opts += ["-e", args.ssh.exec] + if args.port: + devtool_deploy_opts += ["-P", args.port] + if args.key: + devtool_deploy_opts += ["-I", args.key] + if args.strip is False: + devtool_deploy_opts += ["--no-strip"] + return devtool_deploy_opts diff --git a/scripts/lib/devtool/ide_plugins/ide_code.py b/scripts/lib/devtool/ide_plugins/ide_code.py new file mode 100644 index 0000000000..a62b93224e --- /dev/null +++ b/scripts/lib/devtool/ide_plugins/ide_code.py @@ -0,0 +1,463 @@ +# +# Copyright (C) 2023-2024 Siemens AG +# +# SPDX-License-Identifier: GPL-2.0-only +# +"""Devtool ide-sdk IDE plugin for VSCode and VSCodium""" + +import json +import logging +import os +import shutil +from devtool.ide_plugins import BuildTool, IdeBase, GdbCrossConfig, get_devtool_deploy_opts + +logger = logging.getLogger('devtool') + + +class GdbCrossConfigVSCode(GdbCrossConfig): + def __init__(self, image_recipe, modified_recipe, binary): + super().__init__(image_recipe, modified_recipe, binary, False) + + def initialize(self): + self._gen_gdbserver_start_script() + + +class IdeVSCode(IdeBase): + """Manage IDE configurations for VSCode + + Modified recipe mode: + - cmake: use the cmake-preset generated by devtool ide-sdk + - meson: meson is called via a wrapper script generated by devtool ide-sdk + + Shared sysroot mode: + In shared sysroot mode, the cross tool-chain is exported to the user's global configuration. + A workspace cannot be created because there is no recipe that defines how a workspace could + be set up. + - cmake: adds a cmake-kit to .local/share/CMakeTools/cmake-tools-kits.json + The cmake-kit uses the environment script and the tool-chain file + generated by meta-ide-support. + - meson: Meson needs manual workspace configuration. 
+ """ + + @classmethod + def ide_plugin_priority(cls): + """If --ide is not passed this is the default plugin""" + if shutil.which('code'): + return 100 + return 0 + + def setup_shared_sysroots(self, shared_env): + """Expose the toolchain of the shared sysroots SDK""" + datadir = shared_env.ide_support.datadir + deploy_dir_image = shared_env.ide_support.deploy_dir_image + real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys + standalone_sysroot_native = shared_env.build_sysroots.standalone_sysroot_native + vscode_ws_path = os.path.join( + os.environ['HOME'], '.local', 'share', 'CMakeTools') + cmake_kits_path = os.path.join(vscode_ws_path, 'cmake-tools-kits.json') + oecmake_generator = "Ninja" + env_script = os.path.join( + deploy_dir_image, 'environment-setup-' + real_multimach_target_sys) + + if not os.path.isdir(vscode_ws_path): + os.makedirs(vscode_ws_path) + cmake_kits_old = [] + if os.path.exists(cmake_kits_path): + with open(cmake_kits_path, 'r', encoding='utf-8') as cmake_kits_file: + cmake_kits_old = json.load(cmake_kits_file) + cmake_kits = cmake_kits_old.copy() + + cmake_kit_new = { + "name": "OE " + real_multimach_target_sys, + "environmentSetupScript": env_script, + "toolchainFile": standalone_sysroot_native + datadir + "/cmake/OEToolchainConfig.cmake", + "preferredGenerator": { + "name": oecmake_generator + } + } + + def merge_kit(cmake_kits, cmake_kit_new): + i = 0 + while i < len(cmake_kits): + if 'environmentSetupScript' in cmake_kits[i] and \ + cmake_kits[i]['environmentSetupScript'] == cmake_kit_new['environmentSetupScript']: + cmake_kits[i] = cmake_kit_new + return + i += 1 + cmake_kits.append(cmake_kit_new) + merge_kit(cmake_kits, cmake_kit_new) + + if cmake_kits != cmake_kits_old: + logger.info("Updating: %s" % cmake_kits_path) + with open(cmake_kits_path, 'w', encoding='utf-8') as cmake_kits_file: + json.dump(cmake_kits, cmake_kits_file, indent=4) + else: + logger.info("Already up to date: %s" % cmake_kits_path) + + cmake_native = os.path.join( + shared_env.build_sysroots.standalone_sysroot_native, 'usr', 'bin', 'cmake') + if os.path.isfile(cmake_native): + logger.info('cmake-kits call cmake by default. If the cmake provided by this SDK should be used, please add the following line to ".vscode/settings.json" file: "cmake.cmakePath": "%s"' % cmake_native) + else: + logger.error("Cannot find cmake native at: %s" % cmake_native) + + def dot_code_dir(self, modified_recipe): + return os.path.join(modified_recipe.srctree, '.vscode') + + def __vscode_settings_meson(self, settings_dict, modified_recipe): + if modified_recipe.build_tool is not BuildTool.MESON: + return + settings_dict["mesonbuild.mesonPath"] = modified_recipe.meson_wrapper + + confopts = modified_recipe.mesonopts.split() + confopts += modified_recipe.meson_cross_file.split() + confopts += modified_recipe.extra_oemeson.split() + settings_dict["mesonbuild.configureOptions"] = confopts + settings_dict["mesonbuild.buildFolder"] = modified_recipe.b + + def __vscode_settings_cmake(self, settings_dict, modified_recipe): + """Add cmake specific settings to settings.json. + + Note: most settings are passed to the cmake preset. 
+ """ + if modified_recipe.build_tool is not BuildTool.CMAKE: + return + settings_dict["cmake.configureOnOpen"] = True + settings_dict["cmake.sourceDirectory"] = modified_recipe.real_srctree + + def vscode_settings(self, modified_recipe, image_recipe): + files_excludes = { + "**/.git/**": True, + "**/oe-logs/**": True, + "**/oe-workdir/**": True, + "**/source-date-epoch/**": True + } + python_exclude = [ + "**/.git/**", + "**/oe-logs/**", + "**/oe-workdir/**", + "**/source-date-epoch/**" + ] + files_readonly = { + modified_recipe.recipe_sysroot + '/**': True, + modified_recipe.recipe_sysroot_native + '/**': True, + } + if image_recipe.rootfs_dbg is not None: + files_readonly[image_recipe.rootfs_dbg + '/**'] = True + settings_dict = { + "files.watcherExclude": files_excludes, + "files.exclude": files_excludes, + "files.readonlyInclude": files_readonly, + "python.analysis.exclude": python_exclude + } + self.__vscode_settings_cmake(settings_dict, modified_recipe) + self.__vscode_settings_meson(settings_dict, modified_recipe) + + settings_file = 'settings.json' + IdeBase.update_json_file( + self.dot_code_dir(modified_recipe), settings_file, settings_dict) + + def __vscode_extensions_cmake(self, modified_recipe, recommendations): + if modified_recipe.build_tool is not BuildTool.CMAKE: + return + recommendations += [ + "twxs.cmake", + "ms-vscode.cmake-tools", + "ms-vscode.cpptools", + "ms-vscode.cpptools-extension-pack", + "ms-vscode.cpptools-themes" + ] + + def __vscode_extensions_meson(self, modified_recipe, recommendations): + if modified_recipe.build_tool is not BuildTool.MESON: + return + recommendations += [ + 'mesonbuild.mesonbuild', + "ms-vscode.cpptools", + "ms-vscode.cpptools-extension-pack", + "ms-vscode.cpptools-themes" + ] + + def vscode_extensions(self, modified_recipe): + recommendations = [] + self.__vscode_extensions_cmake(modified_recipe, recommendations) + self.__vscode_extensions_meson(modified_recipe, recommendations) + extensions_file = 'extensions.json' + IdeBase.update_json_file( + self.dot_code_dir(modified_recipe), extensions_file, {"recommendations": recommendations}) + + def vscode_c_cpp_properties(self, modified_recipe): + properties_dict = { + "name": modified_recipe.recipe_id_pretty, + } + if modified_recipe.build_tool is BuildTool.CMAKE: + properties_dict["configurationProvider"] = "ms-vscode.cmake-tools" + elif modified_recipe.build_tool is BuildTool.MESON: + properties_dict["configurationProvider"] = "mesonbuild.mesonbuild" + properties_dict["compilerPath"] = os.path.join(modified_recipe.staging_bindir_toolchain, modified_recipe.cxx.split()[0]) + else: # no C/C++ build + return + + properties_dicts = { + "configurations": [ + properties_dict + ], + "version": 4 + } + prop_file = 'c_cpp_properties.json' + IdeBase.update_json_file( + self.dot_code_dir(modified_recipe), prop_file, properties_dicts) + + def vscode_launch_bin_dbg(self, gdb_cross_config): + modified_recipe = gdb_cross_config.modified_recipe + + launch_config = { + "name": gdb_cross_config.id_pretty, + "type": "cppdbg", + "request": "launch", + "program": os.path.join(modified_recipe.d, gdb_cross_config.binary.lstrip('/')), + "stopAtEntry": True, + "cwd": "${workspaceFolder}", + "environment": [], + "externalConsole": False, + "MIMode": "gdb", + "preLaunchTask": gdb_cross_config.id_pretty, + "miDebuggerPath": modified_recipe.gdb_cross.gdb, + "miDebuggerServerAddress": "%s:%d" % (modified_recipe.gdb_cross.host, gdb_cross_config.gdbserver_port) + } + + # Search for header files in recipe-sysroot. 
+ src_file_map = { + "/usr/include": os.path.join(modified_recipe.recipe_sysroot, "usr", "include") + } + # First of all search for not stripped binaries in the image folder. + # These binaries are copied (and optionally stripped) by deploy-target + setup_commands = [ + { + "description": "sysroot", + "text": "set sysroot " + modified_recipe.d + } + ] + + if gdb_cross_config.image_recipe.rootfs_dbg: + launch_config['additionalSOLibSearchPath'] = modified_recipe.solib_search_path_str( + gdb_cross_config.image_recipe) + # First: Search for sources of this recipe in the workspace folder + if modified_recipe.pn in modified_recipe.target_dbgsrc_dir: + src_file_map[modified_recipe.target_dbgsrc_dir] = "${workspaceFolder}" + else: + logger.error( + "TARGET_DBGSRC_DIR must contain the recipe name PN.") + # Second: Search for sources of other recipes in the rootfs-dbg + if modified_recipe.target_dbgsrc_dir.startswith("/usr/src/debug"): + src_file_map["/usr/src/debug"] = os.path.join( + gdb_cross_config.image_recipe.rootfs_dbg, "usr", "src", "debug") + else: + logger.error( + "TARGET_DBGSRC_DIR must start with /usr/src/debug.") + else: + logger.warning( + "Cannot setup debug symbols configuration for GDB. IMAGE_GEN_DEBUGFS is not enabled.") + + launch_config['sourceFileMap'] = src_file_map + launch_config['setupCommands'] = setup_commands + return launch_config + + def vscode_launch(self, modified_recipe): + """GDB Launch configuration for binaries (elf files)""" + + configurations = [] + for gdb_cross_config in self.gdb_cross_configs: + if gdb_cross_config.modified_recipe is modified_recipe: + configurations.append(self.vscode_launch_bin_dbg(gdb_cross_config)) + launch_dict = { + "version": "0.2.0", + "configurations": configurations + } + launch_file = 'launch.json' + IdeBase.update_json_file( + self.dot_code_dir(modified_recipe), launch_file, launch_dict) + + def vscode_tasks_cpp(self, args, modified_recipe): + run_install_deploy = modified_recipe.gen_install_deploy_script(args) + install_task_name = "install && deploy-target %s" % modified_recipe.recipe_id_pretty + tasks_dict = { + "version": "2.0.0", + "tasks": [ + { + "label": install_task_name, + "type": "shell", + "command": run_install_deploy, + "problemMatcher": [] + } + ] + } + for gdb_cross_config in self.gdb_cross_configs: + if gdb_cross_config.modified_recipe is not modified_recipe: + continue + tasks_dict['tasks'].append( + { + "label": gdb_cross_config.id_pretty, + "type": "shell", + "isBackground": True, + "dependsOn": [ + install_task_name + ], + "command": gdb_cross_config.gdbserver_script, + "problemMatcher": [ + { + "pattern": [ + { + "regexp": ".", + "file": 1, + "location": 2, + "message": 3 + } + ], + "background": { + "activeOnStart": True, + "beginsPattern": ".", + "endsPattern": ".", + } + } + ] + }) + tasks_file = 'tasks.json' + IdeBase.update_json_file( + self.dot_code_dir(modified_recipe), tasks_file, tasks_dict) + + def vscode_tasks_fallback(self, args, modified_recipe): + oe_init_dir = modified_recipe.oe_init_dir + oe_init = ". 
%s %s > /dev/null && " % (modified_recipe.oe_init_build_env, modified_recipe.topdir) + dt_build = "devtool build " + dt_build_label = dt_build + modified_recipe.recipe_id_pretty + dt_build_cmd = dt_build + modified_recipe.bpn + clean_opt = " --clean" + dt_build_clean_label = dt_build + modified_recipe.recipe_id_pretty + clean_opt + dt_build_clean_cmd = dt_build + modified_recipe.bpn + clean_opt + dt_deploy = "devtool deploy-target " + dt_deploy_label = dt_deploy + modified_recipe.recipe_id_pretty + dt_deploy_cmd = dt_deploy + modified_recipe.bpn + dt_build_deploy_label = "devtool build & deploy-target %s" % modified_recipe.recipe_id_pretty + deploy_opts = ' '.join(get_devtool_deploy_opts(args)) + tasks_dict = { + "version": "2.0.0", + "tasks": [ + { + "label": dt_build_label, + "type": "shell", + "command": "bash", + "linux": { + "options": { + "cwd": oe_init_dir + } + }, + "args": [ + "--login", + "-c", + "%s%s" % (oe_init, dt_build_cmd) + ], + "problemMatcher": [] + }, + { + "label": dt_deploy_label, + "type": "shell", + "command": "bash", + "linux": { + "options": { + "cwd": oe_init_dir + } + }, + "args": [ + "--login", + "-c", + "%s%s %s" % ( + oe_init, dt_deploy_cmd, deploy_opts) + ], + "problemMatcher": [] + }, + { + "label": dt_build_deploy_label, + "dependsOrder": "sequence", + "dependsOn": [ + dt_build_label, + dt_deploy_label + ], + "problemMatcher": [], + "group": { + "kind": "build", + "isDefault": True + } + }, + { + "label": dt_build_clean_label, + "type": "shell", + "command": "bash", + "linux": { + "options": { + "cwd": oe_init_dir + } + }, + "args": [ + "--login", + "-c", + "%s%s" % (oe_init, dt_build_clean_cmd) + ], + "problemMatcher": [] + } + ] + } + if modified_recipe.gdb_cross: + for gdb_cross_config in self.gdb_cross_configs: + if gdb_cross_config.modified_recipe is not modified_recipe: + continue + tasks_dict['tasks'].append( + { + "label": gdb_cross_config.id_pretty, + "type": "shell", + "isBackground": True, + "dependsOn": [ + dt_build_deploy_label + ], + "command": gdb_cross_config.gdbserver_script, + "problemMatcher": [ + { + "pattern": [ + { + "regexp": ".", + "file": 1, + "location": 2, + "message": 3 + } + ], + "background": { + "activeOnStart": True, + "beginsPattern": ".", + "endsPattern": ".", + } + } + ] + }) + tasks_file = 'tasks.json' + IdeBase.update_json_file( + self.dot_code_dir(modified_recipe), tasks_file, tasks_dict) + + def vscode_tasks(self, args, modified_recipe): + if modified_recipe.build_tool.is_c_ccp: + self.vscode_tasks_cpp(args, modified_recipe) + else: + self.vscode_tasks_fallback(args, modified_recipe) + + def setup_modified_recipe(self, args, image_recipe, modified_recipe): + self.vscode_settings(modified_recipe, image_recipe) + self.vscode_extensions(modified_recipe) + self.vscode_c_cpp_properties(modified_recipe) + if args.target: + self.initialize_gdb_cross_configs( + image_recipe, modified_recipe, gdb_cross_config_class=GdbCrossConfigVSCode) + self.vscode_launch(modified_recipe) + self.vscode_tasks(args, modified_recipe) + + +def register_ide_plugin(ide_plugins): + ide_plugins['code'] = IdeVSCode diff --git a/scripts/lib/devtool/ide_plugins/ide_none.py b/scripts/lib/devtool/ide_plugins/ide_none.py new file mode 100644 index 0000000000..f106c5a026 --- /dev/null +++ b/scripts/lib/devtool/ide_plugins/ide_none.py @@ -0,0 +1,53 @@ +# +# Copyright (C) 2023-2024 Siemens AG +# +# SPDX-License-Identifier: GPL-2.0-only +# +"""Devtool ide-sdk generic IDE plugin""" + +import os +import logging +from devtool.ide_plugins import IdeBase, 
GdbCrossConfig + +logger = logging.getLogger('devtool') + + +class IdeNone(IdeBase): + """Generate some generic helpers for other IDEs + + Modified recipe mode: + Generate some helper scripts for remote debugging with GDB + + Shared sysroot mode: + A wrapper for bitbake meta-ide-support and bitbake build-sysroots + """ + + def __init__(self): + super().__init__() + + def setup_shared_sysroots(self, shared_env): + real_multimach_target_sys = shared_env.ide_support.real_multimach_target_sys + deploy_dir_image = shared_env.ide_support.deploy_dir_image + env_script = os.path.join( + deploy_dir_image, 'environment-setup-' + real_multimach_target_sys) + logger.info( + "To use this SDK please source this: %s" % env_script) + + def setup_modified_recipe(self, args, image_recipe, modified_recipe): + """generate some helper scripts and config files + + - Execute the do_install task + - Execute devtool deploy-target + - Generate a gdbinit file per executable + - Generate the oe-scripts sym-link + """ + script_path = modified_recipe.gen_install_deploy_script(args) + logger.info("Created: %s" % script_path) + + self.initialize_gdb_cross_configs(image_recipe, modified_recipe) + + IdeBase.gen_oe_scrtips_sym_link(modified_recipe) + + +def register_ide_plugin(ide_plugins): + ide_plugins['none'] = IdeNone diff --git a/scripts/lib/devtool/ide_sdk.py b/scripts/lib/devtool/ide_sdk.py new file mode 100755 index 0000000000..65873b088d --- /dev/null +++ b/scripts/lib/devtool/ide_sdk.py @@ -0,0 +1,1070 @@ +# Development tool - ide-sdk command plugin +# +# Copyright (C) 2023-2024 Siemens AG +# +# SPDX-License-Identifier: GPL-2.0-only +# +"""Devtool ide-sdk plugin""" + +import json +import logging +import os +import re +import shutil +import stat +import subprocess +import sys +from argparse import RawTextHelpFormatter +from enum import Enum + +import scriptutils +import bb +from devtool import exec_build_env_command, setup_tinfoil, check_workspace_recipe, DevtoolError, parse_recipe +from devtool.standard import get_real_srctree +from devtool.ide_plugins import BuildTool + + +logger = logging.getLogger('devtool') + +# dict of classes derived from IdeBase +ide_plugins = {} + + +class DevtoolIdeMode(Enum): + """Different modes are supported by the ide-sdk plugin. + + The enum might be extended by more advanced modes in the future. Some ideas: + - auto: modified if all recipes are modified, shared if none of the recipes is modified. + - mixed: modified mode for modified recipes, shared mode for all other recipes. 
+ """ + + modified = 'modified' + shared = 'shared' + + +class TargetDevice: + """SSH remote login parameters""" + + def __init__(self, args): + self.extraoptions = '' + if args.no_host_check: + self.extraoptions += '-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no' + self.ssh_sshexec = 'ssh' + if args.ssh_exec: + self.ssh_sshexec = args.ssh_exec + self.ssh_port = '' + if args.port: + self.ssh_port = "-p %s" % args.port + if args.key: + self.extraoptions += ' -i %s' % args.key + + self.target = args.target + target_sp = args.target.split('@') + if len(target_sp) == 1: + self.login = "" + self.host = target_sp[0] + elif len(target_sp) == 2: + self.login = target_sp[0] + self.host = target_sp[1] + else: + logger.error("Invalid target argument: %s" % args.target) + + +class RecipeNative: + """Base class for calling bitbake to provide a -native recipe""" + + def __init__(self, name, target_arch=None): + self.name = name + self.target_arch = target_arch + self.bootstrap_tasks = [self.name + ':do_addto_recipe_sysroot'] + self.staging_bindir_native = None + self.target_sys = None + self.__native_bin = None + + def _initialize(self, config, workspace, tinfoil): + """Get the parsed recipe""" + recipe_d = parse_recipe( + config, tinfoil, self.name, appends=True, filter_workspace=False) + if not recipe_d: + raise DevtoolError("Parsing %s recipe failed" % self.name) + self.staging_bindir_native = os.path.realpath( + recipe_d.getVar('STAGING_BINDIR_NATIVE')) + self.target_sys = recipe_d.getVar('TARGET_SYS') + return recipe_d + + def initialize(self, config, workspace, tinfoil): + """Basic initialization that can be overridden by a derived class""" + self._initialize(config, workspace, tinfoil) + + @property + def native_bin(self): + if not self.__native_bin: + raise DevtoolError("native binary name is not defined.") + return self.__native_bin + + +class RecipeGdbCross(RecipeNative): + """Handle handle gdb-cross on the host and the gdbserver on the target device""" + + def __init__(self, args, target_arch, target_device): + super().__init__('gdb-cross-' + target_arch, target_arch) + self.target_device = target_device + self.gdb = None + self.gdbserver_port_next = int(args.gdbserver_port_start) + self.config_db = {} + + def __find_gdbserver(self, config, tinfoil): + """Absolute path of the gdbserver""" + recipe_d_gdb = parse_recipe( + config, tinfoil, 'gdb', appends=True, filter_workspace=False) + if not recipe_d_gdb: + raise DevtoolError("Parsing gdb recipe failed") + return os.path.join(recipe_d_gdb.getVar('bindir'), 'gdbserver') + + def initialize(self, config, workspace, tinfoil): + super()._initialize(config, workspace, tinfoil) + gdb_bin = self.target_sys + '-gdb' + gdb_path = os.path.join( + self.staging_bindir_native, self.target_sys, gdb_bin) + self.gdb = gdb_path + self.gdbserver_path = self.__find_gdbserver(config, tinfoil) + + @property + def host(self): + return self.target_device.host + + +class RecipeImage: + """Handle some image recipe related properties + + Most workflows require firmware that runs on the target device. + This firmware must be consistent with the setup of the host system. + In particular, the debug symbols must be compatible. For this, the + rootfs must be created as part of the SDK. 
+ """ + + def __init__(self, name): + self.combine_dbg_image = False + self.gdbserver_missing = False + self.name = name + self.rootfs = None + self.__rootfs_dbg = None + self.bootstrap_tasks = [self.name + ':do_build'] + + def initialize(self, config, tinfoil): + image_d = parse_recipe( + config, tinfoil, self.name, appends=True, filter_workspace=False) + if not image_d: + raise DevtoolError( + "Parsing image recipe %s failed" % self.name) + + self.combine_dbg_image = bb.data.inherits_class( + 'image-combined-dbg', image_d) + + workdir = image_d.getVar('WORKDIR') + self.rootfs = os.path.join(workdir, 'rootfs') + if image_d.getVar('IMAGE_GEN_DEBUGFS') == "1": + self.__rootfs_dbg = os.path.join(workdir, 'rootfs-dbg') + + self.gdbserver_missing = 'gdbserver' not in image_d.getVar( + 'IMAGE_INSTALL') + + @property + def debug_support(self): + return bool(self.rootfs_dbg) + + @property + def rootfs_dbg(self): + if self.__rootfs_dbg and os.path.isdir(self.__rootfs_dbg): + return self.__rootfs_dbg + return None + + +class RecipeMetaIdeSupport: + """For the shared sysroots mode meta-ide-support is needed + + For use cases where just a cross tool-chain is required but + no recipe is used, devtool ide-sdk abstracts calling bitbake meta-ide-support + and bitbake build-sysroots. This also allows to expose the cross-toolchains + to IDEs. For example VSCode support different tool-chains with e.g. cmake-kits. + """ + + def __init__(self): + self.bootstrap_tasks = ['meta-ide-support:do_build'] + self.topdir = None + self.datadir = None + self.deploy_dir_image = None + self.build_sys = None + # From toolchain-scripts + self.real_multimach_target_sys = None + + def initialize(self, config, tinfoil): + meta_ide_support_d = parse_recipe( + config, tinfoil, 'meta-ide-support', appends=True, filter_workspace=False) + if not meta_ide_support_d: + raise DevtoolError("Parsing meta-ide-support recipe failed") + + self.topdir = meta_ide_support_d.getVar('TOPDIR') + self.datadir = meta_ide_support_d.getVar('datadir') + self.deploy_dir_image = meta_ide_support_d.getVar( + 'DEPLOY_DIR_IMAGE') + self.build_sys = meta_ide_support_d.getVar('BUILD_SYS') + self.real_multimach_target_sys = meta_ide_support_d.getVar( + 'REAL_MULTIMACH_TARGET_SYS') + + +class RecipeBuildSysroots: + """For the shared sysroots mode build-sysroots is needed""" + + def __init__(self): + self.standalone_sysroot = None + self.standalone_sysroot_native = None + self.bootstrap_tasks = [ + 'build-sysroots:do_build_target_sysroot', + 'build-sysroots:do_build_native_sysroot' + ] + + def initialize(self, config, tinfoil): + build_sysroots_d = parse_recipe( + config, tinfoil, 'build-sysroots', appends=True, filter_workspace=False) + if not build_sysroots_d: + raise DevtoolError("Parsing build-sysroots recipe failed") + self.standalone_sysroot = build_sysroots_d.getVar( + 'STANDALONE_SYSROOT') + self.standalone_sysroot_native = build_sysroots_d.getVar( + 'STANDALONE_SYSROOT_NATIVE') + + +class SharedSysrootsEnv: + """Handle the shared sysroots based workflow + + Support the workflow with just a tool-chain without a recipe. 
+ It's basically like: + bitbake some-dependencies + bitbake meta-ide-support + bitbake build-sysroots + Use the environment-* file found in the deploy folder + """ + + def __init__(self): + self.ide_support = None + self.build_sysroots = None + + def initialize(self, ide_support, build_sysroots): + self.ide_support = ide_support + self.build_sysroots = build_sysroots + + def setup_ide(self, ide): + ide.setup(self) + + +class RecipeNotModified: + """Handling of recipes added to the Direct DSK shared sysroots.""" + + def __init__(self, name): + self.name = name + self.bootstrap_tasks = [name + ':do_populate_sysroot'] + + +class RecipeModified: + """Handling of recipes in the workspace created by devtool modify""" + OE_INIT_BUILD_ENV = 'oe-init-build-env' + + VALID_BASH_ENV_NAME_CHARS = re.compile(r"^[a-zA-Z0-9_]*$") + + def __init__(self, name): + self.name = name + self.bootstrap_tasks = [name + ':do_install'] + self.gdb_cross = None + # workspace + self.real_srctree = None + self.srctree = None + self.ide_sdk_dir = None + self.ide_sdk_scripts_dir = None + self.bbappend = None + # recipe variables from d.getVar + self.b = None + self.base_libdir = None + self.bblayers = None + self.bpn = None + self.d = None + self.fakerootcmd = None + self.fakerootenv = None + self.libdir = None + self.max_process = None + self.package_arch = None + self.package_debug_split_style = None + self.path = None + self.pn = None + self.recipe_sysroot = None + self.recipe_sysroot_native = None + self.staging_incdir = None + self.strip_cmd = None + self.target_arch = None + self.target_dbgsrc_dir = None + self.topdir = None + self.workdir = None + self.recipe_id = None + # replicate bitbake build environment + self.exported_vars = None + self.cmd_compile = None + self.__oe_init_dir = None + # main build tool used by this recipe + self.build_tool = BuildTool.UNDEFINED + # build_tool = cmake + self.oecmake_generator = None + self.cmake_cache_vars = None + # build_tool = meson + self.meson_buildtype = None + self.meson_wrapper = None + self.mesonopts = None + self.extra_oemeson = None + self.meson_cross_file = None + + def initialize(self, config, workspace, tinfoil): + recipe_d = parse_recipe( + config, tinfoil, self.name, appends=True, filter_workspace=False) + if not recipe_d: + raise DevtoolError("Parsing %s recipe failed" % self.name) + + # Verify this recipe is built as externalsrc setup by devtool modify + workspacepn = check_workspace_recipe( + workspace, self.name, bbclassextend=True) + self.srctree = workspace[workspacepn]['srctree'] + # Need to grab this here in case the source is within a subdirectory + self.real_srctree = get_real_srctree( + self.srctree, recipe_d.getVar('S'), recipe_d.getVar('WORKDIR')) + self.bbappend = workspace[workspacepn]['bbappend'] + + self.ide_sdk_dir = os.path.join( + config.workspace_path, 'ide-sdk', self.name) + if os.path.exists(self.ide_sdk_dir): + shutil.rmtree(self.ide_sdk_dir) + self.ide_sdk_scripts_dir = os.path.join(self.ide_sdk_dir, 'scripts') + + self.b = recipe_d.getVar('B') + self.base_libdir = recipe_d.getVar('base_libdir') + self.bblayers = recipe_d.getVar('BBLAYERS').split() + self.bpn = recipe_d.getVar('BPN') + self.cxx = recipe_d.getVar('CXX') + self.d = recipe_d.getVar('D') + self.fakerootcmd = recipe_d.getVar('FAKEROOTCMD') + self.fakerootenv = recipe_d.getVar('FAKEROOTENV') + self.libdir = recipe_d.getVar('libdir') + self.max_process = int(recipe_d.getVar( + "BB_NUMBER_THREADS") or os.cpu_count() or 1) + self.package_arch = recipe_d.getVar('PACKAGE_ARCH') + 
self.package_debug_split_style = recipe_d.getVar( + 'PACKAGE_DEBUG_SPLIT_STYLE') + self.path = recipe_d.getVar('PATH') + self.pn = recipe_d.getVar('PN') + self.recipe_sysroot = os.path.realpath( + recipe_d.getVar('RECIPE_SYSROOT')) + self.recipe_sysroot_native = os.path.realpath( + recipe_d.getVar('RECIPE_SYSROOT_NATIVE')) + self.staging_bindir_toolchain = os.path.realpath( + recipe_d.getVar('STAGING_BINDIR_TOOLCHAIN')) + self.staging_incdir = os.path.realpath( + recipe_d.getVar('STAGING_INCDIR')) + self.strip_cmd = recipe_d.getVar('STRIP') + self.target_arch = recipe_d.getVar('TARGET_ARCH') + self.target_dbgsrc_dir = recipe_d.getVar('TARGET_DBGSRC_DIR') + self.topdir = recipe_d.getVar('TOPDIR') + self.workdir = os.path.realpath(recipe_d.getVar('WORKDIR')) + + self.__init_exported_variables(recipe_d) + + if bb.data.inherits_class('cmake', recipe_d): + self.oecmake_generator = recipe_d.getVar('OECMAKE_GENERATOR') + self.__init_cmake_preset_cache(recipe_d) + self.build_tool = BuildTool.CMAKE + elif bb.data.inherits_class('meson', recipe_d): + self.meson_buildtype = recipe_d.getVar('MESON_BUILDTYPE') + self.mesonopts = recipe_d.getVar('MESONOPTS') + self.extra_oemeson = recipe_d.getVar('EXTRA_OEMESON') + self.meson_cross_file = recipe_d.getVar('MESON_CROSS_FILE') + self.build_tool = BuildTool.MESON + + # Recipe ID is the identifier for IDE config sections + self.recipe_id = self.bpn + "-" + self.package_arch + self.recipe_id_pretty = self.bpn + ": " + self.package_arch + + def append_to_bbappend(self, append_text): + with open(self.bbappend, 'a') as bbap: + bbap.write(append_text) + + def remove_from_bbappend(self, append_text): + with open(self.bbappend, 'r') as bbap: + text = bbap.read() + new_text = text.replace(append_text, '') + with open(self.bbappend, 'w') as bbap: + bbap.write(new_text) + + @staticmethod + def is_valid_shell_variable(var): + """Skip strange shell variables like systemd + + prevent from strange bugs because of strange variables which + are not used in this context but break various tools. 
+ """ + if RecipeModified.VALID_BASH_ENV_NAME_CHARS.match(var): + bb.debug(1, "ignoring variable: %s" % var) + return True + return False + + def debug_build_config(self, args): + """Explicitely set for example CMAKE_BUILD_TYPE to Debug if not defined otherwise""" + if self.build_tool is BuildTool.CMAKE: + append_text = os.linesep + \ + 'OECMAKE_ARGS:append = " -DCMAKE_BUILD_TYPE:STRING=Debug"' + os.linesep + if args.debug_build_config and not 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars: + self.cmake_cache_vars['CMAKE_BUILD_TYPE'] = { + "type": "STRING", + "value": "Debug", + } + self.append_to_bbappend(append_text) + elif 'CMAKE_BUILD_TYPE' in self.cmake_cache_vars: + del self.cmake_cache_vars['CMAKE_BUILD_TYPE'] + self.remove_from_bbappend(append_text) + elif self.build_tool is BuildTool.MESON: + append_text = os.linesep + 'MESON_BUILDTYPE = "debug"' + os.linesep + if args.debug_build_config and self.meson_buildtype != "debug": + self.mesonopts.replace( + '--buildtype ' + self.meson_buildtype, '--buildtype debug') + self.append_to_bbappend(append_text) + elif self.meson_buildtype == "debug": + self.mesonopts.replace( + '--buildtype debug', '--buildtype plain') + self.remove_from_bbappend(append_text) + elif args.debug_build_config: + logger.warn( + "--debug-build-config is not implemented for this build tool yet.") + + def solib_search_path(self, image): + """Search for debug symbols in the rootfs and rootfs-dbg + + The debug symbols of shared libraries which are provided by other packages + are grabbed from the -dbg packages in the rootfs-dbg. + + But most cross debugging tools like gdb, perf, and systemtap need to find + executable/library first and through it debuglink note find corresponding + symbols file. Therefore the library paths from the rootfs are added as well. + + Note: For the devtool modified recipe compiled from the IDE, the debug + symbols are taken from the unstripped binaries in the image folder. + Also, devtool deploy-target takes the files from the image folder. + debug symbols in the image folder refer to the corresponding source files + with absolute paths of the build machine. Debug symbols found in the + rootfs-dbg are relocated and contain paths which refer to the source files + installed on the target device e.g. /usr/src/... + """ + base_libdir = self.base_libdir.lstrip('/') + libdir = self.libdir.lstrip('/') + so_paths = [ + # debug symbols for package_debug_split_style: debug-with-srcpkg or .debug + os.path.join(image.rootfs_dbg, base_libdir, ".debug"), + os.path.join(image.rootfs_dbg, libdir, ".debug"), + # debug symbols for package_debug_split_style: debug-file-directory + os.path.join(image.rootfs_dbg, "usr", "lib", "debug"), + + # The binaries are required as well, the debug packages are not enough + # With image-combined-dbg.bbclass the binaries are copied into rootfs-dbg + os.path.join(image.rootfs_dbg, base_libdir), + os.path.join(image.rootfs_dbg, libdir), + # Without image-combined-dbg.bbclass the binaries are only in rootfs. + # Note: Stepping into source files located in rootfs-dbg does not + # work without image-combined-dbg.bbclass yet. + os.path.join(image.rootfs, base_libdir), + os.path.join(image.rootfs, libdir) + ] + return so_paths + + def solib_search_path_str(self, image): + """Return a : separated list of paths usable by GDB's set solib-search-path""" + return ':'.join(self.solib_search_path(image)) + + def __init_exported_variables(self, d): + """Find all variables with export flag set. 
+ + This allows to generate IDE configurations which compile with the same + environment as bitbake does. That's at least a reasonable default behavior. + """ + exported_vars = {} + + vars = (key for key in d.keys() if not key.startswith( + "__") and not d.getVarFlag(key, "func", False)) + for var in vars: + func = d.getVarFlag(var, "func", False) + if d.getVarFlag(var, 'python', False) and func: + continue + export = d.getVarFlag(var, "export", False) + unexport = d.getVarFlag(var, "unexport", False) + if not export and not unexport and not func: + continue + if unexport: + continue + + val = d.getVar(var) + if val is None: + continue + if set(var) & set("-.{}+"): + logger.warn( + "Warning: Found invalid character in variable name %s", str(var)) + continue + varExpanded = d.expand(var) + val = str(val) + + if not RecipeModified.is_valid_shell_variable(varExpanded): + continue + + if func: + code_line = "line: {0}, file: {1}\n".format( + d.getVarFlag(var, "lineno", False), + d.getVarFlag(var, "filename", False)) + val = val.rstrip('\n') + logger.warn("Warning: exported shell function %s() is not exported (%s)" % + (varExpanded, code_line)) + continue + + if export: + exported_vars[varExpanded] = val.strip() + continue + + self.exported_vars = exported_vars + + def __init_cmake_preset_cache(self, d): + """Get the arguments passed to cmake + + Replicate the cmake configure arguments with all details to + share on build folder between bitbake and SDK. + """ + site_file = os.path.join(self.workdir, 'site-file.cmake') + if os.path.exists(site_file): + print("Warning: site-file.cmake is not supported") + + cache_vars = {} + oecmake_args = d.getVar('OECMAKE_ARGS').split() + extra_oecmake = d.getVar('EXTRA_OECMAKE').split() + for param in oecmake_args + extra_oecmake: + d_pref = "-D" + if param.startswith(d_pref): + param = param[len(d_pref):] + else: + print("Error: expected a -D") + param_s = param.split('=', 1) + param_nt = param_s[0].split(':', 1) + + def handle_undefined_variable(var): + if var.startswith('${') and var.endswith('}'): + return '' + else: + return var + # Example: FOO=ON + if len(param_nt) == 1: + cache_vars[param_s[0]] = handle_undefined_variable(param_s[1]) + # Example: FOO:PATH=/tmp + elif len(param_nt) == 2: + cache_vars[param_nt[0]] = { + "type": param_nt[1], + "value": handle_undefined_variable(param_s[1]), + } + else: + print("Error: cannot parse %s" % param) + self.cmake_cache_vars = cache_vars + + def cmake_preset(self): + """Create a preset for cmake that mimics how bitbake calls cmake""" + toolchain_file = os.path.join(self.workdir, 'toolchain.cmake') + cmake_executable = os.path.join( + self.recipe_sysroot_native, 'usr', 'bin', 'cmake') + self.cmd_compile = cmake_executable + " --build --preset " + self.recipe_id + + preset_dict_configure = { + "name": self.recipe_id, + "displayName": self.recipe_id_pretty, + "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch), + "binaryDir": self.b, + "generator": self.oecmake_generator, + "toolchainFile": toolchain_file, + "cacheVariables": self.cmake_cache_vars, + "environment": self.exported_vars, + "cmakeExecutable": cmake_executable + } + + preset_dict_build = { + "name": self.recipe_id, + "displayName": self.recipe_id_pretty, + "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch), + "configurePreset": self.recipe_id, + "inheritConfigureEnvironment": True + } + + preset_dict_test = { + "name": self.recipe_id, + 
"displayName": self.recipe_id_pretty, + "description": "Bitbake build environment for the recipe %s compiled for %s" % (self.bpn, self.package_arch), + "configurePreset": self.recipe_id, + "inheritConfigureEnvironment": True + } + + preset_dict = { + "version": 3, # cmake 3.21, backward compatible with kirkstone + "configurePresets": [preset_dict_configure], + "buildPresets": [preset_dict_build], + "testPresets": [preset_dict_test] + } + + # Finally write the json file + json_file = 'CMakeUserPresets.json' + json_path = os.path.join(self.real_srctree, json_file) + logger.info("Updating CMake preset: %s (%s)" % (json_file, json_path)) + if not os.path.exists(self.real_srctree): + os.makedirs(self.real_srctree) + try: + with open(json_path) as f: + orig_dict = json.load(f) + except json.decoder.JSONDecodeError: + logger.info( + "Decoding %s failed. Probably because of comments in the json file" % json_path) + orig_dict = {} + except FileNotFoundError: + orig_dict = {} + + # Add or update the presets for the recipe and keep other presets + for k, v in preset_dict.items(): + if isinstance(v, list): + update_preset = v[0] + preset_added = False + if k in orig_dict: + for index, orig_preset in enumerate(orig_dict[k]): + if 'name' in orig_preset: + if orig_preset['name'] == update_preset['name']: + logger.debug("Updating preset: %s" % + orig_preset['name']) + orig_dict[k][index] = update_preset + preset_added = True + break + else: + logger.debug("keeping preset: %s" % + orig_preset['name']) + else: + logger.warn("preset without a name found") + if not preset_added: + if not k in orig_dict: + orig_dict[k] = [] + orig_dict[k].append(update_preset) + logger.debug("Added preset: %s" % + update_preset['name']) + else: + orig_dict[k] = v + + with open(json_path, 'w') as f: + json.dump(orig_dict, f, indent=4) + + def gen_meson_wrapper(self): + """Generate a wrapper script to call meson with the cross environment""" + bb.utils.mkdirhier(self.ide_sdk_scripts_dir) + meson_wrapper = os.path.join(self.ide_sdk_scripts_dir, 'meson') + meson_real = os.path.join( + self.recipe_sysroot_native, 'usr', 'bin', 'meson.real') + with open(meson_wrapper, 'w') as mwrap: + mwrap.write("#!/bin/sh" + os.linesep) + for var, val in self.exported_vars.items(): + mwrap.write('export %s="%s"' % (var, val) + os.linesep) + mwrap.write("unset CC CXX CPP LD AR NM STRIP" + os.linesep) + private_temp = os.path.join(self.b, "meson-private", "tmp") + mwrap.write('mkdir -p "%s"' % private_temp + os.linesep) + mwrap.write('export TMPDIR="%s"' % private_temp + os.linesep) + mwrap.write('exec "%s" "$@"' % meson_real + os.linesep) + st = os.stat(meson_wrapper) + os.chmod(meson_wrapper, st.st_mode | stat.S_IEXEC) + self.meson_wrapper = meson_wrapper + self.cmd_compile = meson_wrapper + " compile -C " + self.b + + def which(self, executable): + bin_path = shutil.which(executable, path=self.path) + if not bin_path: + raise DevtoolError( + 'Cannot find %s. Probably the recipe %s is not built yet.' 
% (executable, self.bpn))
+        return bin_path
+
+    @staticmethod
+    def is_elf_file(file_path):
+        with open(file_path, "rb") as f:
+            data = f.read(4)
+        if data == b'\x7fELF':
+            return True
+        return False
+
+    def find_installed_binaries(self):
+        """Find all executable ELF files in the image directory"""
+        binaries = []
+        d_len = len(self.d)
+        re_so = re.compile(r'.*\.so[.0-9]*$')
+        for root, _, files in os.walk(self.d, followlinks=False):
+            for file in files:
+                if re_so.match(file):
+                    continue
+                abs_name = os.path.join(root, file)
+                # Check the full path for symlinks, not just the bare file name
+                if os.path.islink(abs_name):
+                    continue
+                if os.access(abs_name, os.X_OK) and RecipeModified.is_elf_file(abs_name):
+                    binaries.append(abs_name[d_len:])
+        return sorted(binaries)
+
+    def gen_delete_package_dirs(self):
+        """Delete the folders of the package tasks
+
+        This is a workaround for an issue with recipes having their sources
+        downloaded as file://
+        Otherwise pseudo is likely to fail with a path mismatch like:
+        path mismatch [3 links]: ino 79147802 db
+        .../build/tmp/.../cmake-example/1.0/package/usr/src/debug/
+           cmake-example/1.0-r0/oe-local-files/cpp-example-lib.cpp
+        .../build/workspace/sources/cmake-example/oe-local-files/cpp-example-lib.cpp
+        Since the files are outdated anyway, let's delete them (also from
+        pseudo's db) to work around this issue.
+        """
+        cmd_lines = ['#!/bin/sh']
+
+        # Set up the appropriate environment
+        newenv = dict(os.environ)
+        for varvalue in self.fakerootenv.split():
+            if '=' in varvalue:
+                splitval = varvalue.split('=', 1)
+                newenv[splitval[0]] = splitval[1]
+
+        # Replicate the environment variables from bitbake
+        for var, val in newenv.items():
+            if not RecipeModified.is_valid_shell_variable(var):
+                continue
+            cmd_lines.append('%s="%s"' % (var, val))
+            cmd_lines.append('export %s' % var)
+
+        # Delete the folders
+        pkg_dirs = ' '.join([os.path.join(self.workdir, d) for d in [
+            "package", "packages-split", "pkgdata", "sstate-install-package", "debugsources.list", "*.spec"]])
+        cmd = "%s rm -rf %s" % (self.fakerootcmd, pkg_dirs)
+        cmd_lines.append('%s || { echo "%s failed"; exit 1; }' % (cmd, cmd))
+
+        return self.write_script(cmd_lines, 'delete_package_dirs')
+
+    def gen_deploy_target_script(self, args):
+        """Generate a script which does what devtool deploy-target does
+
+        This script is much quicker than devtool deploy-target because it
+        does not need to start a bitbake server. All information from tinfoil
+        is hard-coded in the generated script.
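+
+        Illustrative usage (a sketch; the real file name is produced by
+        write_script() and carries the recipe_id suffix, and the ssh
+        target is the one baked in from the ide-sdk command line):
+
+            $ <ide_sdk_scripts_dir>/deploy_target_<recipe_id>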
+ """ + cmd_lines = ['#!%s' % str(sys.executable)] + cmd_lines.append('import sys') + cmd_lines.append('devtool_sys_path = %s' % str(sys.path)) + cmd_lines.append('devtool_sys_path.reverse()') + cmd_lines.append('for p in devtool_sys_path:') + cmd_lines.append(' if p not in sys.path:') + cmd_lines.append(' sys.path.insert(0, p)') + cmd_lines.append('from devtool.deploy import deploy_no_d') + args_filter = ['debug', 'dry_run', 'key', 'no_check_space', 'no_host_check', + 'no_preserve', 'port', 'show_status', 'ssh_exec', 'strip', 'target'] + filtered_args_dict = {key: value for key, value in vars( + args).items() if key in args_filter} + cmd_lines.append('filtered_args_dict = %s' % str(filtered_args_dict)) + cmd_lines.append('class Dict2Class(object):') + cmd_lines.append(' def __init__(self, my_dict):') + cmd_lines.append(' for key in my_dict:') + cmd_lines.append(' setattr(self, key, my_dict[key])') + cmd_lines.append('filtered_args = Dict2Class(filtered_args_dict)') + cmd_lines.append( + 'setattr(filtered_args, "recipename", "%s")' % self.bpn) + cmd_lines.append('deploy_no_d("%s", "%s", "%s", "%s", "%s", "%s", %d, "%s", "%s", filtered_args)' % + (self.d, self.workdir, self.path, self.strip_cmd, + self.libdir, self.base_libdir, self.max_process, + self.fakerootcmd, self.fakerootenv)) + return self.write_script(cmd_lines, 'deploy_target') + + def gen_install_deploy_script(self, args): + """Generate a script which does install and deploy""" + cmd_lines = ['#!/bin/bash'] + + cmd_lines.append(self.gen_delete_package_dirs()) + + # . oe-init-build-env $BUILDDIR + # Note: Sourcing scripts with arguments requires bash + cmd_lines.append('cd "%s" || { echo "cd %s failed"; exit 1; }' % ( + self.oe_init_dir, self.oe_init_dir)) + cmd_lines.append('. "%s" "%s" || { echo ". %s %s failed"; exit 1; }' % ( + self.oe_init_build_env, self.topdir, self.oe_init_build_env, self.topdir)) + + # bitbake -c install + cmd_lines.append( + 'bitbake %s -c install --force || { echo "bitbake %s -c install --force failed"; exit 1; }' % (self.bpn, self.bpn)) + + # Self contained devtool deploy-target + cmd_lines.append(self.gen_deploy_target_script(args)) + + return self.write_script(cmd_lines, 'install_and_deploy') + + def write_script(self, cmd_lines, script_name): + bb.utils.mkdirhier(self.ide_sdk_scripts_dir) + script_name_arch = script_name + '_' + self.recipe_id + script_file = os.path.join(self.ide_sdk_scripts_dir, script_name_arch) + with open(script_file, 'w') as script_f: + script_f.write(os.linesep.join(cmd_lines)) + st = os.stat(script_file) + os.chmod(script_file, st.st_mode | stat.S_IEXEC) + return script_file + + @property + def oe_init_build_env(self): + """Find the oe-init-build-env used for this setup""" + oe_init_dir = self.oe_init_dir + if oe_init_dir: + return os.path.join(oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV) + return None + + @property + def oe_init_dir(self): + """Find the directory where the oe-init-build-env is located + + Assumption: There might be a layer with higher priority than poky + which provides to oe-init-build-env in the layer's toplevel folder. 
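+
+        A sketch with a hypothetical layout: for
+        BBLAYERS = "/work/meta-custom /work/poky/meta" the layers are
+        probed in reversed order, and the first git toplevel containing
+        an oe-init-build-env file (e.g. /work/poky) is used.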
+        """
+        if not self.__oe_init_dir:
+            for layer in reversed(self.bblayers):
+                result = subprocess.run(
+                    ['git', 'rev-parse', '--show-toplevel'], cwd=layer, capture_output=True)
+                if result.returncode == 0:
+                    oe_init_dir = result.stdout.decode('utf-8').strip()
+                    oe_init_path = os.path.join(
+                        oe_init_dir, RecipeModified.OE_INIT_BUILD_ENV)
+                    if os.path.exists(oe_init_path):
+                        logger.debug("Using %s from: %s" % (
+                            RecipeModified.OE_INIT_BUILD_ENV, oe_init_path))
+                        self.__oe_init_dir = oe_init_dir
+                        break
+            if not self.__oe_init_dir:
+                logger.error("Cannot find the bitbake top level folder")
+        return self.__oe_init_dir
+
+
+def ide_setup(args, config, basepath, workspace):
+    """Generate the IDE configuration for the workspace"""
+
+    # Explicitly passing some special recipes does not make sense
+    for recipe in args.recipenames:
+        if recipe in ['meta-ide-support', 'build-sysroots']:
+            raise DevtoolError("Invalid recipe: %s." % recipe)
+
+    # Collect information about tasks which need to be bitbaked
+    bootstrap_tasks = []
+    bootstrap_tasks_late = []
+    tinfoil = setup_tinfoil(config_only=False, basepath=basepath)
+    try:
+        # Define the mode depending on the recipes which need to be processed
+        recipes_image_names = []
+        recipes_modified_names = []
+        recipes_other_names = []
+        for recipe in args.recipenames:
+            try:
+                check_workspace_recipe(
+                    workspace, recipe, bbclassextend=True)
+                recipes_modified_names.append(recipe)
+            except DevtoolError:
+                recipe_d = parse_recipe(
+                    config, tinfoil, recipe, appends=True, filter_workspace=False)
+                if not recipe_d:
+                    raise DevtoolError("Parsing recipe %s failed" % recipe)
+                if bb.data.inherits_class('image', recipe_d):
+                    recipes_image_names.append(recipe)
+                else:
+                    recipes_other_names.append(recipe)
+
+        invalid_params = False
+        if args.mode == DevtoolIdeMode.shared:
+            if len(recipes_modified_names):
+                logger.error("Modified recipes %s cannot be handled in shared sysroots mode." % str(
+                    recipes_modified_names))
+                invalid_params = True
+        if args.mode == DevtoolIdeMode.modified:
+            if len(recipes_other_names):
+                logger.error("Recipes %s are not modified and can only be handled in shared sysroots mode." % str(
+                    recipes_other_names))
+                invalid_params = True
+            if len(recipes_image_names) != 1:
+                logger.error(
+                    "One image recipe is required as the rootfs for the remote development.")
+                invalid_params = True
+            for modified_recipe_name in recipes_modified_names:
+                if modified_recipe_name.startswith('nativesdk-') or modified_recipe_name.endswith('-native'):
+                    logger.error(
+                        "Only cross-compiled recipes are supported. %s is not cross-compiled." % modified_recipe_name)
+                    invalid_params = True
+
+        if invalid_params:
+            raise DevtoolError("Invalid parameters were passed.")
+
+        # For the shared sysroots mode, add all dependencies of all the images to the sysroots
+        # For the modified mode provide one rootfs and the corresponding debug symbols via rootfs-dbg
+        recipes_images = []
+        for recipes_image_name in recipes_image_names:
+            logger.info("Using image: %s" % recipes_image_name)
+            recipe_image = RecipeImage(recipes_image_name)
+            recipe_image.initialize(config, tinfoil)
+            bootstrap_tasks += recipe_image.bootstrap_tasks
+            recipes_images.append(recipe_image)
+
+        # Provide a Direct SDK with shared sysroots
+        recipes_not_modified = []
+        if args.mode == DevtoolIdeMode.shared:
+            ide_support = RecipeMetaIdeSupport()
+            ide_support.initialize(config, tinfoil)
+            bootstrap_tasks += ide_support.bootstrap_tasks
+
+            logger.info("Adding %s to the Direct SDK sysroots."
% + str(recipes_other_names)) + for recipe_name in recipes_other_names: + recipe_not_modified = RecipeNotModified(recipe_name) + bootstrap_tasks += recipe_not_modified.bootstrap_tasks + recipes_not_modified.append(recipe_not_modified) + + build_sysroots = RecipeBuildSysroots() + build_sysroots.initialize(config, tinfoil) + bootstrap_tasks_late += build_sysroots.bootstrap_tasks + shared_env = SharedSysrootsEnv() + shared_env.initialize(ide_support, build_sysroots) + + recipes_modified = [] + if args.mode == DevtoolIdeMode.modified: + logger.info("Setting up workspaces for modified recipe: %s" % + str(recipes_modified_names)) + gdbs_cross = {} + for recipe_name in recipes_modified_names: + recipe_modified = RecipeModified(recipe_name) + recipe_modified.initialize(config, workspace, tinfoil) + bootstrap_tasks += recipe_modified.bootstrap_tasks + recipes_modified.append(recipe_modified) + + if recipe_modified.target_arch not in gdbs_cross: + target_device = TargetDevice(args) + gdb_cross = RecipeGdbCross( + args, recipe_modified.target_arch, target_device) + gdb_cross.initialize(config, workspace, tinfoil) + bootstrap_tasks += gdb_cross.bootstrap_tasks + gdbs_cross[recipe_modified.target_arch] = gdb_cross + recipe_modified.gdb_cross = gdbs_cross[recipe_modified.target_arch] + + finally: + tinfoil.shutdown() + + if not args.skip_bitbake: + bb_cmd = 'bitbake ' + if args.bitbake_k: + bb_cmd += "-k " + bb_cmd_early = bb_cmd + ' '.join(bootstrap_tasks) + exec_build_env_command( + config.init_path, basepath, bb_cmd_early, watch=True) + if bootstrap_tasks_late: + bb_cmd_late = bb_cmd + ' '.join(bootstrap_tasks_late) + exec_build_env_command( + config.init_path, basepath, bb_cmd_late, watch=True) + + for recipe_image in recipes_images: + if (recipe_image.gdbserver_missing): + logger.warning( + "gdbserver not installed in image %s. Remote debugging will not be available" % recipe_image) + + if recipe_image.combine_dbg_image is False: + logger.warning( + 'IMAGE_CLASSES += "image-combined-dbg" is missing for image %s. Remote debugging will not find debug symbols from rootfs-dbg.' % recipe_image) + + # Instantiate the active IDE plugin + ide = ide_plugins[args.ide]() + if args.mode == DevtoolIdeMode.shared: + ide.setup_shared_sysroots(shared_env) + elif args.mode == DevtoolIdeMode.modified: + for recipe_modified in recipes_modified: + if recipe_modified.build_tool is BuildTool.CMAKE: + recipe_modified.cmake_preset() + if recipe_modified.build_tool is BuildTool.MESON: + recipe_modified.gen_meson_wrapper() + ide.setup_modified_recipe( + args, recipe_image, recipe_modified) + else: + raise DevtoolError("Must not end up here.") + + +def register_commands(subparsers, context): + """Register devtool subcommands from this plugin""" + + global ide_plugins + + # Search for IDE plugins in all sub-folders named ide_plugins where devtool seraches for plugins. + pluginpaths = [os.path.join(path, 'ide_plugins') + for path in context.pluginpaths] + ide_plugin_modules = [] + for pluginpath in pluginpaths: + scriptutils.load_plugins(logger, ide_plugin_modules, pluginpath) + + for ide_plugin_module in ide_plugin_modules: + if hasattr(ide_plugin_module, 'register_ide_plugin'): + ide_plugin_module.register_ide_plugin(ide_plugins) + # Sort plugins according to their priority. The first entry is the default IDE plugin. 
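+    # Illustrative example (hypothetical priorities): if ide_code reports
+    # priority 10 and ide_none priority 1, the sorted dict below becomes
+    # {"code": ..., "none": ...} and "code" ends up as the default --ide.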
+    ide_plugins = dict(sorted(ide_plugins.items(),
+                              key=lambda p: p[1].ide_plugin_priority(), reverse=True))
+
+    parser_ide_sdk = subparsers.add_parser('ide-sdk', group='working', order=50, formatter_class=RawTextHelpFormatter,
+                                           help='Set up the SDK and configure the IDE')
+    parser_ide_sdk.add_argument(
+        'recipenames', nargs='+', help='Generate an IDE configuration suitable to work on the given recipes.\n'
+        'Depending on the --mode parameter different types of SDKs and IDE configurations are generated.')
+    parser_ide_sdk.add_argument(
+        '-m', '--mode', type=DevtoolIdeMode, default=DevtoolIdeMode.modified,
+        help='Different SDK types are supported:\n'
+        '- "' + DevtoolIdeMode.modified.name + '" (default):\n'
+        '  devtool modify creates a workspace to work on the source code of a recipe.\n'
+        '  devtool ide-sdk builds the SDK and generates the IDE configuration(s) in the workspace directories\n'
+        '  Usage example:\n'
+        '    devtool modify cmake-example\n'
+        '    devtool ide-sdk cmake-example core-image-minimal\n'
+        '    Start the IDE in the workspace folder\n'
+        '  At least one devtool modified recipe plus one image recipe are required:\n'
+        '  The image recipe is used to generate the target image and the remote debug configuration.\n'
+        '- "' + DevtoolIdeMode.shared.name + '":\n'
+        '  Usage example:\n'
+        '    devtool ide-sdk -m ' + DevtoolIdeMode.shared.name + ' recipe(s)\n'
+        '  This command generates a cross-toolchain as well as the corresponding shared sysroot directories.\n'
+        '  To use this tool-chain the environment-* file found in the deploy..image folder needs to be sourced into a shell.\n'
+        '  In case of VSCode and cmake the tool-chain is also exposed as a cmake-kit')
+    default_ide = list(ide_plugins.keys())[0]
+    parser_ide_sdk.add_argument(
+        '-i', '--ide', choices=ide_plugins.keys(), default=default_ide,
+        help='Set up the configuration for this IDE (default: %s)' % default_ide)
+    parser_ide_sdk.add_argument(
+        '-t', '--target', default='root@192.168.7.2',
+        help='Live target machine running an ssh server: user@hostname.')
+    parser_ide_sdk.add_argument(
+        '-G', '--gdbserver-port-start', default="1234", help='Port where gdbserver is listening.')
+    parser_ide_sdk.add_argument(
+        '-c', '--no-host-check', help='Disable ssh host key checking', action='store_true')
+    parser_ide_sdk.add_argument(
+        '-e', '--ssh-exec', help='Executable to use in place of ssh')
+    parser_ide_sdk.add_argument(
+        '-P', '--port', help='Specify ssh port to use for connection to the target')
+    parser_ide_sdk.add_argument(
+        '-I', '--key', help='Specify ssh private key for connection to the target')
+    parser_ide_sdk.add_argument(
+        '--skip-bitbake', help='Generate IDE configuration but skip calling bitbake to update the SDK', action='store_true')
+    parser_ide_sdk.add_argument(
+        '-k', '--bitbake-k', help='Pass -k parameter to bitbake', action='store_true')
+    parser_ide_sdk.add_argument(
+        '--no-strip', help='Do not strip executables prior to deploy', dest='strip', action='store_false')
+    parser_ide_sdk.add_argument(
+        '-n', '--dry-run', help='List files to be deployed only', action='store_true')
+    parser_ide_sdk.add_argument(
+        '-s', '--show-status', help='Show progress/status output', action='store_true')
+    parser_ide_sdk.add_argument(
+        '-p', '--no-preserve', help='Do not preserve existing files', action='store_true')
+    parser_ide_sdk.add_argument(
+        '--no-check-space', help='Do not check for available space before deploying', action='store_true')
+    parser_ide_sdk.add_argument(
+        '--debug-build-config', help='Use debug build
flags, for example set CMAKE_BUILD_TYPE=Debug', action='store_true') + parser_ide_sdk.set_defaults(func=ide_setup) diff --git a/scripts/lib/devtool/menuconfig.py b/scripts/lib/devtool/menuconfig.py index 95384c5333..18daef30c3 100644 --- a/scripts/lib/devtool/menuconfig.py +++ b/scripts/lib/devtool/menuconfig.py @@ -3,6 +3,8 @@ # Copyright (C) 2018 Xilinx # Written by: Chandana Kalluri <ckalluri@xilinx.com> # +# SPDX-License-Identifier: MIT +# # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as # published by the Free Software Foundation. @@ -43,7 +45,7 @@ def menuconfig(args, config, basepath, workspace): return 1 check_workspace_recipe(workspace, args.component) - pn = rd.getVar('PN', True) + pn = rd.getVar('PN') if not rd.getVarFlag('do_menuconfig','task'): raise DevtoolError("This recipe does not support menuconfig option") diff --git a/scripts/lib/devtool/sdk.py b/scripts/lib/devtool/sdk.py index 3aa42a1466..9aefd7e354 100644 --- a/scripts/lib/devtool/sdk.py +++ b/scripts/lib/devtool/sdk.py @@ -207,7 +207,7 @@ def sdk_update(args, config, basepath, workspace): if not sstate_mirrors: with open(os.path.join(conf_dir, 'site.conf'), 'a') as f: f.write('SCONF_VERSION = "%s"\n' % site_conf_version) - f.write('SSTATE_MIRRORS_append = " file://.* %s/sstate-cache/PATH \\n "\n' % updateserver) + f.write('SSTATE_MIRRORS:append = " file://.* %s/sstate-cache/PATH"\n' % updateserver) finally: shutil.rmtree(tmpsdk_dir) @@ -300,7 +300,8 @@ def sdk_install(args, config, basepath, workspace): return 2 try: - exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots', watch=True) + exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_native_sysroot', watch=True) + exec_build_env_command(config.init_path, basepath, 'bitbake build-sysroots -c build_target_sysroot', watch=True) except bb.process.ExecutionError as e: raise DevtoolError('Failed to bitbake build-sysroots:\n%s' % (str(e))) diff --git a/scripts/lib/devtool/search.py b/scripts/lib/devtool/search.py index d24040df37..70b81cac5e 100644 --- a/scripts/lib/devtool/search.py +++ b/scripts/lib/devtool/search.py @@ -62,10 +62,11 @@ def search(args, config, basepath, workspace): with open(os.path.join(pkgdata_dir, 'runtime', pkg), 'r') as f: for line in f: if ': ' in line: - splitline = line.split(':', 1) + splitline = line.split(': ', 1) key = splitline[0] value = splitline[1].strip() - if key in ['PKG_%s' % pkg, 'DESCRIPTION', 'FILES_INFO'] or key.startswith('FILERPROVIDES_'): + key = key.replace(":" + pkg, "") + if key in ['PKG', 'DESCRIPTION', 'FILES_INFO', 'FILERPROVIDES']: if keyword_rc.search(value): match = True break diff --git a/scripts/lib/devtool/standard.py b/scripts/lib/devtool/standard.py index 5eba2191d9..b2e1a6ca3a 100644 --- a/scripts/lib/devtool/standard.py +++ b/scripts/lib/devtool/standard.py @@ -147,6 +147,8 @@ def add(args, config, basepath, workspace): extracmdopts += ' -a' if args.npm_dev: extracmdopts += ' --npm-dev' + if args.no_pypi: + extracmdopts += ' --no-pypi' if args.mirrors: extracmdopts += ' --mirrors' if args.srcrev: @@ -234,10 +236,14 @@ def add(args, config, basepath, workspace): if args.fetchuri and not args.no_git: setup_git_repo(srctree, args.version, 'devtool', d=tinfoil.config_data) - initial_rev = None + initial_rev = {} if os.path.exists(os.path.join(srctree, '.git')): (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) - initial_rev = stdout.rstrip() + 
initial_rev["."] = stdout.rstrip() + (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse HEAD` $PWD\'', cwd=srctree) + for line in stdout.splitlines(): + (rev, submodule) = line.split() + initial_rev[os.path.relpath(submodule, srctree)] = rev if args.src_subdir: srctree = os.path.join(srctree, args.src_subdir) @@ -251,16 +257,17 @@ def add(args, config, basepath, workspace): if b_is_s: f.write('EXTERNALSRC_BUILD = "%s"\n' % srctree) if initial_rev: - f.write('\n# initial_rev: %s\n' % initial_rev) + for key, value in initial_rev.items(): + f.write('\n# initial_rev %s: %s\n' % (key, value)) if args.binary: - f.write('do_install_append() {\n') + f.write('do_install:append() {\n') f.write(' rm -rf ${D}/.git\n') f.write(' rm -f ${D}/singletask.lock\n') f.write('}\n') if bb.data.inherits_class('npm', rd): - f.write('python do_configure_append() {\n') + f.write('python do_configure:append() {\n') f.write(' pkgdir = d.getVar("NPM_PACKAGE")\n') f.write(' lockfile = os.path.join(pkgdir, "singletask.lock")\n') f.write(' bb.utils.remove(lockfile)\n') @@ -318,10 +325,6 @@ def _check_compatible_recipe(pn, d): raise DevtoolError("The %s recipe is a packagegroup, and therefore is " "not supported by this tool" % pn, 4) - if bb.data.inherits_class('meta', d): - raise DevtoolError("The %s recipe is a meta-recipe, and therefore is " - "not supported by this tool" % pn, 4) - if bb.data.inherits_class('externalsrc', d) and d.getVar('EXTERNALSRC'): # Not an incompatibility error per se, so we don't pass the error code raise DevtoolError("externalsrc is currently enabled for the %s " @@ -357,7 +360,7 @@ def _move_file(src, dst, dry_run_outdir=None, base_outdir=None): bb.utils.mkdirhier(dst_d) shutil.move(src, dst) -def _copy_file(src, dst, dry_run_outdir=None): +def _copy_file(src, dst, dry_run_outdir=None, base_outdir=None): """Copy a file. 
Creates all the directory components of destination path.""" dry_run_suffix = ' (dry-run)' if dry_run_outdir else '' logger.debug('Copying %s to %s%s' % (src, dst, dry_run_suffix)) @@ -384,6 +387,19 @@ def _git_ls_tree(repodir, treeish='HEAD', recursive=False): ret[split[3]] = split[0:3] return ret +def _git_modified(repodir): + """List the difference between HEAD and the index""" + import bb + cmd = ['git', 'status', '--porcelain'] + out, _ = bb.process.run(cmd, cwd=repodir) + ret = [] + if out: + for line in out.split("\n"): + if line and not line.startswith('??'): + ret.append(line[3:]) + return ret + + def _git_exclude_path(srctree, path): """Return pathspec (list of paths) that excludes certain path""" # NOTE: "Filtering out" files/paths in this way is not entirely reliable - @@ -457,36 +473,6 @@ def sync(args, config, basepath, workspace): finally: tinfoil.shutdown() -def symlink_oelocal_files_srctree(rd,srctree): - import oe.patch - if os.path.abspath(rd.getVar('S')) == os.path.abspath(rd.getVar('WORKDIR')): - # If recipe extracts to ${WORKDIR}, symlink the files into the srctree - # (otherwise the recipe won't build as expected) - local_files_dir = os.path.join(srctree, 'oe-local-files') - addfiles = [] - for root, _, files in os.walk(local_files_dir): - relpth = os.path.relpath(root, local_files_dir) - if relpth != '.': - bb.utils.mkdirhier(os.path.join(srctree, relpth)) - for fn in files: - if fn == '.gitignore': - continue - destpth = os.path.join(srctree, relpth, fn) - if os.path.exists(destpth): - os.unlink(destpth) - if relpth != '.': - back_relpth = os.path.relpath(local_files_dir, root) - os.symlink('%s/oe-local-files/%s/%s' % (back_relpth, relpth, fn), destpth) - else: - os.symlink('oe-local-files/%s' % fn, destpth) - addfiles.append(os.path.join(relpth, fn)) - if addfiles: - bb.process.run('git add %s' % ' '.join(addfiles), cwd=srctree) - useroptions = [] - oe.patch.GitApplyTree.gitCommandUserOptions(useroptions, d=rd) - bb.process.run('git %s commit -m "Committing local file symlinks\n\n%s"' % (' '.join(useroptions), oe.patch.GitApplyTree.ignore_commit_prefix), cwd=srctree) - - def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, workspace, fixed_setup, d, tinfoil, no_overrides=False): """Extract sources of a recipe""" import oe.recipeutils @@ -523,8 +509,10 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works history = d.varhistory.variable('SRC_URI') for event in history: if not 'flag' in event: - if event['op'].startswith(('_append[', '_prepend[')): - extra_overrides.append(event['op'].split('[')[1].split(']')[0]) + if event['op'].startswith((':append[', ':prepend[')): + override = event['op'].split('[')[1].split(']')[0] + if not override.startswith('pn-'): + extra_overrides.append(override) # We want to remove duplicate overrides. If a recipe had multiple # SRC_URI_override += values it would cause mulitple instances of # overrides. 
This doesn't play nicely with things like creating a @@ -569,6 +557,9 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works logger.debug('writing append file %s' % appendfile) with open(appendfile, 'a') as f: f.write('###--- _extract_source\n') + f.write('deltask do_recipe_qa\n') + f.write('deltask do_recipe_qa_setscene\n') + f.write('ERROR_QA:remove = "patch-fuzz"\n') f.write('DEVTOOL_TEMPDIR = "%s"\n' % tempdir) f.write('DEVTOOL_DEVBRANCH = "%s"\n' % devbranch) if not is_kernel_yocto: @@ -586,6 +577,7 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works preservestampfile = os.path.join(sstate_manifests, 'preserve-stamps') with open(preservestampfile, 'w') as f: f.write(d.getVar('STAMP')) + tinfoil.modified_files() try: if is_kernel_yocto: # We need to generate the kernel config @@ -648,39 +640,26 @@ def _extract_source(srctree, keep_temp, devbranch, sync, config, basepath, works if os.path.exists(workshareddir) and (not os.listdir(workshareddir) or kernelVersion != staging_kerVer): shutil.rmtree(workshareddir) - oe.path.copyhardlinktree(srcsubdir,workshareddir) + oe.path.copyhardlinktree(srcsubdir, workshareddir) elif not os.path.exists(workshareddir): - oe.path.copyhardlinktree(srcsubdir,workshareddir) - - tempdir_localdir = os.path.join(tempdir, 'oe-local-files') - srctree_localdir = os.path.join(srctree, 'oe-local-files') + oe.path.copyhardlinktree(srcsubdir, workshareddir) if sync: - bb.process.run('git fetch file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) - - # Move oe-local-files directory to srctree - # As the oe-local-files is not part of the constructed git tree, - # remove them directly during the synchrounizating might surprise - # the users. Instead, we move it to oe-local-files.bak and remind - # user in the log message. 
- if os.path.exists(srctree_localdir + '.bak'): - shutil.rmtree(srctree_localdir, srctree_localdir + '.bak') - - if os.path.exists(srctree_localdir): - logger.info('Backing up current local file directory %s' % srctree_localdir) - shutil.move(srctree_localdir, srctree_localdir + '.bak') - - if os.path.exists(tempdir_localdir): - logger.info('Syncing local source files to srctree...') - shutil.copytree(tempdir_localdir, srctree_localdir) - else: - # Move oe-local-files directory to srctree - if os.path.exists(tempdir_localdir): - logger.info('Adding local source files to srctree...') - shutil.move(tempdir_localdir, srcsubdir) + try: + logger.info('Backing up current %s branch as branch: %s.bak' % (devbranch, devbranch)) + bb.process.run('git branch -f ' + devbranch + '.bak', cwd=srctree) + + # Use git fetch to update the source with the current recipe + # To be able to update the currently checked out branch with + # possibly new history (no fast-forward) git needs to be told + # that's ok + logger.info('Syncing source files including patches to git branch: %s' % devbranch) + bb.process.run('git fetch --update-head-ok --force file://' + srcsubdir + ' ' + devbranch + ':' + devbranch, cwd=srctree) + except bb.process.ExecutionError as e: + raise DevtoolError("Error when syncing source files to local checkout: %s" % str(e)) + else: shutil.move(srcsubdir, srctree) - symlink_oelocal_files_srctree(d,srctree) if is_kernel_yocto: logger.info('Copying kernel config to srctree') @@ -753,7 +732,7 @@ def get_staging_kver(srcdir): kerver = [] staging_kerVer="" if os.path.exists(srcdir) and os.listdir(srcdir): - with open(os.path.join(srcdir,"Makefile")) as f: + with open(os.path.join(srcdir, "Makefile")) as f: version = [next(f) for x in range(5)][1:4] for word in version: kerver.append(word.split('= ')[1].split('\n')[0]) @@ -763,10 +742,20 @@ def get_staging_kver(srcdir): def get_staging_kbranch(srcdir): staging_kbranch = "" if os.path.exists(srcdir) and os.listdir(srcdir): - (branch, _) = bb.process.run('git branch | grep \* | cut -d \' \' -f2', cwd=srcdir) + (branch, _) = bb.process.run('git branch | grep \\* | cut -d \' \' -f2', cwd=srcdir) staging_kbranch = "".join(branch.split('\n')[0]) return staging_kbranch +def get_real_srctree(srctree, s, workdir): + # Check that recipe isn't using a shared workdir + s = os.path.abspath(s) + workdir = os.path.abspath(workdir) + if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: + # Handle if S is set to a subdirectory of the source + srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] + srctree = os.path.join(srctree, srcsubdir) + return srctree + def modify(args, config, basepath, workspace): """Entry point for the devtool 'modify' subcommand""" import bb @@ -811,8 +800,8 @@ def modify(args, config, basepath, workspace): _check_compatible_recipe(pn, rd) - initial_rev = None - commits = [] + initial_revs = {} + commits = {} check_commits = False if bb.data.inherits_class('kernel-yocto', rd): @@ -824,36 +813,24 @@ def modify(args, config, basepath, workspace): staging_kerVer = get_staging_kver(srcdir) staging_kbranch = get_staging_kbranch(srcdir) if (os.path.exists(srcdir) and os.listdir(srcdir)) and (kernelVersion in staging_kerVer and staging_kbranch == kbranch): - oe.path.copyhardlinktree(srcdir,srctree) + oe.path.copyhardlinktree(srcdir, srctree) workdir = rd.getVar('WORKDIR') + unpackdir = rd.getVar('UNPACKDIR') srcsubdir = rd.getVar('S') - localfilesdir = os.path.join(srctree,'oe-local-files') - # Move local source files 
into separate subdir - recipe_patches = [os.path.basename(patch) for patch in oe.recipeutils.get_recipe_patches(rd)] - local_files = oe.recipeutils.get_recipe_local_files(rd) + localfilesdir = os.path.join(srctree, 'oe-local-files') - for key in local_files.copy(): - if key.endswith('scc'): - sccfile = open(local_files[key], 'r') - for l in sccfile: - line = l.split() - if line and line[0] in ('kconf', 'patch'): - cfg = os.path.join(os.path.dirname(local_files[key]), line[-1]) - if not cfg in local_files.values(): - local_files[line[-1]] = cfg - shutil.copy2(cfg, workdir) - sccfile.close() - - # Ignore local files with subdir={BP} + # Add locally copied files to gitignore as we add back to the metadata directly + local_files = oe.recipeutils.get_recipe_local_files(rd) srcabspath = os.path.abspath(srcsubdir) - local_files = [fname for fname in local_files if os.path.exists(os.path.join(workdir, fname)) and (srcabspath == workdir or not os.path.join(workdir, fname).startswith(srcabspath + os.sep))] + local_files = [fname for fname in local_files if + os.path.exists(os.path.join(unpackdir, fname)) and + srcabspath == unpackdir] if local_files: - for fname in local_files: - _move_file(os.path.join(workdir, fname), os.path.join(srctree, 'oe-local-files', fname)) - with open(os.path.join(srctree, 'oe-local-files', '.gitignore'), 'w') as f: - f.write('# Ignore local files, by default. Remove this file ''if you want to commit the directory to Git\n*\n') - - symlink_oelocal_files_srctree(rd,srctree) + with open(os.path.join(srctree, '.gitignore'), 'a+') as f: + f.write('# Ignore local files, by default. Remove following lines' + 'if you want to commit the directory to Git\n') + for fname in local_files: + f.write('%s\n' % fname) task = 'do_configure' res = tinfoil.build_targets(pn, task, handle_events=True) @@ -861,22 +838,33 @@ def modify(args, config, basepath, workspace): # Copy .config to workspace kconfpath = rd.getVar('B') logger.info('Copying kernel config to workspace') - shutil.copy2(os.path.join(kconfpath, '.config'),srctree) + shutil.copy2(os.path.join(kconfpath, '.config'), srctree) # Set this to true, we still need to get initial_rev # by parsing the git repo args.no_extract = True if not args.no_extract: - initial_rev, _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) - if not initial_rev: + initial_revs["."], _ = _extract_source(srctree, args.keep_temp, args.branch, False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides) + if not initial_revs["."]: return 1 logger.info('Source tree extracted to %s' % srctree) + if os.path.exists(os.path.join(srctree, '.git')): # Get list of commits since this revision - (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_rev, cwd=srctree) - commits = stdout.split() + (stdout, _) = bb.process.run('git rev-list --reverse %s..HEAD' % initial_revs["."], cwd=srctree) + commits["."] = stdout.split() check_commits = True + try: + (stdout, _) = bb.process.run('git submodule --quiet foreach --recursive \'echo `git rev-parse devtool-base` $PWD\'', cwd=srctree) + except bb.process.ExecutionError: + stdout = "" + for line in stdout.splitlines(): + (rev, submodule_path) = line.split() + submodule = os.path.relpath(submodule_path, srctree) + initial_revs[submodule] = rev + (stdout, _) = bb.process.run('git rev-list --reverse devtool-base..HEAD', cwd=submodule_path) + commits[submodule] = 
stdout.split() else: if os.path.exists(os.path.join(srctree, '.git')): # Check if it's a tree previously extracted by us. This is done @@ -893,11 +881,11 @@ def modify(args, config, basepath, workspace): for line in stdout.splitlines(): if line.startswith('*'): (stdout, _) = bb.process.run('git rev-parse devtool-base', cwd=srctree) - initial_rev = stdout.rstrip() - if not initial_rev: + initial_revs["."] = stdout.rstrip() + if "." not in initial_revs: # Otherwise, just grab the head revision (stdout, _) = bb.process.run('git rev-parse HEAD', cwd=srctree) - initial_rev = stdout.rstrip() + initial_revs["."] = stdout.rstrip() branch_patches = {} if check_commits: @@ -914,67 +902,74 @@ def modify(args, config, basepath, workspace): seen_patches = [] for branch in branches: branch_patches[branch] = [] - (stdout, _) = bb.process.run('git log devtool-base..%s' % branch, cwd=srctree) - for line in stdout.splitlines(): - line = line.strip() - if line.startswith(oe.patch.GitApplyTree.patch_line_prefix): - origpatch = line[len(oe.patch.GitApplyTree.patch_line_prefix):].split(':', 1)[-1].strip() - if not origpatch in seen_patches: - seen_patches.append(origpatch) - branch_patches[branch].append(origpatch) + (stdout, _) = bb.process.run('git rev-list devtool-base..%s' % branch, cwd=srctree) + for sha1 in stdout.splitlines(): + notes = oe.patch.GitApplyTree.getNotes(srctree, sha1.strip()) + origpatch = notes.get(oe.patch.GitApplyTree.original_patch) + if origpatch and origpatch not in seen_patches: + seen_patches.append(origpatch) + branch_patches[branch].append(origpatch) # Need to grab this here in case the source is within a subdirectory srctreebase = srctree - - # Check that recipe isn't using a shared workdir - s = os.path.abspath(rd.getVar('S')) - workdir = os.path.abspath(rd.getVar('WORKDIR')) - if s.startswith(workdir) and s != workdir and os.path.dirname(s) != workdir: - # Handle if S is set to a subdirectory of the source - srcsubdir = os.path.relpath(s, workdir).split(os.sep, 1)[1] - srctree = os.path.join(srctree, srcsubdir) + srctree = get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR')) bb.utils.mkdirhier(os.path.dirname(appendfile)) with open(appendfile, 'w') as f: - f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n') + # if not present, add type=git-dependency to the secondary sources + # (non local files) so they can be extracted correctly when building a recipe after + # doing a devtool modify on it + src_uri = rd.getVar('SRC_URI').split() + src_uri_append = [] + src_uri_remove = [] + + # Assume first entry is main source extracted in ${S} so skip it + src_uri = src_uri[1::] + + # Add "type=git-dependency" to all non local sources + for url in src_uri: + if not url.startswith('file://') and not 'type=' in url: + src_uri_remove.append(url) + src_uri_append.append('%s;type=git-dependency' % url) + + if src_uri_remove: + f.write('SRC_URI:remove = "%s"\n' % ' '.join(src_uri_remove)) + f.write('SRC_URI:append = " %s"\n\n' % ' '.join(src_uri_append)) + + f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n') # Local files can be modified/tracked in separate subdir under srctree # Mostly useful for packages with S != WORKDIR - f.write('FILESPATH_prepend := "%s:"\n' % + f.write('FILESPATH:prepend := "%s:"\n' % os.path.join(srctreebase, 'oe-local-files')) f.write('# srctreebase: %s\n' % srctreebase) f.write('\ninherit externalsrc\n') f.write('# NOTE: We use pn- overrides here to avoid affecting multiple variants in the case where the recipe uses BBCLASSEXTEND\n') - 
f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree)) + f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree)) b_is_s = use_external_build(args.same_dir, args.no_same_dir, rd) if b_is_s: - f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree)) + f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree)) if bb.data.inherits_class('kernel', rd): - f.write('SRCTREECOVEREDTASKS = "do_validate_branches do_kernel_checkout ' - 'do_fetch do_unpack do_kernel_configcheck"\n') - f.write('\ndo_patch[noexec] = "1"\n') - f.write('\ndo_configure_append() {\n' - ' cp ${B}/.config ${S}/.config.baseline\n' - ' ln -sfT ${B}/.config ${S}/.config.new\n' - '}\n') - f.write('\ndo_kernel_configme_prepend() {\n' + f.write('\ndo_kernel_configme:prepend() {\n' ' if [ -e ${S}/.config ]; then\n' ' mv ${S}/.config ${S}/.config.old\n' ' fi\n' '}\n') - if rd.getVarFlag('do_menuconfig','task'): - f.write('\ndo_configure_append() {\n' - ' if [ ! ${DEVTOOL_DISABLE_MENUCONFIG} ]; then\n' - ' cp ${B}/.config ${S}/.config.baseline\n' - ' ln -sfT ${B}/.config ${S}/.config.new\n' + if rd.getVarFlag('do_menuconfig', 'task'): + f.write('\ndo_configure:append() {\n' + ' if [ ${@oe.types.boolean(d.getVar("KCONFIG_CONFIG_ENABLE_MENUCONFIG"))} = True ]; then\n' + ' cp ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.baseline\n' + ' ln -sfT ${KCONFIG_CONFIG_ROOTDIR}/.config ${S}/.config.new\n' ' fi\n' '}\n') - if initial_rev: - f.write('\n# initial_rev: %s\n' % initial_rev) - for commit in commits: - f.write('# commit: %s\n' % commit) + if initial_revs: + for name, rev in initial_revs.items(): + f.write('\n# initial_rev %s: %s\n' % (name, rev)) + if name in commits: + for commit in commits[name]: + f.write('# commit %s: %s\n' % (name, commit)) if branch_patches: for branch in branch_patches: if branch == args.branch: @@ -1197,44 +1192,56 @@ def _get_patchset_revs(srctree, recipe_path, initial_rev=None, force_patch_refre branchname = stdout.rstrip() # Parse initial rev from recipe if not specified - commits = [] + commits = {} patches = [] + initial_revs = {} with open(recipe_path, 'r') as f: for line in f: - if line.startswith('# initial_rev:'): - if not initial_rev: - initial_rev = line.split(':')[-1].strip() - elif line.startswith('# commit:') and not force_patch_refresh: - commits.append(line.split(':')[-1].strip()) - elif line.startswith('# patches_%s:' % branchname): - patches = line.split(':')[-1].strip().split(',') - - update_rev = initial_rev - changed_revs = None - if initial_rev: + pattern = r'^#\s.*\s(.*):\s([0-9a-fA-F]+)$' + match = re.search(pattern, line) + if match: + name = match.group(1) + rev = match.group(2) + if line.startswith('# initial_rev'): + if not (name == "." 
and initial_rev):
+                        initial_revs[name] = rev
+                elif line.startswith('# commit') and not force_patch_refresh:
+                    if name not in commits:
+                        commits[name] = [rev]
+                    else:
+                        commits[name].append(rev)
+                elif line.startswith('# patches_%s:' % branchname):
+                    patches = line.split(':')[-1].strip().split(',')
+
+    update_revs = dict(initial_revs)
+    changed_revs = {}
+    for name, rev in initial_revs.items():
         # Find first actually changed revision
         stdout, _ = bb.process.run('git rev-list --reverse %s..HEAD' %
-                                   initial_rev, cwd=srctree)
+                                   rev, cwd=os.path.join(srctree, name))
         newcommits = stdout.split()
-        for i in range(min(len(commits), len(newcommits))):
-            if newcommits[i] == commits[i]:
-                update_rev = commits[i]
+        if name in commits:
+            for i in range(min(len(commits[name]), len(newcommits))):
+                if newcommits[i] == commits[name][i]:
+                    update_revs[name] = commits[name][i]

         try:
             stdout, _ = bb.process.run('git cherry devtool-patched',
-                                        cwd=srctree)
+                                        cwd=os.path.join(srctree, name))
         except bb.process.ExecutionError as err:
             stdout = None

         if stdout is not None and not force_patch_refresh:
-            changed_revs = []
             for line in stdout.splitlines():
                 if line.startswith('+ '):
                     rev = line.split()[1]
                     if rev in newcommits:
-                        changed_revs.append(rev)
+                        if name not in changed_revs:
+                            changed_revs[name] = [rev]
+                        else:
+                            changed_revs[name].append(rev)

-    return initial_rev, update_rev, changed_revs, patches
+    return initial_revs, update_revs, changed_revs, patches

 def _remove_file_entries(srcuri, filelist):
     """Remove file:// entries from SRC_URI"""
@@ -1289,14 +1296,17 @@
             raise

-def _export_patches(srctree, rd, start_rev, destdir, changed_revs=None):
+def _export_patches(srctree, rd, start_revs, destdir, changed_revs=None):
     """Export patches from srctree to given location.
        Returns three-tuple of dicts:
          1. updated - patches that already exist in SRCURI
          2. added - new patches that don't exist in SRCURI
          3. removed - patches that exist in SRCURI but not in exported patches
-      In each dict the key is the 'basepath' of the URI and value is the
-      absolute path to the existing file in recipe space (if any).
+      In each dict the key is the 'basepath' of the URI and value is:
+        - for the updated and added dicts, a dict with 2 optional keys:
+           - 'path': the absolute path to the existing file in recipe space (if any)
+           - 'patchdir': the directory in which the patch should be applied (if any)
+        - for the removed dict, the absolute path to the existing file in recipe space
     """
     import oe.recipeutils
     from oe.patch import GitApplyTree
@@ -1310,54 +1320,60 @@

     # Generate patches from Git, exclude local files directory
     patch_pathspec = _git_exclude_path(srctree, 'oe-local-files')
-    GitApplyTree.extractPatches(srctree, start_rev, destdir, patch_pathspec)
-
-    new_patches = sorted(os.listdir(destdir))
-    for new_patch in new_patches:
-        # Strip numbering from patch names. If it's a git sequence named patch,
-        # the numbers might not match up since we are starting from a different
-        # revision This does assume that people are using unique shortlog
-        # values, but they ought to be anyway...
- new_basename = seqpatch_re.match(new_patch).group(2) - match_name = None - for old_patch in existing_patches: - old_basename = seqpatch_re.match(old_patch).group(2) - old_basename_splitext = os.path.splitext(old_basename) - if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename: - old_patch_noext = os.path.splitext(old_patch)[0] - match_name = old_patch_noext - break - elif new_basename == old_basename: - match_name = old_patch - break - if match_name: - # Rename patch files - if new_patch != match_name: - bb.utils.rename(os.path.join(destdir, new_patch), - os.path.join(destdir, match_name)) - # Need to pop it off the list now before checking changed_revs - oldpath = existing_patches.pop(old_patch) - if changed_revs is not None: - # Avoid updating patches that have not actually changed - with open(os.path.join(destdir, match_name), 'r') as f: - firstlineitems = f.readline().split() - # Looking for "From <hash>" line - if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40: - if not firstlineitems[1] in changed_revs: - continue - # Recompress if necessary - if oldpath.endswith(('.gz', '.Z')): - bb.process.run(['gzip', match_name], cwd=destdir) - if oldpath.endswith('.gz'): - match_name += '.gz' - else: - match_name += '.Z' - elif oldpath.endswith('.bz2'): - bb.process.run(['bzip2', match_name], cwd=destdir) - match_name += '.bz2' - updated[match_name] = oldpath - else: - added[new_patch] = None + GitApplyTree.extractPatches(srctree, start_revs, destdir, patch_pathspec) + for dirpath, dirnames, filenames in os.walk(destdir): + new_patches = filenames + reldirpath = os.path.relpath(dirpath, destdir) + for new_patch in new_patches: + # Strip numbering from patch names. If it's a git sequence named patch, + # the numbers might not match up since we are starting from a different + # revision This does assume that people are using unique shortlog + # values, but they ought to be anyway... 
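+            # Illustrative example (hypothetical names): a regenerated
+            # "0002-fix-build.patch" and an existing "0001-fix-build.patch"
+            # both strip to "fix-build.patch", so the new file is renamed
+            # to the existing name below instead of being added twice.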
+            new_basename = seqpatch_re.match(new_patch).group(2)
+            match_name = None
+            for old_patch in existing_patches:
+                old_basename = seqpatch_re.match(old_patch).group(2)
+                old_basename_splitext = os.path.splitext(old_basename)
+                if old_basename.endswith(('.gz', '.bz2', '.Z')) and old_basename_splitext[0] == new_basename:
+                    old_patch_noext = os.path.splitext(old_patch)[0]
+                    match_name = old_patch_noext
+                    break
+                elif new_basename == old_basename:
+                    match_name = old_patch
+                    break
+            if match_name:
+                # Rename patch files
+                if new_patch != match_name:
+                    bb.utils.rename(os.path.join(destdir, new_patch),
+                                    os.path.join(destdir, match_name))
+                # Need to pop it off the list now before checking changed_revs
+                oldpath = existing_patches.pop(old_patch)
+                if changed_revs is not None and dirpath in changed_revs:
+                    # Avoid updating patches that have not actually changed
+                    with open(os.path.join(dirpath, match_name), 'r') as f:
+                        firstlineitems = f.readline().split()
+                        # Looking for "From <hash>" line
+                        if len(firstlineitems) > 1 and len(firstlineitems[1]) == 40:
+                            if not firstlineitems[1] in changed_revs[dirpath]:
+                                continue
+                # Recompress if necessary
+                if oldpath.endswith(('.gz', '.Z')):
+                    bb.process.run(['gzip', match_name], cwd=destdir)
+                    if oldpath.endswith('.gz'):
+                        match_name += '.gz'
+                    else:
+                        match_name += '.Z'
+                elif oldpath.endswith('.bz2'):
+                    bb.process.run(['bzip2', match_name], cwd=destdir)
+                    match_name += '.bz2'
+                updated[match_name] = {'path' : oldpath}
+                if reldirpath != ".":
+                    updated[match_name]['patchdir'] = reldirpath
+            else:
+                added[new_patch] = {}
+                if reldirpath != ".":
+                    added[new_patch]['patchdir'] = reldirpath
+
     return (updated, added, existing_patches)
@@ -1394,8 +1410,10 @@
        1. updated - files that already exist in SRCURI
        2. added - new files that don't exist in SRCURI
        3. removed - files that exist in SRCURI but not in exported files
-      In each dict the key is the 'basepath' of the URI and value is the
-      absolute path to the existing file in recipe space (if any).
+      In each dict the key is the 'basepath' of the URI and value is:
+        - for the updated and added dicts, a dict with 1 optional key:
+           - 'path': the absolute path to the existing file in recipe space (if any)
+        - for the removed dict, the absolute path to the existing file in recipe space
     """
     import oe.recipeutils
@@ -1404,28 +1422,45 @@
     # Instead they are directly copied over the original source files (in
     # recipe space).
     existing_files = oe.recipeutils.get_recipe_local_files(rd)
+    new_set = None
     updated = OrderedDict()
     added = OrderedDict()
     removed = OrderedDict()
-    local_files_dir = os.path.join(srctreebase, 'oe-local-files')
-    git_files = _git_ls_tree(srctree)
-    if 'oe-local-files' in git_files:
-        # If tracked by Git, take the files from srctree HEAD. First get
-        # the tree object of the directory
-        tmp_index = os.path.join(srctree, '.git', 'index.tmp.devtool')
-        tree = git_files['oe-local-files'][2]
-        bb.process.run(['git', 'checkout', tree, '--', '.'], cwd=srctree,
-                       env=dict(os.environ, GIT_WORK_TREE=destdir,
-                                GIT_INDEX_FILE=tmp_index))
-        new_set = list(_git_ls_tree(srctree, tree, True).keys())
-    elif os.path.isdir(local_files_dir):
-        # If not tracked by Git, just copy from working copy
-        new_set = _ls_tree(local_files_dir)
-        bb.process.run(['cp', '-ax',
-                        os.path.join(local_files_dir, '.'), destdir])
-    else:
-        new_set = []
+
+    # Get current branch and return early with empty lists
+    # if on one of the override branches
+    # (local files are provided only for the main branch and processing
+    # them against lists from recipe overrides will result in mismatches
+    # and broken modifications to recipes).
+    stdout, _ = bb.process.run('git rev-parse --abbrev-ref HEAD',
+                               cwd=srctree)
+    branchname = stdout.rstrip()
+    if branchname.startswith(override_branch_prefix):
+        return (updated, added, removed)
+
+    files = _git_modified(srctree)
+    #if not files:
+    #    files = _ls_tree(srctree)
+    for f in files:
+        fullfile = os.path.join(srctree, f)
+        if os.path.exists(os.path.join(fullfile, ".git")):
+            # submodules handled elsewhere
+            continue
+        if f not in existing_files:
+            added[f] = {}
+            if os.path.isdir(os.path.join(srctree, f)):
+                shutil.copytree(fullfile, os.path.join(destdir, f))
+            else:
+                shutil.copy2(fullfile, os.path.join(destdir, f))
+        elif not os.path.exists(fullfile):
+            removed[f] = existing_files[f]
+        elif f in existing_files:
+            updated[f] = {'path' : existing_files[f]}
+            if os.path.isdir(os.path.join(srctree, f)):
+                shutil.copytree(fullfile, os.path.join(destdir, f))
+            else:
+                shutil.copy2(fullfile, os.path.join(destdir, f))

     # Special handling for kernel config
     if bb.data.inherits_class('kernel-yocto', rd):
         fragment_fn = 'devtool-fragment.cfg'
         fragment_path = os.path.join(destdir, fragment_fn)
         if _create_kconfig_diff(srctree, rd, fragment_path):
             if os.path.exists(fragment_path):
-                if fragment_fn not in new_set:
-                    new_set.append(fragment_fn)
-                # Copy fragment to local-files
-                if os.path.isdir(local_files_dir):
-                    shutil.copy2(fragment_path, local_files_dir)
+                if fragment_fn in removed:
+                    del removed[fragment_fn]
+                if fragment_fn not in updated and fragment_fn not in added:
+                    added[fragment_fn] = {}
             else:
-                if fragment_fn in new_set:
-                    new_set.remove(fragment_fn)
-                # Remove fragment from local-files
-                if os.path.exists(os.path.join(local_files_dir, fragment_fn)):
-                    os.unlink(os.path.join(local_files_dir, fragment_fn))
+                if fragment_fn in updated:
+                    removed[fragment_fn] = updated[fragment_fn]
+                    del updated[fragment_fn]

     # Special handling for cml1, ccmake, etc bbclasses that generated
     # configuration fragment files that are consumed as source files
     for frag_class, frag_name in [("cml1", "devtool-fragment.cfg")]:
         if bb.data.inherits_class(frag_class, rd):
             srcpath = os.path.join(rd.getVar('WORKDIR'), frag_name)
             if os.path.exists(srcpath):
-                if frag_name not in new_set:
-                    new_set.append(frag_name)
+                if frag_name in removed:
+                    del removed[frag_name]
+                if frag_name not in updated:
+                    added[frag_name] = {}
                 # copy fragment into destdir
                 shutil.copy2(srcpath, destdir)
-                # copy fragment into local files if exists
-                if os.path.isdir(local_files_dir):
-                    shutil.copy2(srcpath, local_files_dir)
-
-    if new_set is not None:
-        for fname in new_set:
-            if fname in existing_files:
-                origpath =
-                workpath = os.path.join(local_files_dir, fname)
-                if not filecmp.cmp(origpath, workpath):
-                    updated[fname] = origpath
-            elif fname != '.gitignore':
-                added[fname] = None
-
-    workdir = rd.getVar('WORKDIR')
-    s = rd.getVar('S')
-    if not s.endswith(os.sep):
-        s += os.sep
-
-    if workdir != s:
-        # Handle files where subdir= was specified
-        for fname in list(existing_files.keys()):
-            # FIXME handle both subdir starting with BP and not?
-            fworkpath = os.path.join(workdir, fname)
-            if fworkpath.startswith(s):
-                fpath = os.path.join(srctree, os.path.relpath(fworkpath, s))
-                if os.path.exists(fpath):
-                    origpath = existing_files.pop(fname)
-                    if not filecmp.cmp(origpath, fpath):
-                        updated[fpath] = origpath
-
-    removed = existing_files
     return (updated, added, removed)
@@ -1513,6 +1516,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
     recipedir = os.path.basename(recipefile)
     logger.info('Updating SRCREV in recipe %s%s' % (recipedir, dry_run_suffix))
 
+    # Get original SRCREV
+    old_srcrev = rd.getVar('SRCREV') or ''
+    if old_srcrev == "INVALID":
+        raise DevtoolError('Update mode srcrev is only valid for recipes fetched from an SCM repository')
+    old_srcrev = {'.': old_srcrev}
+
     # Get HEAD revision
     try:
         stdout, _ = bb.process.run('git rev-parse HEAD', cwd=srctree)
@@ -1539,13 +1548,12 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
         if not no_remove:
             # Find list of existing patches in recipe file
             patches_dir = tempfile.mkdtemp(dir=tempdir)
-            old_srcrev = rd.getVar('SRCREV') or ''
             upd_p, new_p, del_p = _export_patches(srctree, rd, old_srcrev,
                                                   patches_dir)
             logger.debug('Patches: update %s, new %s, delete %s' % (dict(upd_p), dict(new_p), dict(del_p)))
 
             # Remove deleted local files and "overlapping" patches
-            remove_files = list(del_f.values()) + list(upd_p.values()) + list(del_p.values())
+            remove_files = list(del_f.values()) + [value["path"] for value in upd_p.values() if "path" in value] + [value["path"] for value in del_p.values() if "path" in value]
             if remove_files:
                 removedentries = _remove_file_entries(srcuri, remove_files)[0]
                 update_srcuri = True
@@ -1559,14 +1567,14 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
             patchfields['SRC_URI'] = '\\\n    '.join(srcuri)
         if dry_run_outdir:
             logger.info('Creating bbappend (dry-run)')
-        else:
-            appendfile, destpath = oe.recipeutils.bbappend_recipe(
-                rd, appendlayerdir, files, wildcardver=wildcard_version,
-                extralines=patchfields, removevalues=removevalues,
-                redirect_output=dry_run_outdir)
+        appendfile, destpath = oe.recipeutils.bbappend_recipe(
+            rd, appendlayerdir, files, wildcardver=wildcard_version,
+            extralines=patchfields, removevalues=removevalues,
+            redirect_output=dry_run_outdir)
     else:
         files_dir = _determine_files_dir(rd)
-        for basepath, path in upd_f.items():
+        for basepath, param in upd_f.items():
+            path = param['path']
             logger.info('Updating file %s%s' % (basepath, dry_run_suffix))
             if os.path.isabs(basepath):
                 # Original file (probably with subdir pointing inside source tree)
@@ -1576,7 +1584,7 @@ def _update_recipe_srcrev(recipename, workspace, srctree, rd, appendlayerdir, wi
                 _move_file(os.path.join(local_files_dir, basepath), path,
                            dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
             update_srcuri = True
-        for basepath, path in new_f.items():
+        for basepath, param in new_f.items():
             logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
             _move_file(os.path.join(local_files_dir, basepath),
                        os.path.join(files_dir, basepath),
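The bbappend_recipe() call above can now run unconditionally because redirect_output diverts all writes during a dry run; a rough usage sketch (the output directory name is invented):

    # Sketch only: with redirect_output set, the bbappend and copied files are
    # written under that directory instead of into the target layer.
    appendfile, destpath = oe.recipeutils.bbappend_recipe(
        rd, appendlayerdir, files,
        wildcardver=wildcard_version,
        extralines=patchfields,
        removevalues=removevalues,
        redirect_output='/tmp/devtool-dry-run')  # hypothetical dry-run directory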
@@ -1608,9 +1617,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
     if not os.path.exists(append):
         raise DevtoolError('unable to find workspace bbappend for recipe %s' %
                            recipename)
+    srctreebase = workspace[recipename]['srctreebase']
+    relpatchdir = os.path.relpath(srctreebase, srctree)
+    if relpatchdir == '.':
+        patchdir_params = {}
+    else:
+        patchdir_params = {'patchdir': relpatchdir}
 
-    initial_rev, update_rev, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
-    if not initial_rev:
+    def srcuri_entry(basepath, patchdir_params):
+        if patchdir_params:
+            paramstr = ';' + ';'.join('%s=%s' % (k, v) for k, v in patchdir_params.items())
+        else:
+            paramstr = ''
+        return 'file://%s%s' % (basepath, paramstr)
+
+    initial_revs, update_revs, changed_revs, filter_patches = _get_patchset_revs(srctree, append, initial_rev, force_patch_refresh)
+    if not initial_revs:
         raise DevtoolError('Unable to find initial revision - please specify '
                            'it with --initial-rev')
@@ -1624,61 +1646,69 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
     tempdir = tempfile.mkdtemp(prefix='devtool')
     try:
         local_files_dir = tempfile.mkdtemp(dir=tempdir)
-        if filter_patches:
-            upd_f = {}
-            new_f = {}
-            del_f = {}
-        else:
-            srctreebase = workspace[recipename]['srctreebase']
-            upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
-
-        remove_files = []
-        if not no_remove:
-            # Get all patches from source tree and check if any should be removed
-            all_patches_dir = tempfile.mkdtemp(dir=tempdir)
-            _, _, del_p = _export_patches(srctree, rd, initial_rev,
-                                          all_patches_dir)
-            # Remove deleted local files and patches
-            remove_files = list(del_f.values()) + list(del_p.values())
+        upd_f, new_f, del_f = _export_local_files(srctree, rd, local_files_dir, srctreebase)
 
         # Get updated patches from source tree
         patches_dir = tempfile.mkdtemp(dir=tempdir)
-        upd_p, new_p, _ = _export_patches(srctree, rd, update_rev,
+        upd_p, new_p, _ = _export_patches(srctree, rd, update_revs,
                                           patches_dir, changed_revs)
+        # Get all patches from source tree and check if any should be removed
+        all_patches_dir = tempfile.mkdtemp(dir=tempdir)
+        _, _, del_p = _export_patches(srctree, rd, initial_revs,
+                                      all_patches_dir)
         logger.debug('Pre-filtering: update: %s, new: %s' % (dict(upd_p), dict(new_p)))
         if filter_patches:
             new_p = OrderedDict()
            upd_p = OrderedDict((k, v) for k, v in upd_p.items() if k in filter_patches)
-            remove_files = [f for f in remove_files if f in filter_patches]
+            del_p = OrderedDict((k, v) for k, v in del_p.items() if k in filter_patches)
+        remove_files = []
+        if not no_remove:
+            # Remove deleted local files and patches
+            remove_files = list(del_f.values()) + list(del_p.values())
         updatefiles = False
         updaterecipe = False
         destpath = None
         srcuri = (rd.getVar('SRC_URI', False) or '').split()
+
         if appendlayerdir:
             files = OrderedDict((os.path.join(local_files_dir, key), val) for
                                 key, val in list(upd_f.items()) + list(new_f.items()))
             files.update(OrderedDict((os.path.join(patches_dir, key), val) for
                                      key, val in list(upd_p.items()) + list(new_p.items())))
+
+            params = []
+            for file, param in files.items():
+                patchdir_param = dict(patchdir_params)
+                patchdir = param.get('patchdir', ".")
+                if patchdir != ".":
+                    if patchdir_param:
+                        patchdir_param['patchdir'] += patchdir
+                    else:
+                        patchdir_param['patchdir'] = patchdir
+                params.append(patchdir_param)
+
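The srcuri_entry() helper defined above simply appends any parameters to the file:// URI; for example (illustrative values):

    # Hypothetical results of the helper defined above
    srcuri_entry('fix.patch', {})                      # -> 'file://fix.patch'
    srcuri_entry('fix.patch', {'patchdir': '../src'})  # -> 'file://fix.patch;patchdir=../src'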
         if files or remove_files:
             removevalues = None
             if remove_files:
                 removedentries, remaining = _remove_file_entries(
                                                 srcuri, remove_files)
                 if removedentries or remaining:
-                    remaining = ['file://' + os.path.basename(item) for
+                    remaining = [srcuri_entry(os.path.basename(item), patchdir_params) for
                                  item in remaining]
                     removevalues = {'SRC_URI': removedentries + remaining}
             appendfile, destpath = oe.recipeutils.bbappend_recipe(
                 rd, appendlayerdir, files,
                 wildcardver=wildcard_version,
                 removevalues=removevalues,
-                redirect_output=dry_run_outdir)
+                redirect_output=dry_run_outdir,
+                params=params)
         else:
             logger.info('No patches or local source files needed updating')
     else:
         # Update existing files
         files_dir = _determine_files_dir(rd)
-        for basepath, path in upd_f.items():
+        for basepath, param in upd_f.items():
+            path = param['path']
             logger.info('Updating file %s' % basepath)
             if os.path.isabs(basepath):
                 # Original file (probably with subdir pointing inside source tree)
@@ -1689,14 +1719,22 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
                 _move_file(os.path.join(local_files_dir, basepath), path,
                            dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
             updatefiles = True
-        for basepath, path in upd_p.items():
-            patchfn = os.path.join(patches_dir, basepath)
+        for basepath, param in upd_p.items():
+            path = param['path']
+            patchdir_param = dict(patchdir_params)
+            patchdir = param.get('patchdir', ".")
+            if patchdir != ".":
+                if patchdir_param:
+                    patchdir_param['patchdir'] += patchdir
+                else:
+                    patchdir_param['patchdir'] = patchdir
+            patchfn = os.path.join(patches_dir, patchdir, basepath)
             if os.path.dirname(path) + '/' == dl_dir:
                 # This is a downloaded patch file - we now need to
                 # replace the entry in SRC_URI with our local version
                 logger.info('Replacing remote patch %s with updated local version' % basepath)
                 path = os.path.join(files_dir, basepath)
-                _replace_srcuri_entry(srcuri, basepath, 'file://%s' % basepath)
+                _replace_srcuri_entry(srcuri, basepath, srcuri_entry(basepath, patchdir_param))
                 updaterecipe = True
             else:
                 logger.info('Updating patch %s%s' % (basepath, dry_run_suffix))
@@ -1704,21 +1742,29 @@ def _update_recipe_patch(recipename, workspace, srctree, rd, appendlayerdir, wil
                            dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
             updatefiles = True
         # Add any new files
-        for basepath, path in new_f.items():
+        for basepath, param in new_f.items():
             logger.info('Adding new file %s%s' % (basepath, dry_run_suffix))
             _move_file(os.path.join(local_files_dir, basepath),
                        os.path.join(files_dir, basepath),
                        dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
-            srcuri.append('file://%s' % basepath)
+            srcuri.append(srcuri_entry(basepath, patchdir_params))
             updaterecipe = True
-        for basepath, path in new_p.items():
+        for basepath, param in new_p.items():
+            patchdir = param.get('patchdir', ".")
             logger.info('Adding new patch %s%s' % (basepath, dry_run_suffix))
-            _move_file(os.path.join(patches_dir, basepath),
+            _move_file(os.path.join(patches_dir, patchdir, basepath),
                        os.path.join(files_dir, basepath),
                        dry_run_outdir=dry_run_outdir, base_outdir=recipedir)
-            srcuri.append('file://%s' % basepath)
+            params = dict(patchdir_params)
+            if patchdir != ".":
+                if params:
+                    params['patchdir'] += patchdir
+                else:
+                    params['patchdir'] = patchdir
+
+            srcuri.append(srcuri_entry(basepath, params))
             updaterecipe = True
         # Update recipe, if needed
         if _remove_file_entries(srcuri, remove_files)[0]:
@@ -1775,6 +1821,8 @@ def _update_recipe(recipename, workspace, rd, mode, appendlayerdir, wildcard_ver
         for line in stdout.splitlines():
             branchname = line[2:]
             if line.startswith('* '):
+                if 'HEAD' in line:
+                    raise DevtoolError('Detached HEAD - please check out a branch, e.g., "devtool"')
                 startbranch = branchname
             if branchname.startswith(override_branch_prefix):
                 override_branches.append(branchname)
@@ -1964,9 +2012,19 @@ def _reset(recipes, no_clean, remove_work, config, basepath, workspace):
                     shutil.rmtree(srctreebase)
                 else:
                     # We don't want to risk wiping out any work in progress
-                    logger.info('Leaving source tree %s as-is; if you no '
-                                'longer need it then please delete it manually'
-                                % srctreebase)
+                    if srctreebase.startswith(os.path.join(config.workspace_path, 'sources')):
+                        from datetime import datetime
+                        preservesrc = os.path.join(config.workspace_path, 'attic', 'sources', "{}.{}".format(pn, datetime.now().strftime("%Y%m%d%H%M%S")))
+                        logger.info('Preserving source tree in %s\nIf you no '
                                    'longer need it then please delete it manually.\n'
+                                    'It is also possible to reuse it via the devtool source tree argument.'
+                                    % preservesrc)
+                        bb.utils.mkdirhier(os.path.dirname(preservesrc))
+                        shutil.move(srctreebase, preservesrc)
+                    else:
+                        logger.info('Leaving source tree %s as-is; if you no '
+                                    'longer need it then please delete it manually'
+                                    % srctreebase)
             else:
                 # This is unlikely, but if it's empty we can just remove it
                 os.rmdir(srctreebase)
@@ -2226,6 +2284,7 @@ def register_commands(subparsers, context):
     group.add_argument('--no-same-dir', help='Force build in a separate build directory', action="store_true")
     parser_add.add_argument('--fetch', '-f', help='Fetch the specified URI and extract it to create the source tree (deprecated - pass as positional argument instead)', metavar='URI')
     parser_add.add_argument('--npm-dev', help='For npm, also fetch devDependencies', action="store_true")
+    parser_add.add_argument('--no-pypi', help='Do not inherit pypi class', action="store_true")
     parser_add.add_argument('--version', '-V', help='Version to use within recipe (PV)')
     parser_add.add_argument('--no-git', '-g', help='If fetching source, do not set up source tree as a git repository', action="store_true")
     group = parser_add.add_mutually_exclusive_group()
diff --git a/scripts/lib/devtool/upgrade.py b/scripts/lib/devtool/upgrade.py
index 24e3700ece..eed3a49e4b 100644
--- a/scripts/lib/devtool/upgrade.py
+++ b/scripts/lib/devtool/upgrade.py
@@ -32,9 +32,11 @@ def _run(cmd, cwd=''):
 
 def _get_srctree(tmpdir):
     srctree = tmpdir
-    dirs = scriptutils.filter_src_subdirs(tmpdir)
+    dirs = os.listdir(tmpdir)
     if len(dirs) == 1:
         srctree = os.path.join(tmpdir, dirs[0])
+    else:
+        raise DevtoolError("Cannot determine where the source tree is after unpacking in {}: {}".format(tmpdir, dirs))
     return srctree
 
 def _copy_source_code(orig, dest):
@@ -74,21 +76,21 @@ def _rename_recipe_dirs(oldpv, newpv, path):
             bb.utils.rename(os.path.join(path, oldfile),
                             os.path.join(path, newfile))
 
-def _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path):
+def _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path):
     oldrecipe = os.path.basename(oldrecipe)
     if oldrecipe.endswith('_%s.bb' % oldpv):
-        newrecipe = '%s_%s.bb' % (bpn, newpv)
+        newrecipe = '%s_%s.bb' % (pn, newpv)
         if oldrecipe != newrecipe:
             shutil.move(os.path.join(path, oldrecipe), os.path.join(path, newrecipe))
     else:
         newrecipe = oldrecipe
     return os.path.join(path, newrecipe)
 
-def _rename_recipe_files(oldrecipe, bpn, oldpv, newpv, path):
+def _rename_recipe_files(oldrecipe, pn, oldpv, newpv, path):
     _rename_recipe_dirs(oldpv, newpv, path)
-    return _rename_recipe_file(oldrecipe, bpn, oldpv, newpv, path)
+    return _rename_recipe_file(oldrecipe, pn, oldpv, newpv, path)
 
-def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d):
+def _write_append(rc, srctreebase, srctree, same_dir, no_same_dir, revs, copied, workspace, d):
     """Writes an append file"""
     if not os.path.exists(rc):
         raise DevtoolError("bbappend not created because %s does not exist" % rc)
@@ -103,36 +105,38 @@ def _write_append(rc, srctree, same_dir, no_same_dir, rev, copied, workspace, d)
     pn = d.getVar('PN')
     af = os.path.join(appendpath, '%s.bbappend' % brf)
     with open(af, 'w') as f:
-        f.write('FILESEXTRAPATHS_prepend := "${THISDIR}/${PN}:"\n\n')
+        f.write('FILESEXTRAPATHS:prepend := "${THISDIR}/${PN}:"\n\n')
+        # Local files can be modified/tracked in separate subdir under srctree
+        # Mostly useful for packages with S != WORKDIR
+        f.write('FILESPATH:prepend := "%s:"\n' %
+                os.path.join(srctreebase, 'oe-local-files'))
+        f.write('# srctreebase: %s\n' % srctreebase)
         f.write('inherit externalsrc\n')
-        f.write(('# NOTE: We use pn- overrides here to avoid affecting'
+        f.write(('# NOTE: We use pn- overrides here to avoid affecting '
                  'multiple variants in the case where the recipe uses BBCLASSEXTEND\n'))
-        f.write('EXTERNALSRC_pn-%s = "%s"\n' % (pn, srctree))
+        f.write('EXTERNALSRC:pn-%s = "%s"\n' % (pn, srctree))
 
         b_is_s = use_external_build(same_dir, no_same_dir, d)
         if b_is_s:
-            f.write('EXTERNALSRC_BUILD_pn-%s = "%s"\n' % (pn, srctree))
+            f.write('EXTERNALSRC_BUILD:pn-%s = "%s"\n' % (pn, srctree))
         f.write('\n')
-        if rev:
-            f.write('# initial_rev: %s\n' % rev)
+        if revs:
+            for name, rev in revs.items():
+                f.write('# initial_rev %s: %s\n' % (name, rev))
         if copied:
             f.write('# original_path: %s\n' % os.path.dirname(d.getVar('FILE')))
             f.write('# original_files: %s\n' % ' '.join(copied))
     return af
 
-def _cleanup_on_error(rf, srctree):
-    rfp = os.path.split(rf)[0] # recipe folder
-    rfpp = os.path.split(rfp)[0] # recipes folder
-    if os.path.exists(rfp):
-        shutil.rmtree(rfp)
-    if not len(os.listdir(rfpp)):
-        os.rmdir(rfpp)
+def _cleanup_on_error(rd, srctree):
+    if os.path.exists(rd):
+        shutil.rmtree(rd)
     srctree = os.path.abspath(srctree)
     if os.path.exists(srctree):
         shutil.rmtree(srctree)
 
-def _upgrade_error(e, rf, srctree, keep_failure=False, extramsg=None):
-    if rf and not keep_failure:
-        _cleanup_on_error(rf, srctree)
+def _upgrade_error(e, rd, srctree, keep_failure=False, extramsg=None):
+    if not keep_failure:
+        _cleanup_on_error(rd, srctree)
     logger.error(e)
     if extramsg:
         logger.error(extramsg)
@@ -179,12 +183,16 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
     uri, rev = _get_uri(crd)
     if srcrev:
         rev = srcrev
+    paths = [srctree]
     if uri.startswith('git://') or uri.startswith('gitsm://'):
         __run('git fetch')
         __run('git checkout %s' % rev)
         __run('git tag -f devtool-base-new')
-        md5 = None
-        sha256 = None
+        __run('git submodule update --recursive')
+        __run('git submodule foreach \'git tag -f devtool-base-new\'')
+        (stdout, _) = __run('git submodule --quiet foreach \'echo $sm_path\'')
+        paths += [os.path.join(srctree, p) for p in stdout.splitlines()]
+        checksums = {}
         _, _, _, _, _, params = bb.fetch2.decodeurl(uri)
         srcsubdir_rel = params.get('destsuffix', 'git')
         if not srcbranch:
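As a hedged sketch of the data built up from here on: revs maps a path relative to srctree to the HEAD revision of that repository (the top-level tree plus any submodules collected into paths above), while checksums is populated by the fetch step for non-git sources, outside the hunks shown here (all values invented):

    # Hypothetical shapes of the values flowing out of _extract_new_source()
    revs = {'.': '0123abcd...', 'plugins/libfoo': '4567ef01...'}
    checksums = {'sha256sum': 'deadbeef...'}  # assumed to be filled in by the fetcher for tarball sources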
@@ -192,14 +200,15 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
             get_branch = [x.strip() for x in check_branch.splitlines()]
             # Remove HEAD reference point and drop remote prefix
             get_branch = [x.split('/', 1)[1] for x in get_branch if not x.startswith('origin/HEAD')]
-            if 'master' in get_branch:
-                # If it is master, we do not need to append 'branch=master' as this is default.
-                # Even with the case where get_branch has multiple objects, if 'master' is one
-                # of them, we should default take from 'master'
-                srcbranch = ''
-            elif len(get_branch) == 1:
-                # If 'master' isn't in get_branch and get_branch contains only ONE object, then store result into 'srcbranch'
+            if len(get_branch) == 1:
+                # If srcrev is on only ONE branch, then use that branch
                 srcbranch = get_branch[0]
+            elif 'main' in get_branch:
+                # If srcrev is on multiple branches, then choose 'main' if it is one of them
+                srcbranch = 'main'
+            elif 'master' in get_branch:
+                # Otherwise choose 'master' if it is one of the branches
+                srcbranch = 'master'
             else:
                 # If get_branch contains more than one object, then display an error and exit.
                 mbrch = '\n  ' + '\n  '.join(get_branch)
@@ -216,9 +225,6 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
     if ftmpdir and keep_temp:
         logger.info('Fetch temp directory is %s' % ftmpdir)
 
-    md5 = checksums['md5sum']
-    sha256 = checksums['sha256sum']
-
     tmpsrctree = _get_srctree(tmpdir)
     srctree = os.path.abspath(srctree)
     srcsubdir_rel = os.path.relpath(tmpsrctree, tmpdir)
@@ -252,30 +258,50 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
         __run('git %s commit -q -m "Commit of upstream changes at version %s" --allow-empty' % (' '.join(useroptions), newpv))
         __run('git tag -f devtool-base-%s' % newpv)
 
-    (stdout, _) = __run('git rev-parse HEAD')
-    rev = stdout.rstrip()
+    revs = {}
+    for path in paths:
+        (stdout, _) = _run('git rev-parse HEAD', cwd=path)
+        revs[os.path.relpath(path, srctree)] = stdout.rstrip()
 
     if no_patch:
         patches = oe.recipeutils.get_recipe_patches(crd)
         if patches:
            logger.warning('By user choice, the following patches will NOT be applied to the new source tree:\n  %s' % '\n  '.join([os.path.basename(patch) for patch in patches]))
     else:
-        __run('git checkout devtool-patched -b %s' % branch)
-        skiptag = False
-        try:
-            __run('git rebase %s' % rev)
-        except bb.process.ExecutionError as e:
-            skiptag = True
-            if 'conflict' in e.stdout:
-                logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
-            else:
-                logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
-        if not skiptag:
-            if uri.startswith('git://') or uri.startswith('gitsm://'):
-                suffix = 'new'
-            else:
-                suffix = newpv
-            __run('git tag -f devtool-patched-%s' % suffix)
+        for path in paths:
+            _run('git checkout devtool-patched -b %s' % branch, cwd=path)
+            (stdout, _) = _run('git branch --list devtool-override-*', cwd=path)
+            branches_to_rebase = [branch] + stdout.split()
+            target_branch = revs[os.path.relpath(path, srctree)]
+
+            # There is a bug (or feature?) in git rebase where if a commit with
+            # a note is fully rebased away by being part of an old commit, the
+            # note is still attached to the old commit. Avoid this by making
+            # sure all old devtool related commits have a note attached to them
+            # (this assumes git config notes.rewriteMode is set to ignore).
+            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
+            for rev in stdout.splitlines():
+                if not oe.patch.GitApplyTree.getNotes(path, rev):
+                    oe.patch.GitApplyTree.addNote(path, rev, "dummy")
+
+            for b in branches_to_rebase:
+                logger.info("Rebasing {} onto {}".format(b, target_branch))
+                _run('git checkout %s' % b, cwd=path)
+                try:
+                    _run('git rebase %s' % target_branch, cwd=path)
+                except bb.process.ExecutionError as e:
+                    if 'conflict' in e.stdout:
+                        logger.warning('Command \'%s\' failed:\n%s\n\nYou will need to resolve conflicts in order to complete the upgrade.' % (e.command, e.stdout.rstrip()))
+                        _run('git rebase --abort', cwd=path)
+                    else:
+                        logger.warning('Command \'%s\' failed:\n%s' % (e.command, e.stdout))
+
+            # Remove any dummy notes added above.
+            (stdout, _) = __run('git rev-list devtool-base..%s' % target_branch)
+            for rev in stdout.splitlines():
+                oe.patch.GitApplyTree.removeNote(path, rev, "dummy")
+
+            _run('git checkout %s' % branch, cwd=path)
 
     if tmpsrctree:
         if keep_temp:
@@ -285,7 +311,7 @@ def _extract_new_source(newpv, srctree, no_patch, srcrev, srcbranch, branch, kee
         if tmpdir != tmpsrctree:
             shutil.rmtree(tmpdir)
 
-    return (rev, md5, sha256, srcbranch, srcsubdir_rel)
+    return (revs, checksums, srcbranch, srcsubdir_rel)
 
 def _add_license_diff_to_recipe(path, diff):
     notice_text = """# FIXME: the LIC_FILES_CHKSUM values have been updated by 'devtool upgrade'.
@@ -306,22 +332,22 @@ def _add_license_diff_to_recipe(path, diff):
         f.write("\n#\n\n".encode())
         f.write(orig_content)
 
-def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
+def _create_new_recipe(newpv, checksums, srcrev, srcbranch, srcsubdir_old, srcsubdir_new, workspace, tinfoil, rd, license_diff, new_licenses, srctree, keep_failure):
     """Creates the new recipe under workspace"""
 
-    bpn = rd.getVar('BPN')
-    path = os.path.join(workspace, 'recipes', bpn)
+    pn = rd.getVar('PN')
+    path = os.path.join(workspace, 'recipes', pn)
     bb.utils.mkdirhier(path)
     copied, _ = oe.recipeutils.copy_recipe_files(rd, path, all_variants=True)
     if not copied:
-        raise DevtoolError('Internal error - no files were copied for recipe %s' % bpn)
+        raise DevtoolError('Internal error - no files were copied for recipe %s' % pn)
     logger.debug('Copied %s to %s' % (copied, path))
 
     oldpv = rd.getVar('PV')
     if not newpv:
         newpv = oldpv
     origpath = rd.getVar('FILE')
-    fullpath = _rename_recipe_files(origpath, bpn, oldpv, newpv, path)
+    fullpath = _rename_recipe_files(origpath, pn, oldpv, newpv, path)
     logger.debug('Upgraded %s => %s' % (origpath, fullpath))
 
     newvalues = {}
@@ -337,7 +363,10 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
             replacing = True
             new_src_uri = []
             for entry in src_uri:
-                scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
+                try:
+                    scheme, network, path, user, passwd, params = bb.fetch2.decodeurl(entry)
+                except bb.fetch2.MalformedUrl as e:
+                    raise DevtoolError("Could not decode SRC_URI: {}".format(e))
                 if replacing and scheme in ['git', 'gitsm']:
                     branch = params.get('branch', 'master')
                     if rd.expand(branch) != srcbranch:
@@ -375,30 +404,39 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
                     addnames.append(params['name'])
     # Find what's been set in the original recipe
     oldnames = []
+    oldsums = []
     noname = False
     for varflag in rd.getVarFlags('SRC_URI'):
-        if varflag.endswith(('.md5sum', '.sha256sum')):
-            name = varflag.rsplit('.', 1)[0]
-            if name not in oldnames:
-                oldnames.append(name)
-        elif varflag in ['md5sum', 'sha256sum']:
-            noname = True
+        for checksum in checksums:
+            if varflag.endswith('.' + checksum):
+                name = varflag.rsplit('.', 1)[0]
+                if name not in oldnames:
+                    oldnames.append(name)
+                oldsums.append(checksum)
+            elif varflag == checksum:
+                noname = True
+                oldsums.append(checksum)
     # Even if SRC_URI has named entries it doesn't have to actually use the name
     if noname and addnames and addnames[0] not in oldnames:
         addnames = []
     # Drop any old names (the name actually might include ${PV})
     for name in oldnames:
         if name not in newnames:
-            newvalues['SRC_URI[%s.md5sum]' % name] = None
-            newvalues['SRC_URI[%s.sha256sum]' % name] = None
+            for checksum in oldsums:
+                newvalues['SRC_URI[%s.%s]' % (name, checksum)] = None
 
-    if sha256:
-        if addnames:
-            nameprefix = '%s.' % addnames[0]
-        else:
-            nameprefix = ''
+    nameprefix = '%s.' % addnames[0] if addnames else ''
+
+    # md5sum is deprecated, remove any traces of it. If it was the only old
+    # checksum, then replace it with the default checksums.
+    if 'md5sum' in oldsums:
         newvalues['SRC_URI[%smd5sum]' % nameprefix] = None
-        newvalues['SRC_URI[%ssha256sum]' % nameprefix] = sha256
+        oldsums.remove('md5sum')
+        if not oldsums:
+            oldsums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
+
+    for checksum in oldsums:
+        newvalues['SRC_URI[%s%s]' % (nameprefix, checksum)] = checksums[checksum]
 
     if srcsubdir_new != srcsubdir_old:
         s_subdir_old = os.path.relpath(os.path.abspath(rd.getVar('S')), rd.getVar('WORKDIR'))
@@ -423,10 +461,11 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
         newvalues["LIC_FILES_CHKSUM"] = newlicchksum
         _add_license_diff_to_recipe(fullpath, license_diff)
 
+    tinfoil.modified_files()
     try:
         rd = tinfoil.parse_recipe_file(fullpath, False)
     except bb.tinfoil.TinfoilCommandFailed as e:
-        _upgrade_error(e, fullpath, srctree, keep_failure, 'Parsing of upgraded recipe failed')
+        _upgrade_error(e, os.path.dirname(fullpath), srctree, keep_failure, 'Parsing of upgraded recipe failed')
     oe.recipeutils.patch_recipe(rd, fullpath, newvalues)
 
     return fullpath, copied
@@ -435,7 +474,7 @@ def _create_new_recipe(newpv, md5, sha256, srcrev, srcbranch, srcsubdir_old, src
 def _check_git_config():
     def getconfig(name):
         try:
-            value = bb.process.run('git config --global %s' % name)[0].strip()
+            value = bb.process.run('git config %s' % name)[0].strip()
         except bb.process.ExecutionError as e:
             if e.exitcode == 1:
                 value = None
@@ -495,6 +534,15 @@ def _generate_license_diff(old_licenses, new_licenses):
             diff = diff + line
     return diff
 
+def _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil):
+    tasks = []
+    for task in (rd.getVar('RECIPE_UPGRADE_EXTRA_TASKS') or '').split():
+        logger.info('Running extra recipe upgrade task: %s' % task)
+        res = tinfoil.build_targets(pn, task, handle_events=True)
+
+        if not res:
+            raise DevtoolError('Running extra recipe upgrade task %s for %s failed' % (task, pn))
+
 def upgrade(args, config, basepath, workspace):
     """Entry point for the devtool 'upgrade' subcommand"""
@@ -522,6 +570,8 @@ def upgrade(args, config, basepath, workspace):
     else:
         srctree = standard.get_default_srctree(config, pn)
 
+    srctree_s = standard.get_real_srctree(srctree, rd.getVar('S'), rd.getVar('WORKDIR'))
+
     # try to automatically discover latest version and revision if not provided on command line
     if not args.version and not args.srcrev:
         version_info = oe.recipeutils.get_recipe_upstream_version(rd)
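To illustrate the varflag scan above in isolation (all inputs invented), the loop classifies SRC_URI checksum flags into named and unnamed entries:

    # Standalone sketch of the checksum varflag scan, runnable on its own
    checksums = {'sha256sum': 'deadbeef', 'sha1sum': 'cafef00d'}
    varflags = ['archive.sha256sum', 'md5sum']  # as rd.getVarFlags('SRC_URI') might return
    oldnames, oldsums, noname = [], [], False
    for varflag in varflags:
        for checksum in checksums:
            if varflag.endswith('.' + checksum):
                name = varflag.rsplit('.', 1)[0]
                if name not in oldnames:
                    oldnames.append(name)
                oldsums.append(checksum)
            elif varflag == checksum:
                noname = True
                oldsums.append(checksum)
    print(oldnames, oldsums, noname)  # ['archive'] ['sha256sum'] False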
@@ -551,30 +601,34 @@ def upgrade(args, config, basepath, workspace):
     try:
         logger.info('Extracting current version source...')
         rev1, srcsubdir1 = standard._extract_source(srctree, False, 'devtool-orig', False, config, basepath, workspace, args.fixed_setup, rd, tinfoil, no_overrides=args.no_overrides)
-        old_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or ""))
+        old_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
         logger.info('Extracting upgraded version source...')
-        rev2, md5, sha256, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
+        rev2, checksums, srcbranch, srcsubdir2 = _extract_new_source(args.version, srctree, args.no_patch,
                                                     args.srcrev, args.srcbranch, args.branch, args.keep_temp,
                                                     tinfoil, rd)
-        new_licenses = _extract_licenses(srctree, (rd.getVar('LIC_FILES_CHKSUM') or ""))
+        new_licenses = _extract_licenses(srctree_s, (rd.getVar('LIC_FILES_CHKSUM') or ""))
         license_diff = _generate_license_diff(old_licenses, new_licenses)
-        rf, copied = _create_new_recipe(args.version, md5, sha256, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
-    except bb.process.CmdError as e:
-        _upgrade_error(e, rf, srctree, args.keep_failure)
-    except DevtoolError as e:
-        _upgrade_error(e, rf, srctree, args.keep_failure)
+        rf, copied = _create_new_recipe(args.version, checksums, args.srcrev, srcbranch, srcsubdir1, srcsubdir2, config.workspace_path, tinfoil, rd, license_diff, new_licenses, srctree, args.keep_failure)
+    except (bb.process.CmdError, DevtoolError) as e:
+        recipedir = os.path.join(config.workspace_path, 'recipes', rd.getVar('PN'))
+        _upgrade_error(e, recipedir, srctree, args.keep_failure)
     standard._add_md5(config, pn, os.path.dirname(rf))
 
-    af = _write_append(rf, srctree, args.same_dir, args.no_same_dir, rev2,
+    af = _write_append(rf, srctree, srctree_s, args.same_dir, args.no_same_dir, rev2,
                        copied, config.workspace_path, rd)
     standard._add_md5(config, pn, af)
 
+    _run_recipe_upgrade_extra_tasks(pn, rd, tinfoil)
+
     update_unlockedsigs(basepath, workspace, args.fixed_setup, [pn])
 
     logger.info('Upgraded source extracted to %s' % srctree)
     logger.info('New recipe is %s' % rf)
     if license_diff:
         logger.info('License checksums have been updated in the new recipe; please refer to it for the difference between the old and the new license texts.')
+        preferred_version = rd.getVar('PREFERRED_VERSION_%s' % rd.getVar('PN'))
+        if preferred_version:
+            logger.warning('Version is pinned to %s via PREFERRED_VERSION; it may need adjustment to match the new version before any further steps are taken' % preferred_version)
     finally:
         tinfoil.shutdown()
     return 0
@@ -600,18 +654,28 @@ def latest_version(args, config, basepath, workspace):
     return 0
 
 def check_upgrade_status(args, config, basepath, workspace):
+    def _print_status(recipe):
+        print("{:25} {:15} {:15} {} {} {}".format(recipe['pn'],
+                                                  recipe['cur_ver'],
+                                                  recipe['status'] if recipe['status'] != 'UPDATE' else (recipe['next_ver'] if not recipe['next_ver'].endswith("new-commits-available") else "new commits"),
+                                                  recipe['maintainer'],
+                                                  recipe['revision'] if recipe['revision'] != 'N/A' else "",
+                                                  "cannot be updated due to: %s" % (recipe['no_upgrade_reason']) if recipe['no_upgrade_reason'] else ""))
     if not args.recipe:
         logger.info("Checking the upstream status for all recipes may take a few minutes")
     results = oe.recipeutils.get_recipe_upgrade_status(args.recipe)
-    for result in results:
-        # pn, update_status, current, latest, maintainer, latest_commit, no_update_reason
-        if args.all or result[1] != 'MATCH':
-            logger.info("{:25} {:15} {:15} {} {} {}".format(result[0],
-                                                            result[2],
-                                                            result[1] if result[1] != 'UPDATE' else (result[3] if not result[3].endswith("new-commits-available") else "new commits"),
-                                                            result[4],
-                                                            result[5] if result[5] != 'N/A' else "",
-                                                            "cannot be updated due to: %s" % (result[6]) if result[6] else ""))
+    for recipegroup in results:
+        upgrades = [r for r in recipegroup if r['status'] != 'MATCH']
+        currents = [r for r in recipegroup if r['status'] == 'MATCH']
+        if len(upgrades) > 1:
+            print("These recipes need to be upgraded together {")
+        for r in upgrades:
+            _print_status(r)
+        if len(upgrades) > 1:
+            print("}")
+        for r in currents:
+            if args.all:
+                _print_status(r)
 
 def register_commands(subparsers, context):
     """Register devtool subcommands from this plugin"""
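For reference, a hedged sketch of the grouped result structure the new loop consumes: get_recipe_upgrade_status() now yields lists of recipe groups, where each entry is a dict keyed as used by _print_status above (all values here are invented):

    # Hypothetical shape of one recipe group, as consumed by check_upgrade_status()
    recipegroup = [
        {'pn': 'libfoo', 'status': 'UPDATE', 'cur_ver': '1.0', 'next_ver': '1.1',
         'maintainer': 'Jane Doe', 'revision': 'N/A', 'no_upgrade_reason': ''},
        {'pn': 'libfoo-native', 'status': 'MATCH', 'cur_ver': '1.0', 'next_ver': '1.0',
         'maintainer': 'Jane Doe', 'revision': 'N/A', 'no_upgrade_reason': ''},
    ]
    upgrades = [r for r in recipegroup if r['status'] != 'MATCH']  # printed inside the braces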