Diffstat (limited to 'scripts/lib/recipetool')
-rw-r--r--  scripts/lib/recipetool/append.py                 |   72
-rw-r--r--  scripts/lib/recipetool/create.py                 |  167
-rw-r--r--  scripts/lib/recipetool/create_buildsys.py        |   40
-rw-r--r--  scripts/lib/recipetool/create_buildsys_python.py | 1071
-rw-r--r--  scripts/lib/recipetool/create_go.py              |  777
-rw-r--r--  scripts/lib/recipetool/create_npm.py             |   43
-rw-r--r--  scripts/lib/recipetool/setvar.py                 |    1
7 files changed, 1693 insertions(+), 478 deletions(-)
diff --git a/scripts/lib/recipetool/append.py b/scripts/lib/recipetool/append.py
index 88ed8c5f01..10945d6008 100644
--- a/scripts/lib/recipetool/append.py
+++ b/scripts/lib/recipetool/append.py
@@ -18,6 +18,7 @@ import shutil
import scriptutils
import errno
from collections import defaultdict
+import difflib
logger = logging.getLogger('recipetool')
@@ -100,7 +101,7 @@ def determine_file_source(targetpath, rd):
import oe.recipeutils
# See if it's in do_install for the recipe
- workdir = rd.getVar('WORKDIR')
+ unpackdir = rd.getVar('UNPACKDIR')
src_uri = rd.getVar('SRC_URI')
srcfile = ''
modpatches = []
@@ -112,9 +113,9 @@ def determine_file_source(targetpath, rd):
if not srcpath.startswith('/'):
# Handle non-absolute path
srcpath = os.path.abspath(os.path.join(rd.getVarFlag('do_install', 'dirs').split()[-1], srcpath))
- if srcpath.startswith(workdir):
+ if srcpath.startswith(unpackdir):
# OK, now we have the source file name, look for it in SRC_URI
- workdirfile = os.path.relpath(srcpath, workdir)
+ workdirfile = os.path.relpath(srcpath, unpackdir)
# FIXME this is where we ought to have some code in the fetcher, because this is naive
for item in src_uri.split():
localpath = bb.fetch2.localpath(item, rd)
@@ -299,7 +300,10 @@ def appendfile(args):
if st.st_mode & stat.S_IXUSR:
perms = '0755'
install = {args.newfile: (args.targetpath, perms)}
- oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: sourcepath}, install, wildcardver=args.wildcard_version, machine=args.machine)
+ if sourcepath:
+ sourcepath = os.path.basename(sourcepath)
+ oe.recipeutils.bbappend_recipe(rd, args.destlayer, {args.newfile: {'newname' : sourcepath}}, install, wildcardver=args.wildcard_version, machine=args.machine)
+ tinfoil.modified_files()
return 0
else:
if alternative_pns:
@@ -327,6 +331,7 @@ def appendsrc(args, files, rd, extralines=None):
copyfiles = {}
extralines = extralines or []
+ params = []
for newfile, srcfile in files.items():
src_destdir = os.path.dirname(srcfile)
if not args.use_workdir:
@@ -337,25 +342,46 @@ def appendsrc(args, files, rd, extralines=None):
src_destdir = os.path.join(os.path.relpath(srcdir, workdir), src_destdir)
src_destdir = os.path.normpath(src_destdir)
- source_uri = 'file://{0}'.format(os.path.basename(srcfile))
if src_destdir and src_destdir != '.':
- source_uri += ';subdir={0}'.format(src_destdir)
-
- simple = bb.fetch.URI(source_uri)
- simple.params = {}
- simple_str = str(simple)
- if simple_str in simplified:
- existing = simplified[simple_str]
- if source_uri != existing:
- logger.warning('{0!r} is already in SRC_URI, with different parameters: {1!r}, not adding'.format(source_uri, existing))
- else:
- logger.warning('{0!r} is already in SRC_URI, not adding'.format(source_uri))
+ params.append({'subdir': src_destdir})
else:
- extralines.append('SRC_URI += {0}'.format(source_uri))
- copyfiles[newfile] = srcfile
-
- oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines)
-
+ params.append({})
+
+ copyfiles[newfile] = {'newname' : os.path.basename(srcfile)}
+
+ dry_run_output = None
+ dry_run_outdir = None
+ if args.dry_run:
+ import tempfile
+ dry_run_output = tempfile.TemporaryDirectory(prefix='devtool')
+ dry_run_outdir = dry_run_output.name
+
+ appendfile, _ = oe.recipeutils.bbappend_recipe(rd, args.destlayer, copyfiles, None, wildcardver=args.wildcard_version, machine=args.machine, extralines=extralines, params=params,
+ redirect_output=dry_run_outdir, update_original_recipe=args.update_recipe)
+ if not appendfile:
+ return
+ if args.dry_run:
+ output = ''
+ appendfilename = os.path.basename(appendfile)
+ newappendfile = appendfile
+ if appendfile and os.path.exists(appendfile):
+ with open(appendfile, 'r') as f:
+ oldlines = f.readlines()
+ else:
+ appendfile = '/dev/null'
+ oldlines = []
+
+ with open(os.path.join(dry_run_outdir, appendfilename), 'r') as f:
+ newlines = f.readlines()
+ diff = difflib.unified_diff(oldlines, newlines, appendfile, newappendfile)
+ difflines = list(diff)
+ if difflines:
+ output += ''.join(difflines)
+ if output:
+ logger.info('Diff of changed files:\n%s' % output)
+ else:
+ logger.info('No changed files')
+ tinfoil.modified_files()
def appendsrcfiles(parser, args):
recipedata = _parse_recipe(args.recipe, tinfoil)
@@ -435,6 +461,8 @@ def register_commands(subparsers):
help='Create/update a bbappend to add or replace source files',
description='Creates a bbappend (or updates an existing one) to add or replace the specified files in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify multiple files with a destination directory, so you cannot specify the destination filename. See the `appendsrcfile` command for the other behavior.')
parser.add_argument('-D', '--destdir', help='Destination directory (relative to S or WORKDIR, defaults to ".")', default='', type=destination_path)
+ parser.add_argument('-u', '--update-recipe', help='Update the recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
+ parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
parser.add_argument('files', nargs='+', metavar='FILE', help='File(s) to be added to the recipe sources (WORKDIR or S)', type=existing_path)
parser.set_defaults(func=lambda a: appendsrcfiles(parser, a), parserecipes=True)
@@ -442,6 +470,8 @@ def register_commands(subparsers):
parents=[common_src],
help='Create/update a bbappend to add or replace a source file',
description='Creates a bbappend (or updates an existing one) to add or replace the specified file in the recipe sources, either those in WORKDIR or those in the source tree. This command lets you specify the destination filename, not just the destination directory, but only works for one file. See the `appendsrcfiles` command for the other behavior.')
+ parser.add_argument('-u', '--update-recipe', help='Update the recipe instead of creating (or updating) a bbappend file. DESTLAYER must contain the recipe to update', action='store_true')
+ parser.add_argument('-n', '--dry-run', help='Dry run mode', action='store_true')
parser.add_argument('file', metavar='FILE', help='File to be added to the recipe sources (WORKDIR or S)', type=existing_path)
parser.add_argument('destfile', metavar='DESTFILE', nargs='?', help='Destination path (relative to S or WORKDIR, optional)', type=destination_path)
parser.set_defaults(func=lambda a: appendsrcfile(parser, a), parserecipes=True)
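The dry-run path added to appendsrc() above writes the would-be bbappend into a temporary directory and diffs it against whatever already exists on disk. A minimal sketch of that difflib pattern, where preview_append() and its parameters are illustrative names rather than recipetool API:

    import difflib
    import os
    import tempfile

    def preview_append(appendfile, generated_lines):
        # Write the generated bbappend into a scratch directory instead of
        # the layer, then diff it against whatever already exists on disk.
        with tempfile.TemporaryDirectory(prefix='devtool') as outdir:
            newpath = os.path.join(outdir, os.path.basename(appendfile))
            with open(newpath, 'w') as f:
                f.writelines(generated_lines)
            if os.path.exists(appendfile):
                with open(appendfile) as f:
                    oldlines = f.readlines()
                fromfile = appendfile
            else:
                # Same trick as in the hunk above: a new file diffs against /dev/null
                oldlines, fromfile = [], '/dev/null'
            with open(newpath) as f:
                newlines = f.readlines()
            return ''.join(difflib.unified_diff(oldlines, newlines, fromfile, appendfile))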
diff --git a/scripts/lib/recipetool/create.py b/scripts/lib/recipetool/create.py
index 824ac6350d..8e9ff38db6 100644
--- a/scripts/lib/recipetool/create.py
+++ b/scripts/lib/recipetool/create.py
@@ -423,6 +423,36 @@ def create_recipe(args):
storeTagName = ''
pv_srcpv = False
+ handled = []
+ classes = []
+
+ # Find all plugins that want to register handlers
+ logger.debug('Loading recipe handlers')
+ raw_handlers = []
+ for plugin in plugins:
+ if hasattr(plugin, 'register_recipe_handlers'):
+ plugin.register_recipe_handlers(raw_handlers)
+ # Sort handlers by priority
+ handlers = []
+ for i, handler in enumerate(raw_handlers):
+ if isinstance(handler, tuple):
+ handlers.append((handler[0], handler[1], i))
+ else:
+ handlers.append((handler, 0, i))
+ handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
+ for handler, priority, _ in handlers:
+ logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
+ setattr(handler, '_devtool', args.devtool)
+ handlers = [item[0] for item in handlers]
+
+ fetchuri = None
+ for handler in handlers:
+ if hasattr(handler, 'process_url'):
+ ret = handler.process_url(args, classes, handled, extravalues)
+ if 'url' in handled and ret:
+ fetchuri = ret
+ break
+
if os.path.isfile(source):
source = 'file://%s' % os.path.abspath(source)
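The handler registration block moved up in this hunk sorts on (priority, -registration index) in reverse, so higher-priority handlers run first and, within the same priority, earlier-registered handlers keep precedence. A standalone illustration with made-up handler names:

    # Tuples are (name, priority, registration index); example data only.
    raw = [('makefile', 0), ('pyproject', 75), ('setup_py', 70), ('cmake', 0)]
    entries = [(name, prio, i) for i, (name, prio) in enumerate(raw)]
    entries.sort(key=lambda item: (item[1], -item[2]), reverse=True)
    print([e[0] for e in entries])  # ['pyproject', 'setup_py', 'makefile', 'cmake']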
@@ -431,7 +461,8 @@ def create_recipe(args):
if re.match(r'https?://github.com/[^/]+/[^/]+/archive/.+(\.tar\..*|\.zip)$', source):
logger.warning('github archive files are not guaranteed to be stable and may be re-generated over time. If the latter occurs, the checksums will likely change and the recipe will fail at do_fetch. It is recommended that you point to an actual commit or tag in the repository instead (using the repository URL in conjunction with the -S/--srcrev option).')
# Fetch a URL
- fetchuri = reformat_git_uri(urldefrag(source)[0])
+ if not fetchuri:
+ fetchuri = reformat_git_uri(urldefrag(source)[0])
if args.binary:
# Assume the archive contains the directory structure verbatim
# so we need to extract to a subdirectory
@@ -638,8 +669,6 @@ def create_recipe(args):
# We'll come back and replace this later in handle_license_vars()
lines_before.append('##LICENSE_PLACEHOLDER##')
- handled = []
- classes = []
# FIXME This is kind of a hack, we probably ought to be using bitbake to do this
pn = None
@@ -677,8 +706,10 @@ def create_recipe(args):
if not srcuri:
lines_before.append('# No information for SRC_URI yet (only an external source tree was specified)')
lines_before.append('SRC_URI = "%s"' % srcuri)
+ shown_checksums = ["%ssum" % s for s in bb.fetch2.SHOWN_CHECKSUM_LIST]
for key, value in sorted(checksums.items()):
- lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
+ if key in shown_checksums:
+ lines_before.append('SRC_URI[%s] = "%s"' % (key, value))
if srcuri and supports_srcrev(srcuri):
lines_before.append('')
lines_before.append('# Modify these as desired')
@@ -690,7 +721,7 @@ def create_recipe(args):
srcpvprefix = 'svnr'
else:
srcpvprefix = scheme
- lines_before.append('PV = "%s+%s${SRCPV}"' % (realpv or '1.0', srcpvprefix))
+ lines_before.append('PV = "%s+%s"' % (realpv or '1.0', srcpvprefix))
pv_srcpv = True
if not args.autorev and srcrev == '${AUTOREV}':
if os.path.exists(os.path.join(srctree, '.git')):
@@ -718,25 +749,6 @@ def create_recipe(args):
if args.npm_dev:
extravalues['NPM_INSTALL_DEV'] = 1
- # Find all plugins that want to register handlers
- logger.debug('Loading recipe handlers')
- raw_handlers = []
- for plugin in plugins:
- if hasattr(plugin, 'register_recipe_handlers'):
- plugin.register_recipe_handlers(raw_handlers)
- # Sort handlers by priority
- handlers = []
- for i, handler in enumerate(raw_handlers):
- if isinstance(handler, tuple):
- handlers.append((handler[0], handler[1], i))
- else:
- handlers.append((handler, 0, i))
- handlers.sort(key=lambda item: (item[1], -item[2]), reverse=True)
- for handler, priority, _ in handlers:
- logger.debug('Handler: %s (priority %d)' % (handler.__class__.__name__, priority))
- setattr(handler, '_devtool', args.devtool)
- handlers = [item[0] for item in handlers]
-
# Apply the handlers
if args.binary:
classes.append('bin_package')
@@ -745,6 +757,10 @@ def create_recipe(args):
for handler in handlers:
handler.process(srctree_use, classes, lines_before, lines_after, handled, extravalues)
+ # native and nativesdk classes are special and must be inherited last
+ # If present, put them at the end of the classes list
+ classes.sort(key=lambda c: c in ("native", "nativesdk"))
+
extrafiles = extravalues.pop('extrafiles', {})
extra_pn = extravalues.pop('PN', None)
extra_pv = extravalues.pop('PV', None)
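The classes.sort() added above exploits Python's stable sort with a boolean key: False (0) orders before True (1), so only native and nativesdk move to the end of the inherit list while every other class keeps its relative position. For example, with a made-up class list:

    classes = ['native', 'pypi', 'python_setuptools_build_meta']
    classes.sort(key=lambda c: c in ("native", "nativesdk"))
    print(classes)  # ['pypi', 'python_setuptools_build_meta', 'native']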
@@ -869,8 +885,10 @@ def create_recipe(args):
outlines.append('')
outlines.extend(lines_after)
+ outlines = [ line.rstrip('\n') +"\n" for line in outlines]
+
if extravalues:
- _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=False)
+ _, outlines = oe.recipeutils.patch_recipe_lines(outlines, extravalues, trailing_newline=True)
if args.extract_to:
scriptutils.git_convert_standalone_clone(srctree)
@@ -886,7 +904,7 @@ def create_recipe(args):
log_info_cond('Source extracted to %s' % args.extract_to, args.devtool)
if outfile == '-':
- sys.stdout.write('\n'.join(outlines) + '\n')
+ sys.stdout.write(''.join(outlines) + '\n')
else:
with open(outfile, 'w') as f:
lastline = None
@@ -894,9 +912,10 @@ def create_recipe(args):
if not lastline and not line:
# Skip extra blank lines
continue
- f.write('%s\n' % line)
+ f.write('%s' % line)
lastline = line
log_info_cond('Recipe %s has been created; further editing may be required to make it fully functional' % outfile, args.devtool)
+ tinfoil.modified_files()
if tempsrc:
if args.keep_temp:
@@ -1054,54 +1073,18 @@ def get_license_md5sums(d, static_only=False, linenumbers=False):
return md5sums
-def crunch_license(licfile):
+def crunch_known_licenses(d):
'''
- Remove non-material text from a license file and then check
- its md5sum against a known list. This works well for licenses
- which contain a copyright statement, but is also a useful way
- to handle people's insistence upon reformatting the license text
- slightly (with no material difference to the text of the
- license).
+ Calculate the MD5 checksums for the crunched versions of all common
+ licenses. Also add additional known checksums.
'''
-
- import oe.utils
-
- # Note: these are carefully constructed!
- license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
- license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
- copyright_re = re.compile('^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
- disclaimer_re = re.compile('^ *\*? ?All [Rr]ights [Rr]eserved\.$')
- email_re = re.compile('^.*<[\w\.-]*@[\w\.\-]*>$')
- header_re = re.compile('^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
- tag_re = re.compile('^ *@?\(?([Ll]icense|MIT)\)?$')
- url_re = re.compile('^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
-
+
crunched_md5sums = {}
# common licenses
- crunched_md5sums['89f3bf322f30a1dcfe952e09945842f0'] = 'Apache-2.0'
- crunched_md5sums['13b6fe3075f8f42f2270a748965bf3a1'] = '0BSD'
- crunched_md5sums['ba87a7d7c20719c8df4b8beed9b78c43'] = 'BSD-2-Clause'
- crunched_md5sums['7f8892c03b72de419c27be4ebfa253f8'] = 'BSD-3-Clause'
- crunched_md5sums['21128c0790b23a8a9f9e260d5f6b3619'] = 'BSL-1.0'
- crunched_md5sums['975742a59ae1b8abdea63a97121f49f4'] = 'EDL-1.0'
- crunched_md5sums['5322cee4433d84fb3aafc9e253116447'] = 'EPL-1.0'
- crunched_md5sums['6922352e87de080f42419bed93063754'] = 'EPL-2.0'
- crunched_md5sums['793475baa22295cae1d3d4046a3a0ceb'] = 'GPL-2.0-only'
- crunched_md5sums['ff9047f969b02c20f0559470df5cb433'] = 'GPL-2.0-or-later'
- crunched_md5sums['ea6de5453fcadf534df246e6cdafadcd'] = 'GPL-3.0-only'
- crunched_md5sums['b419257d4d153a6fde92ddf96acf5b67'] = 'GPL-3.0-or-later'
- crunched_md5sums['228737f4c49d3ee75b8fb3706b090b84'] = 'ISC'
- crunched_md5sums['c6a782e826ca4e85bf7f8b89435a677d'] = 'LGPL-2.0-only'
- crunched_md5sums['32d8f758a066752f0db09bd7624b8090'] = 'LGPL-2.0-or-later'
- crunched_md5sums['4820937eb198b4f84c52217ed230be33'] = 'LGPL-2.1-only'
- crunched_md5sums['db13fe9f3a13af7adab2dc7a76f9e44a'] = 'LGPL-2.1-or-later'
- crunched_md5sums['d7a0f2e4e0950e837ac3eabf5bd1d246'] = 'LGPL-3.0-only'
- crunched_md5sums['abbf328e2b434f9153351f06b9f79d02'] = 'LGPL-3.0-or-later'
- crunched_md5sums['eecf6429523cbc9693547cf2db790b5c'] = 'MIT'
- crunched_md5sums['b218b0e94290b9b818c4be67c8e1cc82'] = 'MIT-0'
- crunched_md5sums['ddc18131d6748374f0f35a621c245b49'] = 'Unlicense'
- crunched_md5sums['51f9570ff32571fc0a443102285c5e33'] = 'WTFPL'
+ crunched_md5sums['ad4e9d34a2e966dfe9837f18de03266d'] = 'GFDL-1.1-only'
+ crunched_md5sums['d014fb11a34eb67dc717fdcfc97e60ed'] = 'GFDL-1.2-only'
+ crunched_md5sums['e020ca655b06c112def28e597ab844f1'] = 'GFDL-1.3-only'
# The following two were gleaned from the "forever" npm package
crunched_md5sums['0a97f8e4cbaf889d6fa51f84b89a79f6'] = 'ISC'
@@ -1157,6 +1140,39 @@ def crunch_license(licfile):
# https://raw.githubusercontent.com/stackgl/gl-mat3/v2.0.0/LICENSE.md
crunched_md5sums['75512892d6f59dddb6d1c7e191957e9c'] = 'Zlib'
+ commonlicdir = d.getVar('COMMON_LICENSE_DIR')
+ for fn in sorted(os.listdir(commonlicdir)):
+ md5value, lictext = crunch_license(os.path.join(commonlicdir, fn))
+ if md5value not in crunched_md5sums:
+ crunched_md5sums[md5value] = fn
+ elif fn != crunched_md5sums[md5value]:
+ bb.debug(2, "crunched_md5sums['%s'] is already set to '%s' rather than '%s'" % (md5value, crunched_md5sums[md5value], fn))
+ else:
+ bb.debug(2, "crunched_md5sums['%s'] is already set to '%s'" % (md5value, crunched_md5sums[md5value]))
+
+ return crunched_md5sums
+
+def crunch_license(licfile):
+ '''
+ Remove non-material text from a license file and then calculate its
+ md5sum. This works well for licenses that contain a copyright statement,
+ but is also a useful way to handle people's insistence upon reformatting
+ the license text slightly (with no material difference to the text of the
+ license).
+ '''
+
+ import oe.utils
+
+ # Note: these are carefully constructed!
+ license_title_re = re.compile(r'^#*\(? *(This is )?([Tt]he )?.{0,15} ?[Ll]icen[sc]e( \(.{1,10}\))?\)?[:\.]? ?#*$')
+ license_statement_re = re.compile(r'^((This (project|software)|.{1,10}) is( free software)? (released|licen[sc]ed)|(Released|Licen[cs]ed)) under the .{1,10} [Ll]icen[sc]e:?$')
+ copyright_re = re.compile(r'^ *[#\*]* *(Modified work |MIT LICENSED )?Copyright ?(\([cC]\))? .*$')
+ disclaimer_re = re.compile(r'^ *\*? ?All [Rr]ights [Rr]eserved\.$')
+ email_re = re.compile(r'^.*<[\w\.-]*@[\w\.\-]*>$')
+ header_re = re.compile(r'^(\/\**!?)? ?[\-=\*]* ?(\*\/)?$')
+ tag_re = re.compile(r'^ *@?\(?([Ll]icense|MIT)\)?$')
+ url_re = re.compile(r'^ *[#\*]* *https?:\/\/[\w\.\/\-]+$')
+
lictext = []
with open(licfile, 'r', errors='surrogateescape') as f:
for line in f:
@@ -1198,16 +1214,17 @@ def crunch_license(licfile):
except UnicodeEncodeError:
md5val = None
lictext = ''
- license = crunched_md5sums.get(md5val, None)
- return license, md5val, lictext
+ return md5val, lictext
def guess_license(srctree, d):
import bb
md5sums = get_license_md5sums(d)
+ crunched_md5sums = crunch_known_licenses(d)
+
licenses = []
licspecs = ['*LICEN[CS]E*', 'COPYING*', '*[Ll]icense*', 'LEGAL*', '[Ll]egal*', '*GPL*', 'README.lic*', 'COPYRIGHT*', '[Cc]opyright*', 'e[dp]l-v10']
- skip_extensions = (".html", ".js", ".json", ".svg", ".ts")
+ skip_extensions = (".html", ".js", ".json", ".svg", ".ts", ".go")
licfiles = []
for root, dirs, files in os.walk(srctree):
for fn in files:
@@ -1222,7 +1239,8 @@ def guess_license(srctree, d):
md5value = bb.utils.md5_file(licfile)
license = md5sums.get(md5value, None)
if not license:
- license, crunched_md5, lictext = crunch_license(licfile)
+ crunched_md5, lictext = crunch_license(licfile)
+ license = crunched_md5sums.get(crunched_md5, None)
if lictext and not license:
license = 'Unknown'
logger.info("Please add the following line for '%s' to a 'lib/recipetool/licenses.csv' " \
@@ -1396,6 +1414,7 @@ def register_commands(subparsers):
parser_create.add_argument('-B', '--srcbranch', help='Branch in source repository if fetching from an SCM such as git (default master)')
parser_create.add_argument('--keep-temp', action="store_true", help='Keep temporary directory (for debugging)')
parser_create.add_argument('--npm-dev', action="store_true", help='For npm, also fetch devDependencies')
+ parser_create.add_argument('--no-pypi', action="store_true", help='Do not inherit pypi class')
parser_create.add_argument('--devtool', action="store_true", help=argparse.SUPPRESS)
parser_create.add_argument('--mirrors', action="store_true", help='Enable PREMIRRORS and MIRRORS for source tree fetching (disabled by default).')
parser_create.set_defaults(func=create_recipe)
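With crunch_license() split out from the lookup, guess_license() now tries an exact md5 match on the raw file first and falls back to matching the crunched text's md5 against the table built by crunch_known_licenses(). A distilled sketch of that flow, assuming a bitbake environment (bb.utils) and the crunch_license() defined above; identify_license() is an illustrative name:

    import bb.utils  # assumes a bitbake/tinfoil environment

    def identify_license(licfile, md5sums, crunched_md5sums):
        # Exact match on the raw file first
        license = md5sums.get(bb.utils.md5_file(licfile))
        if license:
            return license
        # Fall back to the boilerplate-stripped ("crunched") text
        crunched_md5, lictext = crunch_license(licfile)
        license = crunched_md5sums.get(crunched_md5)
        if lictext and not license:
            return 'Unknown'
        return license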
diff --git a/scripts/lib/recipetool/create_buildsys.py b/scripts/lib/recipetool/create_buildsys.py
index 5015634476..ec9d510e23 100644
--- a/scripts/lib/recipetool/create_buildsys.py
+++ b/scripts/lib/recipetool/create_buildsys.py
@@ -5,9 +5,9 @@
# SPDX-License-Identifier: GPL-2.0-only
#
+import os
import re
import logging
-import glob
from recipetool.create import RecipeHandler, validate_pv
logger = logging.getLogger('recipetool')
@@ -137,15 +137,15 @@ class CmakeRecipeHandler(RecipeHandler):
deps = []
unmappedpkgs = []
- proj_re = re.compile('project\s*\(([^)]*)\)', re.IGNORECASE)
- pkgcm_re = re.compile('pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
- pkgsm_re = re.compile('pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
- findpackage_re = re.compile('find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
- findlibrary_re = re.compile('find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
- checklib_re = re.compile('check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
- include_re = re.compile('include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
- subdir_re = re.compile('add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
- dep_re = re.compile('([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
+ proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
+ pkgcm_re = re.compile(r'pkg_check_modules\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?\s+([^)\s]+)\s*\)', re.IGNORECASE)
+ pkgsm_re = re.compile(r'pkg_search_module\s*\(\s*[a-zA-Z0-9-_]+\s*(REQUIRED)?((\s+[^)\s]+)+)\s*\)', re.IGNORECASE)
+ findpackage_re = re.compile(r'find_package\s*\(\s*([a-zA-Z0-9-_]+)\s*.*', re.IGNORECASE)
+ findlibrary_re = re.compile(r'find_library\s*\(\s*[a-zA-Z0-9-_]+\s*(NAMES\s+)?([a-zA-Z0-9-_ ]+)\s*.*')
+ checklib_re = re.compile(r'check_library_exists\s*\(\s*([^\s)]+)\s*.*', re.IGNORECASE)
+ include_re = re.compile(r'include\s*\(\s*([^)\s]*)\s*\)', re.IGNORECASE)
+ subdir_re = re.compile(r'add_subdirectory\s*\(\s*([^)\s]*)\s*([^)\s]*)\s*\)', re.IGNORECASE)
+ dep_re = re.compile(r'([^ ><=]+)( *[<>=]+ *[^ ><=]+)?')
def find_cmake_package(pkg):
RecipeHandler.load_devel_filemap(tinfoil.config_data)
@@ -423,16 +423,16 @@ class AutotoolsRecipeHandler(RecipeHandler):
'makeinfo': 'texinfo',
}
- pkg_re = re.compile('PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
- pkgce_re = re.compile('PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
- lib_re = re.compile('AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
- libx_re = re.compile('AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
- progs_re = re.compile('_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
- dep_re = re.compile('([^ ><=]+)( [<>=]+ [^ ><=]+)?')
- ac_init_re = re.compile('AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
- am_init_re = re.compile('AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
- define_re = re.compile('\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
- version_re = re.compile('([0-9.]+)')
+ pkg_re = re.compile(r'PKG_CHECK_MODULES\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
+ pkgce_re = re.compile(r'PKG_CHECK_EXISTS\(\s*\[?([^,\]]*)\]?[),].*')
+ lib_re = re.compile(r'AC_CHECK_LIB\(\s*\[?([^,\]]*)\]?,.*')
+ libx_re = re.compile(r'AX_CHECK_LIBRARY\(\s*\[?[^,\]]*\]?,\s*\[?([^,\]]*)\]?,\s*\[?([a-zA-Z0-9-]*)\]?,.*')
+ progs_re = re.compile(r'_PROGS?\(\s*\[?[a-zA-Z0-9_]*\]?,\s*\[?([^,\]]*)\]?[),].*')
+ dep_re = re.compile(r'([^ ><=]+)( [<>=]+ [^ ><=]+)?')
+ ac_init_re = re.compile(r'AC_INIT\(\s*([^,]+),\s*([^,]+)[,)].*')
+ am_init_re = re.compile(r'AM_INIT_AUTOMAKE\(\s*([^,]+),\s*([^,]+)[,)].*')
+ define_re = re.compile(r'\s*(m4_)?define\(\s*([^,]+),\s*([^,]+)\)')
+ version_re = re.compile(r'([0-9.]+)')
defines = {}
def subst_defines(value):
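The blanket r'' conversion in this file is not cosmetic: in a non-raw string, sequences such as \s or \( are invalid string escapes, which newer Python versions flag with DeprecationWarning/SyntaxWarning and are slated to reject outright. A quick check that the raw form behaves as before:

    import re

    # With the r prefix, \s reaches the regex engine intact
    proj_re = re.compile(r'project\s*\(([^)]*)\)', re.IGNORECASE)
    print(proj_re.search('PROJECT(foo VERSION 1.0)').group(1))  # foo VERSION 1.0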
diff --git a/scripts/lib/recipetool/create_buildsys_python.py b/scripts/lib/recipetool/create_buildsys_python.py
index 4675cc68fa..a807dafae5 100644
--- a/scripts/lib/recipetool/create_buildsys_python.py
+++ b/scripts/lib/recipetool/create_buildsys_python.py
@@ -10,7 +10,7 @@ import codecs
import collections
import setuptools.command.build_py
import email
-import imp
+import importlib
import glob
import itertools
import logging
@@ -18,7 +18,11 @@ import os
import re
import sys
import subprocess
+import json
+import urllib.request
from recipetool.create import RecipeHandler
+from urllib.parse import urldefrag
+from recipetool.create import determine_from_url
logger = logging.getLogger('recipetool')
@@ -37,63 +41,8 @@ class PythonRecipeHandler(RecipeHandler):
assume_provided = ['builtins', 'os.path']
# Assumes that the host python3 builtin_module_names is sane for target too
assume_provided = assume_provided + list(sys.builtin_module_names)
+ excluded_fields = []
- bbvar_map = {
- 'Name': 'PN',
- 'Version': 'PV',
- 'Home-page': 'HOMEPAGE',
- 'Summary': 'SUMMARY',
- 'Description': 'DESCRIPTION',
- 'License': 'LICENSE',
- 'Requires': 'RDEPENDS:${PN}',
- 'Provides': 'RPROVIDES:${PN}',
- 'Obsoletes': 'RREPLACES:${PN}',
- }
- # PN/PV are already set by recipetool core & desc can be extremely long
- excluded_fields = [
- 'Description',
- ]
- setup_parse_map = {
- 'Url': 'Home-page',
- 'Classifiers': 'Classifier',
- 'Description': 'Summary',
- }
- setuparg_map = {
- 'Home-page': 'url',
- 'Classifier': 'classifiers',
- 'Summary': 'description',
- 'Description': 'long-description',
- }
- # Values which are lists, used by the setup.py argument based metadata
- # extraction method, to determine how to process the setup.py output.
- setuparg_list_fields = [
- 'Classifier',
- 'Requires',
- 'Provides',
- 'Obsoletes',
- 'Platform',
- 'Supported-Platform',
- ]
- setuparg_multi_line_values = ['Description']
- replacements = [
- ('License', r' +$', ''),
- ('License', r'^ +', ''),
- ('License', r' ', '-'),
- ('License', r'^GNU-', ''),
- ('License', r'-[Ll]icen[cs]e(,?-[Vv]ersion)?', ''),
- ('License', r'^UNKNOWN$', ''),
-
- # Remove currently unhandled version numbers from these variables
- ('Requires', r' *\([^)]*\)', ''),
- ('Provides', r' *\([^)]*\)', ''),
- ('Obsoletes', r' *\([^)]*\)', ''),
- ('Install-requires', r'^([^><= ]+).*', r'\1'),
- ('Extras-require', r'^([^><= ]+).*', r'\1'),
- ('Tests-require', r'^([^><= ]+).*', r'\1'),
-
- # Remove unhandled dependency on particular features (e.g. foo[PDF])
- ('Install-requires', r'\[[^\]]+\]$', ''),
- ]
classifier_license_map = {
'License :: OSI Approved :: Academic Free License (AFL)': 'AFL',
@@ -166,16 +115,473 @@ class PythonRecipeHandler(RecipeHandler):
def __init__(self):
pass
+ def process_url(self, args, classes, handled, extravalues):
+ """
+ Convert any pypi url https://pypi.org/project/<package>/<version> into https://files.pythonhosted.org/packages/source/...
+ which corresponds to the archive location, and add pypi class
+ """
+
+ if 'url' in handled:
+ return None
+
+ fetch_uri = None
+ source = args.source
+ required_version = args.version if args.version else None
+ match = re.match(r'https?://pypi.org/project/([^/]+)(?:/([^/]+))?/?$', urldefrag(source)[0])
+ if match:
+ package = match.group(1)
+ version = match.group(2) if match.group(2) else required_version
+
+ json_url = "https://pypi.org/pypi/%s/json" % package
+ response = urllib.request.urlopen(json_url)
+ if response.status == 200:
+ data = json.loads(response.read())
+ if not version:
+ # grab latest version
+ version = data["info"]["version"]
+ pypi_package = data["info"]["name"]
+ for release in reversed(data["releases"][version]):
+ if release["packagetype"] == "sdist":
+ fetch_uri = release["url"]
+ break
+ else:
+ logger.warning("Cannot handle pypi url %s: cannot fetch package information using %s", source, json_url)
+ return None
+ else:
+ match = re.match(r'^https?://files.pythonhosted.org/packages.*/(.*)-.*$', source)
+ if match:
+ fetch_uri = source
+ pypi_package = match.group(1)
+ _, version = determine_from_url(fetch_uri)
+
+ if match and not args.no_pypi:
+ if required_version and version != required_version:
+ raise Exception("Version specified using --version/-V (%s) and version specified in the url (%s) do not match" % (required_version, version))
+ # This is optional if BPN looks like "python-<pypi_package>" or "python3-<pypi_package>" (see pypi.bbclass)
+ # but at this point we cannot know, because the user can specify the output name of the recipe on the command line
+ extravalues["PYPI_PACKAGE"] = pypi_package
+ # If the tarball extension is not 'tar.gz' (the default value in pypi.bbclass) we should set PYPI_PACKAGE_EXT in the recipe
+ pypi_package_ext = re.match(r'.*%s-%s\.(.*)$' % (pypi_package, version), fetch_uri)
+ if pypi_package_ext:
+ pypi_package_ext = pypi_package_ext.group(1)
+ if pypi_package_ext != "tar.gz":
+ extravalues["PYPI_PACKAGE_EXT"] = pypi_package_ext
+
+ # Pypi class will handle S and SRC_URI variables, so remove them
+ # TODO: allow oe.recipeutils.patch_recipe_lines() to accept regexp so we can simplify the following to:
+ # extravalues['SRC_URI(?:\[.*?\])?'] = None
+ extravalues['S'] = None
+ extravalues['SRC_URI'] = None
+
+ classes.append('pypi')
+
+ handled.append('url')
+ return fetch_uri
+
+ def handle_classifier_license(self, classifiers, existing_licenses=""):
+
+ licenses = []
+ for classifier in classifiers:
+ if classifier in self.classifier_license_map:
+ license = self.classifier_license_map[classifier]
+ if license == 'Apache' and 'Apache-2.0' in existing_licenses:
+ license = 'Apache-2.0'
+ elif license == 'GPL':
+ if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
+ license = 'GPL-2.0'
+ elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
+ license = 'GPL-3.0'
+ elif license == 'LGPL':
+ if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
+ license = 'LGPL-2.1'
+ elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
+ license = 'LGPL-2.0'
+ elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
+ license = 'LGPL-3.0'
+ licenses.append(license)
+
+ if licenses:
+ return ' & '.join(licenses)
+
+ return None
+
+ def map_info_to_bbvar(self, info, extravalues):
+
+ # Map PKG-INFO & setup.py fields to bitbake variables
+ for field, values in info.items():
+ if field in self.excluded_fields:
+ continue
+
+ if field not in self.bbvar_map:
+ continue
+
+ if isinstance(values, str):
+ value = values
+ else:
+ value = ' '.join(str(v) for v in values if v)
+
+ bbvar = self.bbvar_map[field]
+ if bbvar == "PN":
+ # by convention python recipes start with "python3-"
+ if not value.startswith('python'):
+ value = 'python3-' + value
+
+ if bbvar not in extravalues and value:
+ extravalues[bbvar] = value
+
+ def apply_info_replacements(self, info):
+ if not self.replacements:
+ return
+
+ for variable, search, replace in self.replacements:
+ if variable not in info:
+ continue
+
+ def replace_value(search, replace, value):
+ if replace is None:
+ if re.search(search, value):
+ return None
+ else:
+ new_value = re.sub(search, replace, value)
+ if value != new_value:
+ return new_value
+ return value
+
+ value = info[variable]
+ if isinstance(value, str):
+ new_value = replace_value(search, replace, value)
+ if new_value is None:
+ del info[variable]
+ elif new_value != value:
+ info[variable] = new_value
+ elif hasattr(value, 'items'):
+ for dkey, dvalue in list(value.items()):
+ new_list = []
+ for pos, a_value in enumerate(dvalue):
+ new_value = replace_value(search, replace, a_value)
+ if new_value is not None and new_value != value:
+ new_list.append(new_value)
+
+ if value != new_list:
+ value[dkey] = new_list
+ else:
+ new_list = []
+ for pos, a_value in enumerate(value):
+ new_value = replace_value(search, replace, a_value)
+ if new_value is not None and new_value != value:
+ new_list.append(new_value)
+
+ if value != new_list:
+ info[variable] = new_list
+
+
+ def scan_python_dependencies(self, paths):
+ deps = set()
+ try:
+ dep_output = self.run_command(['pythondeps', '-d'] + paths)
+ except (OSError, subprocess.CalledProcessError):
+ pass
+ else:
+ for line in dep_output.splitlines():
+ line = line.rstrip()
+ dep, filename = line.split('\t', 1)
+ if filename.endswith('/setup.py'):
+ continue
+ deps.add(dep)
+
+ try:
+ provides_output = self.run_command(['pythondeps', '-p'] + paths)
+ except (OSError, subprocess.CalledProcessError):
+ pass
+ else:
+ provides_lines = (l.rstrip() for l in provides_output.splitlines())
+ provides = set(l for l in provides_lines if l and l != 'setup')
+ deps -= provides
+
+ return deps
+
+ def parse_pkgdata_for_python_packages(self):
+ pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
+
+ ldata = tinfoil.config_data.createCopy()
+ bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
+ python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
+
+ dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
+ python_dirs = [python_sitedir + os.sep,
+ os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
+ os.path.dirname(python_sitedir) + os.sep]
+ packages = {}
+ for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
+ files_info = None
+ with open(pkgdatafile, 'r') as f:
+ for line in f.readlines():
+ field, value = line.split(': ', 1)
+ if field.startswith('FILES_INFO'):
+ files_info = ast.literal_eval(value)
+ break
+ else:
+ continue
+
+ for fn in files_info:
+ for suffix in importlib.machinery.all_suffixes():
+ if fn.endswith(suffix):
+ break
+ else:
+ continue
+
+ if fn.startswith(dynload_dir + os.sep):
+ if '/.debug/' in fn:
+ continue
+ base = os.path.basename(fn)
+ provided = base.split('.', 1)[0]
+ packages[provided] = os.path.basename(pkgdatafile)
+ continue
+
+ for python_dir in python_dirs:
+ if fn.startswith(python_dir):
+ relpath = fn[len(python_dir):]
+ relstart, _, relremaining = relpath.partition(os.sep)
+ if relstart.endswith('.egg'):
+ relpath = relremaining
+ base, _ = os.path.splitext(relpath)
+
+ if '/.debug/' in base:
+ continue
+ if os.path.basename(base) == '__init__':
+ base = os.path.dirname(base)
+ base = base.replace(os.sep + os.sep, os.sep)
+ provided = base.replace(os.sep, '.')
+ packages[provided] = os.path.basename(pkgdatafile)
+ return packages
+
+ @classmethod
+ def run_command(cls, cmd, **popenargs):
+ if 'stderr' not in popenargs:
+ popenargs['stderr'] = subprocess.STDOUT
+ try:
+ return subprocess.check_output(cmd, **popenargs).decode('utf-8')
+ except OSError as exc:
+ logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc)
+ raise
+ except subprocess.CalledProcessError as exc:
+ logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output)
+ raise
+
+class PythonSetupPyRecipeHandler(PythonRecipeHandler):
+ bbvar_map = {
+ 'Name': 'PN',
+ 'Version': 'PV',
+ 'Home-page': 'HOMEPAGE',
+ 'Summary': 'SUMMARY',
+ 'Description': 'DESCRIPTION',
+ 'License': 'LICENSE',
+ 'Requires': 'RDEPENDS:${PN}',
+ 'Provides': 'RPROVIDES:${PN}',
+ 'Obsoletes': 'RREPLACES:${PN}',
+ }
+ # PN/PV are already set by recipetool core & desc can be extremely long
+ excluded_fields = [
+ 'Description',
+ ]
+ setup_parse_map = {
+ 'Url': 'Home-page',
+ 'Classifiers': 'Classifier',
+ 'Description': 'Summary',
+ }
+ setuparg_map = {
+ 'Home-page': 'url',
+ 'Classifier': 'classifiers',
+ 'Summary': 'description',
+ 'Description': 'long-description',
+ }
+ # Values which are lists, used by the setup.py argument based metadata
+ # extraction method, to determine how to process the setup.py output.
+ setuparg_list_fields = [
+ 'Classifier',
+ 'Requires',
+ 'Provides',
+ 'Obsoletes',
+ 'Platform',
+ 'Supported-Platform',
+ ]
+ setuparg_multi_line_values = ['Description']
+
+ replacements = [
+ ('License', r' +$', ''),
+ ('License', r'^ +', ''),
+ ('License', r' ', '-'),
+ ('License', r'^GNU-', ''),
+ ('License', r'-[Ll]icen[cs]e(,?-[Vv]ersion)?', ''),
+ ('License', r'^UNKNOWN$', ''),
+
+ # Remove currently unhandled version numbers from these variables
+ ('Requires', r' *\([^)]*\)', ''),
+ ('Provides', r' *\([^)]*\)', ''),
+ ('Obsoletes', r' *\([^)]*\)', ''),
+ ('Install-requires', r'^([^><= ]+).*', r'\1'),
+ ('Extras-require', r'^([^><= ]+).*', r'\1'),
+ ('Tests-require', r'^([^><= ]+).*', r'\1'),
+
+ # Remove unhandled dependency on particular features (e.g. foo[PDF])
+ ('Install-requires', r'\[[^\]]+\]$', ''),
+ ]
+
+ def __init__(self):
+ pass
+
+ def parse_setup_py(self, setupscript='./setup.py'):
+ with codecs.open(setupscript) as f:
+ info, imported_modules, non_literals, extensions = gather_setup_info(f)
+
+ def _map(key):
+ key = key.replace('_', '-')
+ key = key[0].upper() + key[1:]
+ if key in self.setup_parse_map:
+ key = self.setup_parse_map[key]
+ return key
+
+ # Naive mapping of setup() arguments to PKG-INFO field names
+ for d in [info, non_literals]:
+ for key, value in list(d.items()):
+ if key is None:
+ continue
+ new_key = _map(key)
+ if new_key != key:
+ del d[key]
+ d[new_key] = value
+
+ return info, 'setuptools' in imported_modules, non_literals, extensions
+
+ def get_setup_args_info(self, setupscript='./setup.py'):
+ cmd = ['python3', setupscript]
+ info = {}
+ keys = set(self.bbvar_map.keys())
+ keys |= set(self.setuparg_list_fields)
+ keys |= set(self.setuparg_multi_line_values)
+ grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
+ for index, keys in grouped_keys:
+ if index == (True, False):
+ # Splitlines output for each arg as a list value
+ for key in keys:
+ arg = self.setuparg_map.get(key, key.lower())
+ try:
+ arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
+ except (OSError, subprocess.CalledProcessError):
+ pass
+ else:
+ info[key] = [l.rstrip() for l in arg_info.splitlines()]
+ elif index == (False, True):
+ # Entire output for each arg
+ for key in keys:
+ arg = self.setuparg_map.get(key, key.lower())
+ try:
+ arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
+ except (OSError, subprocess.CalledProcessError):
+ pass
+ else:
+ info[key] = arg_info
+ else:
+ info.update(self.get_setup_byline(list(keys), setupscript))
+ return info
+
+ def get_setup_byline(self, fields, setupscript='./setup.py'):
+ info = {}
+
+ cmd = ['python3', setupscript]
+ cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
+ try:
+ info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
+ except (OSError, subprocess.CalledProcessError):
+ pass
+ else:
+ if len(fields) != len(info_lines):
+ logger.error('Mismatch between setup.py output lines and number of fields')
+ sys.exit(1)
+
+ for lineno, line in enumerate(info_lines):
+ line = line.rstrip()
+ info[fields[lineno]] = line
+ return info
+
+ def get_pkginfo(self, pkginfo_fn):
+ msg = email.message_from_file(open(pkginfo_fn, 'r'))
+ msginfo = {}
+ for field in msg.keys():
+ values = msg.get_all(field)
+ if len(values) == 1:
+ msginfo[field] = values[0]
+ else:
+ msginfo[field] = values
+ return msginfo
+
+ def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
+ if 'Package-dir' in setup_info:
+ package_dir = setup_info['Package-dir']
+ else:
+ package_dir = {}
+
+ dist = setuptools.Distribution()
+
+ class PackageDir(setuptools.command.build_py.build_py):
+ def __init__(self, package_dir):
+ self.package_dir = package_dir
+ self.dist = dist
+ super().__init__(self.dist)
+
+ pd = PackageDir(package_dir)
+ to_scan = []
+ if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
+ if 'Py-modules' in setup_info:
+ for module in setup_info['Py-modules']:
+ try:
+ package, module = module.rsplit('.', 1)
+ except ValueError:
+ package, module = '.', module
+ module_path = os.path.join(pd.get_package_dir(package), module + '.py')
+ to_scan.append(module_path)
+
+ if 'Packages' in setup_info:
+ for package in setup_info['Packages']:
+ to_scan.append(pd.get_package_dir(package))
+
+ if 'Scripts' in setup_info:
+ to_scan.extend(setup_info['Scripts'])
+ else:
+ logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")
+
+ if not to_scan:
+ to_scan = ['.']
+
+ logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
+
+ provided_packages = self.parse_pkgdata_for_python_packages()
+ scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
+ mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
+ for dep in scanned_deps:
+ mapped = provided_packages.get(dep)
+ if mapped:
+ logger.debug('Mapped %s to %s' % (dep, mapped))
+ mapped_deps.add(mapped)
+ else:
+ logger.debug('Could not map %s' % dep)
+ unmapped_deps.add(dep)
+ return mapped_deps, unmapped_deps
+
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
+
if 'buildsystem' in handled:
return False
+ logger.debug("Trying setup.py parser")
+
# Check for non-zero size setup.py files
setupfiles = RecipeHandler.checkfiles(srctree, ['setup.py'])
for fn in setupfiles:
if os.path.getsize(fn):
break
else:
+ logger.debug("No setup.py found")
return False
# setup.py is always parsed to get at certain required information, such as
@@ -254,51 +660,16 @@ class PythonRecipeHandler(RecipeHandler):
if license_str:
for i, line in enumerate(lines_before):
- if line.startswith('LICENSE = '):
+ if line.startswith('##LICENSE_PLACEHOLDER##'):
lines_before.insert(i, '# NOTE: License in setup.py/PKGINFO is: %s' % license_str)
break
if 'Classifier' in info:
- existing_licenses = info.get('License', '')
- licenses = []
- for classifier in info['Classifier']:
- if classifier in self.classifier_license_map:
- license = self.classifier_license_map[classifier]
- if license == 'Apache' and 'Apache-2.0' in existing_licenses:
- license = 'Apache-2.0'
- elif license == 'GPL':
- if 'GPL-2.0' in existing_licenses or 'GPLv2' in existing_licenses:
- license = 'GPL-2.0'
- elif 'GPL-3.0' in existing_licenses or 'GPLv3' in existing_licenses:
- license = 'GPL-3.0'
- elif license == 'LGPL':
- if 'LGPL-2.1' in existing_licenses or 'LGPLv2.1' in existing_licenses:
- license = 'LGPL-2.1'
- elif 'LGPL-2.0' in existing_licenses or 'LGPLv2' in existing_licenses:
- license = 'LGPL-2.0'
- elif 'LGPL-3.0' in existing_licenses or 'LGPLv3' in existing_licenses:
- license = 'LGPL-3.0'
- licenses.append(license)
-
- if licenses:
- info['License'] = ' & '.join(licenses)
-
- # Map PKG-INFO & setup.py fields to bitbake variables
- for field, values in info.items():
- if field in self.excluded_fields:
- continue
+ license = self.handle_classifier_license(info['Classifier'], info.get('License', ''))
+ if license:
+ info['License'] = license
- if field not in self.bbvar_map:
- continue
-
- if isinstance(values, str):
- value = values
- else:
- value = ' '.join(str(v) for v in values if v)
-
- bbvar = self.bbvar_map[field]
- if bbvar not in extravalues and value:
- extravalues[bbvar] = value
+ self.map_info_to_bbvar(info, extravalues)
mapped_deps, unmapped_deps = self.scan_setup_python_deps(srctree, setup_info, setup_non_literals)
@@ -355,279 +726,283 @@ class PythonRecipeHandler(RecipeHandler):
handled.append('buildsystem')
- def get_pkginfo(self, pkginfo_fn):
- msg = email.message_from_file(open(pkginfo_fn, 'r'))
- msginfo = {}
- for field in msg.keys():
- values = msg.get_all(field)
- if len(values) == 1:
- msginfo[field] = values[0]
- else:
- msginfo[field] = values
- return msginfo
+class PythonPyprojectTomlRecipeHandler(PythonRecipeHandler):
+ """Base class to support PEP517 and PEP518
+
+ PEP517 https://peps.python.org/pep-0517/#source-trees
+ PEP518 https://peps.python.org/pep-0518/#build-system-table
+ """
+ # bitbake currently supports the following backends
+ build_backend_map = {
+ "setuptools.build_meta": "python_setuptools_build_meta",
+ "poetry.core.masonry.api": "python_poetry_core",
+ "flit_core.buildapi": "python_flit_core",
+ "hatchling.build": "python_hatchling",
+ "maturin": "python_maturin",
+ "mesonpy": "python_mesonpy",
+ }
- def parse_setup_py(self, setupscript='./setup.py'):
- with codecs.open(setupscript) as f:
- info, imported_modules, non_literals, extensions = gather_setup_info(f)
+ # setuptools.build_meta and flit declare project metadata in the "project" section of pyproject.toml,
+ # according to PEP-621: https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata
+ # while poetry uses the "tool.poetry" section, according to its official documentation: https://python-poetry.org/docs/pyproject/
+ # Keys in the "project" and "tool.poetry" sections are almost the same, except for HOMEPAGE, which is "homepage" for tool.poetry
+ # and "Homepage" for the "project" section, so keep both
+ bbvar_map = {
+ "name": "PN",
+ "version": "PV",
+ "Homepage": "HOMEPAGE",
+ "homepage": "HOMEPAGE",
+ "description": "SUMMARY",
+ "license": "LICENSE",
+ "dependencies": "RDEPENDS:${PN}",
+ "requires": "DEPENDS",
+ }
- def _map(key):
- key = key.replace('_', '-')
- key = key[0].upper() + key[1:]
- if key in self.setup_parse_map:
- key = self.setup_parse_map[key]
- return key
+ replacements = [
+ ("license", r" +$", ""),
+ ("license", r"^ +", ""),
+ ("license", r" ", "-"),
+ ("license", r"^GNU-", ""),
+ ("license", r"-[Ll]icen[cs]e(,?-[Vv]ersion)?", ""),
+ ("license", r"^UNKNOWN$", ""),
+ # Remove currently unhandled version numbers from these variables
+ ("requires", r"\[[^\]]+\]$", ""),
+ ("requires", r"^([^><= ]+).*", r"\1"),
+ ("dependencies", r"\[[^\]]+\]$", ""),
+ ("dependencies", r"^([^><= ]+).*", r"\1"),
+ ]
- # Naive mapping of setup() arguments to PKG-INFO field names
- for d in [info, non_literals]:
- for key, value in list(d.items()):
- if key is None:
- continue
- new_key = _map(key)
- if new_key != key:
- del d[key]
- d[new_key] = value
+ excluded_native_pkgdeps = [
+ # already provided by python_setuptools_build_meta.bbclass
+ "python3-setuptools-native",
+ "python3-wheel-native",
+ # already provided by python_poetry_core.bbclass
+ "python3-poetry-core-native",
+ # already provided by python_flit_core.bbclass
+ "python3-flit-core-native",
+ # already provided by python_mesonpy
+ "python3-meson-python-native",
+ ]
- return info, 'setuptools' in imported_modules, non_literals, extensions
+ # add here a list of known and often used packages and the corresponding bitbake package
+ known_deps_map = {
+ "setuptools": "python3-setuptools",
+ "wheel": "python3-wheel",
+ "poetry-core": "python3-poetry-core",
+ "flit_core": "python3-flit-core",
+ "setuptools-scm": "python3-setuptools-scm",
+ "hatchling": "python3-hatchling",
+ "hatch-vcs": "python3-hatch-vcs",
+ "meson-python" : "python3-meson-python",
+ }
- def get_setup_args_info(self, setupscript='./setup.py'):
- cmd = ['python3', setupscript]
- info = {}
- keys = set(self.bbvar_map.keys())
- keys |= set(self.setuparg_list_fields)
- keys |= set(self.setuparg_multi_line_values)
- grouped_keys = itertools.groupby(keys, lambda k: (k in self.setuparg_list_fields, k in self.setuparg_multi_line_values))
- for index, keys in grouped_keys:
- if index == (True, False):
- # Splitlines output for each arg as a list value
- for key in keys:
- arg = self.setuparg_map.get(key, key.lower())
- try:
- arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
- except (OSError, subprocess.CalledProcessError):
- pass
- else:
- info[key] = [l.rstrip() for l in arg_info.splitlines()]
- elif index == (False, True):
- # Entire output for each arg
- for key in keys:
- arg = self.setuparg_map.get(key, key.lower())
- try:
- arg_info = self.run_command(cmd + ['--' + arg], cwd=os.path.dirname(setupscript))
- except (OSError, subprocess.CalledProcessError):
- pass
- else:
- info[key] = arg_info
- else:
- info.update(self.get_setup_byline(list(keys), setupscript))
- return info
+ def __init__(self):
+ pass
- def get_setup_byline(self, fields, setupscript='./setup.py'):
+ def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
info = {}
+ metadata = {}
- cmd = ['python3', setupscript]
- cmd.extend('--' + self.setuparg_map.get(f, f.lower()) for f in fields)
- try:
- info_lines = self.run_command(cmd, cwd=os.path.dirname(setupscript)).splitlines()
- except (OSError, subprocess.CalledProcessError):
- pass
- else:
- if len(fields) != len(info_lines):
- logger.error('Mismatch between setup.py output lines and number of fields')
- sys.exit(1)
-
- for lineno, line in enumerate(info_lines):
- line = line.rstrip()
- info[fields[lineno]] = line
- return info
-
- def apply_info_replacements(self, info):
- for variable, search, replace in self.replacements:
- if variable not in info:
- continue
-
- def replace_value(search, replace, value):
- if replace is None:
- if re.search(search, value):
- return None
- else:
- new_value = re.sub(search, replace, value)
- if value != new_value:
- return new_value
- return value
-
- value = info[variable]
- if isinstance(value, str):
- new_value = replace_value(search, replace, value)
- if new_value is None:
- del info[variable]
- elif new_value != value:
- info[variable] = new_value
- elif hasattr(value, 'items'):
- for dkey, dvalue in list(value.items()):
- new_list = []
- for pos, a_value in enumerate(dvalue):
- new_value = replace_value(search, replace, a_value)
- if new_value is not None and new_value != value:
- new_list.append(new_value)
-
- if value != new_list:
- value[dkey] = new_list
- else:
- new_list = []
- for pos, a_value in enumerate(value):
- new_value = replace_value(search, replace, a_value)
- if new_value is not None and new_value != value:
- new_list.append(new_value)
-
- if value != new_list:
- info[variable] = new_list
-
- def scan_setup_python_deps(self, srctree, setup_info, setup_non_literals):
- if 'Package-dir' in setup_info:
- package_dir = setup_info['Package-dir']
- else:
- package_dir = {}
-
- dist = setuptools.Distribution()
-
- class PackageDir(setuptools.command.build_py.build_py):
- def __init__(self, package_dir):
- self.package_dir = package_dir
- self.dist = dist
- super().__init__(self.dist)
-
- pd = PackageDir(package_dir)
- to_scan = []
- if not any(v in setup_non_literals for v in ['Py-modules', 'Scripts', 'Packages']):
- if 'Py-modules' in setup_info:
- for module in setup_info['Py-modules']:
- try:
- package, module = module.rsplit('.', 1)
- except ValueError:
- package, module = '.', module
- module_path = os.path.join(pd.get_package_dir(package), module + '.py')
- to_scan.append(module_path)
+ if 'buildsystem' in handled:
+ return False
- if 'Packages' in setup_info:
- for package in setup_info['Packages']:
- to_scan.append(pd.get_package_dir(package))
+ logger.debug("Trying pyproject.toml parser")
- if 'Scripts' in setup_info:
- to_scan.extend(setup_info['Scripts'])
+ # Check for non-zero size setup.py files
+ setupfiles = RecipeHandler.checkfiles(srctree, ["pyproject.toml"])
+ for fn in setupfiles:
+ if os.path.getsize(fn):
+ break
else:
- logger.info("Scanning the entire source tree, as one or more of the following setup keywords are non-literal: py_modules, scripts, packages.")
-
- if not to_scan:
- to_scan = ['.']
-
- logger.info("Scanning paths for packages & dependencies: %s", ', '.join(to_scan))
+ logger.debug("No pyproject.toml found")
+ return False
- provided_packages = self.parse_pkgdata_for_python_packages()
- scanned_deps = self.scan_python_dependencies([os.path.join(srctree, p) for p in to_scan])
- mapped_deps, unmapped_deps = set(self.base_pkgdeps), set()
- for dep in scanned_deps:
- mapped = provided_packages.get(dep)
- if mapped:
- logger.debug('Mapped %s to %s' % (dep, mapped))
- mapped_deps.add(mapped)
- else:
- logger.debug('Could not map %s' % dep)
- unmapped_deps.add(dep)
- return mapped_deps, unmapped_deps
+ setupscript = os.path.join(srctree, "pyproject.toml")
- def scan_python_dependencies(self, paths):
- deps = set()
try:
- dep_output = self.run_command(['pythondeps', '-d'] + paths)
- except (OSError, subprocess.CalledProcessError):
- pass
- else:
- for line in dep_output.splitlines():
- line = line.rstrip()
- dep, filename = line.split('\t', 1)
- if filename.endswith('/setup.py'):
- continue
- deps.add(dep)
+ try:
+ import tomllib
+ except ImportError:
+ try:
+ import tomli as tomllib
+ except ImportError:
+ logger.error("Neither 'tomllib' nor 'tomli' could be imported, cannot scan pyproject.toml.")
+ return False
+
+ try:
+ with open(setupscript, "rb") as f:
+ config = tomllib.load(f)
+ except Exception:
+ logger.exception("Failed to parse pyproject.toml")
+ return False
+
+ build_backend = config["build-system"]["build-backend"]
+ if build_backend in self.build_backend_map:
+ classes.append(self.build_backend_map[build_backend])
+ else:
+ logger.error(
+ "Unsupported build-backend: %s, cannot use pyproject.toml. Will try to use legacy setup.py"
+ % build_backend
+ )
+ return False
- try:
- provides_output = self.run_command(['pythondeps', '-p'] + paths)
- except (OSError, subprocess.CalledProcessError):
- pass
- else:
- provides_lines = (l.rstrip() for l in provides_output.splitlines())
- provides = set(l for l in provides_lines if l and l != 'setup')
- deps -= provides
+ licfile = ""
- return deps
+ if build_backend == "poetry.core.masonry.api":
+ if "tool" in config and "poetry" in config["tool"]:
+ metadata = config["tool"]["poetry"]
+ else:
+ if "project" in config:
+ metadata = config["project"]
+
+ if metadata:
+ for field, values in metadata.items():
+ if field == "license":
+ # For setuptools.build_meta and flit, license is a table,
+ # but for poetry, license is a string.
+ # For hatchling, both a table (jsonschema) and a string (iniconfig) have been used.
+ if build_backend == "poetry.core.masonry.api":
+ value = values
+ else:
+ value = values.get("text", "")
+ if not value:
+ licfile = values.get("file", "")
+ continue
+ elif field == "dependencies" and build_backend == "poetry.core.masonry.api":
+ # For poetry backend, "dependencies" section looks like:
+ # [tool.poetry.dependencies]
+ # requests = "^2.13.0"
+ # requests = { version = "^2.13.0", source = "private" }
+ # See https://python-poetry.org/docs/master/pyproject/#dependencies-and-dependency-groups for more details
+ # This class doesn't handle versions anyway, so we just get the dependency names here and construct a list
+ value = []
+ for k in values.keys():
+ value.append(k)
+ elif isinstance(values, dict):
+ for k, v in values.items():
+ info[k] = v
+ continue
+ else:
+ value = values
- def parse_pkgdata_for_python_packages(self):
- suffixes = [t[0] for t in imp.get_suffixes()]
- pkgdata_dir = tinfoil.config_data.getVar('PKGDATA_DIR')
+ info[field] = value
- ldata = tinfoil.config_data.createCopy()
- bb.parse.handle('classes-recipe/python3-dir.bbclass', ldata, True)
- python_sitedir = ldata.getVar('PYTHON_SITEPACKAGES_DIR')
+ # Grab the license value before applying replacements
+ license_str = info.get("license", "").strip()
- dynload_dir = os.path.join(os.path.dirname(python_sitedir), 'lib-dynload')
- python_dirs = [python_sitedir + os.sep,
- os.path.join(os.path.dirname(python_sitedir), 'dist-packages') + os.sep,
- os.path.dirname(python_sitedir) + os.sep]
- packages = {}
- for pkgdatafile in glob.glob('{}/runtime/*'.format(pkgdata_dir)):
- files_info = None
- with open(pkgdatafile, 'r') as f:
- for line in f.readlines():
- field, value = line.split(': ', 1)
- if field.startswith('FILES_INFO'):
- files_info = ast.literal_eval(value)
+ if license_str:
+ for i, line in enumerate(lines_before):
+ if line.startswith("##LICENSE_PLACEHOLDER##"):
+ lines_before.insert(
+ i, "# NOTE: License in pyproject.toml is: %s" % license_str
+ )
break
- else:
- continue
- for fn in files_info:
- for suffix in suffixes:
- if fn.endswith(suffix):
- break
- else:
- continue
+ info["requires"] = config["build-system"]["requires"]
+
+ self.apply_info_replacements(info)
+
+ if "classifiers" in info:
+ license = self.handle_classifier_license(
+ info["classifiers"], info.get("license", "")
+ )
+ if license:
+ if licfile:
+ lines = []
+ md5value = bb.utils.md5_file(os.path.join(srctree, licfile))
+ lines.append('LICENSE = "%s"' % license)
+ lines.append(
+ 'LIC_FILES_CHKSUM = "file://%s;md5=%s"'
+ % (licfile, md5value)
+ )
+ lines.append("")
+
+ # Replace the placeholder so we get the values in the right place in the recipe file
+ try:
+ pos = lines_before.index("##LICENSE_PLACEHOLDER##")
+ except ValueError:
+ pos = -1
+ if pos == -1:
+ lines_before.extend(lines)
+ else:
+ lines_before[pos : pos + 1] = lines
- if fn.startswith(dynload_dir + os.sep):
- if '/.debug/' in fn:
- continue
- base = os.path.basename(fn)
- provided = base.split('.', 1)[0]
- packages[provided] = os.path.basename(pkgdatafile)
- continue
+ handled.append(("license", [license, licfile, md5value]))
+ else:
+ info["license"] = license
- for python_dir in python_dirs:
- if fn.startswith(python_dir):
- relpath = fn[len(python_dir):]
- relstart, _, relremaining = relpath.partition(os.sep)
- if relstart.endswith('.egg'):
- relpath = relremaining
- base, _ = os.path.splitext(relpath)
+ provided_packages = self.parse_pkgdata_for_python_packages()
+ provided_packages.update(self.known_deps_map)
+ native_mapped_deps, native_unmapped_deps = set(), set()
+ mapped_deps, unmapped_deps = set(), set()
- if '/.debug/' in base:
- continue
- if os.path.basename(base) == '__init__':
- base = os.path.dirname(base)
- base = base.replace(os.sep + os.sep, os.sep)
- provided = base.replace(os.sep, '.')
- packages[provided] = os.path.basename(pkgdatafile)
- return packages
+ if "requires" in info:
+ for require in info["requires"]:
+ mapped = provided_packages.get(require)
- @classmethod
- def run_command(cls, cmd, **popenargs):
- if 'stderr' not in popenargs:
- popenargs['stderr'] = subprocess.STDOUT
- try:
- return subprocess.check_output(cmd, **popenargs).decode('utf-8')
- except OSError as exc:
- logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc)
- raise
- except subprocess.CalledProcessError as exc:
- logger.error('Unable to run `{}`: {}', ' '.join(cmd), exc.output)
- raise
+ if mapped:
+ logger.debug("Mapped %s to %s" % (require, mapped))
+ native_mapped_deps.add(mapped)
+ else:
+ logger.debug("Could not map %s" % require)
+ native_unmapped_deps.add(require)
+
+ info.pop("requires")
+
+ if native_mapped_deps != set():
+ native_mapped_deps = {
+ item + "-native" for item in native_mapped_deps
+ }
+ native_mapped_deps -= set(self.excluded_native_pkgdeps)
+ if native_mapped_deps != set():
+ info["requires"] = " ".join(sorted(native_mapped_deps))
+
+ if native_unmapped_deps:
+ lines_after.append("")
+ lines_after.append(
+ "# WARNING: We were unable to map the following python package/module"
+ )
+ lines_after.append(
+ "# dependencies to the bitbake packages which include them:"
+ )
+ lines_after.extend(
+ "# {}".format(d) for d in sorted(native_unmapped_deps)
+ )
+
+ if "dependencies" in info:
+ for dependency in info["dependencies"]:
+ mapped = provided_packages.get(dependency)
+ if mapped:
+ logger.debug("Mapped %s to %s" % (dependency, mapped))
+ mapped_deps.add(mapped)
+ else:
+ logger.debug("Could not map %s" % dependency)
+ unmapped_deps.add(dependency)
+
+ info.pop("dependencies")
+
+ if mapped_deps != set():
+ info["dependencies"] = " ".join(sorted(mapped_deps))
+
+ if unmapped_deps:
+ lines_after.append("")
+ lines_after.append(
+ "# WARNING: We were unable to map the following python package/module"
+ )
+ lines_after.append(
+ "# runtime dependencies to the bitbake packages which include them:"
+ )
+ lines_after.extend(
+ "# {}".format(d) for d in sorted(unmapped_deps)
+ )
+
+ self.map_info_to_bbvar(info, extravalues)
+
+ handled.append("buildsystem")
+ except Exception:
+ logger.exception("Failed to correctly handle pyproject.toml, falling back to another method")
+ return False
def gather_setup_info(fileobj):
@@ -743,5 +1118,7 @@ def has_non_literals(value):
def register_recipe_handlers(handlers):
- # We need to make sure this is ahead of the makefile fallback handler
- handlers.append((PythonRecipeHandler(), 70))
+ # We need to make sure these are ahead of the makefile fallback handler,
+ # and that the pyproject.toml handler is ahead of the setup.py handler
+ handlers.append((PythonPyprojectTomlRecipeHandler(), 75))
+ handlers.append((PythonSetupPyRecipeHandler(), 70))
diff --git a/scripts/lib/recipetool/create_go.py b/scripts/lib/recipetool/create_go.py
new file mode 100644
index 0000000000..a85a2f2786
--- /dev/null
+++ b/scripts/lib/recipetool/create_go.py
@@ -0,0 +1,777 @@
+# Recipe creation tool - go support plugin
+#
+# The code is based on golang internals. See the affected
+# methods for further reference and information.
+#
+# Copyright (C) 2023 Weidmueller GmbH & Co KG
+# Author: Lukas Funke <lukas.funke@weidmueller.com>
+#
+# SPDX-License-Identifier: GPL-2.0-only
+#
+
+
+from collections import namedtuple
+from enum import Enum
+from html.parser import HTMLParser
+from recipetool.create import RecipeHandler, handle_license_vars
+from recipetool.create import guess_license, tidy_licenses, fixup_license
+from recipetool.create import determine_from_url
+from urllib.error import URLError, HTTPError
+
+import bb.utils
+import json
+import logging
+import os
+import re
+import subprocess
+import sys
+import shutil
+import tempfile
+import urllib.parse
+import urllib.request
+
+
+GoImport = namedtuple('GoImport', 'root vcs url suffix')
+logger = logging.getLogger('recipetool')
+CodeRepo = namedtuple(
+ 'CodeRepo', 'path codeRoot codeDir pathMajor pathPrefix pseudoMajor')
+
+tinfoil = None
+
+# Regular expression to parse pseudo semantic version
+# see https://go.dev/ref/mod#pseudo-versions
+re_pseudo_semver = re.compile(
+ r"^v[0-9]+\.(0\.0-|\d+\.\d+-([^+]*\.)?0\.)(?P<utc>\d{14})-(?P<commithash>[A-Za-z0-9]+)(\+[0-9A-Za-z-]+(\.[0-9A-Za-z-]+)*)?$")
+# Regular expression to parse semantic version
+re_semver = re.compile(
+ r"^v(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$")
+
+
+def tinfoil_init(instance):
+ global tinfoil
+ tinfoil = instance
+
+
+class GoRecipeHandler(RecipeHandler):
+ """Class to handle the go recipe creation"""
+
+ @staticmethod
+ def __ensure_go():
+ """Check if the 'go' command is available in the recipes"""
+ recipe = "go-native"
+ if not tinfoil.recipes_parsed:
+ tinfoil.parse_recipes()
+ try:
+ rd = tinfoil.parse_recipe(recipe)
+ except bb.providers.NoProvider:
+ bb.error(
+ "Nothing provides '%s' which is required for the build" % (recipe))
+ bb.note(
+ "You will likely need to add a layer that provides '%s'" % (recipe))
+ return None
+
+ bindir = rd.getVar('STAGING_BINDIR_NATIVE')
+ gopath = os.path.join(bindir, 'go')
+
+ if not os.path.exists(gopath):
+ tinfoil.build_targets(recipe, 'addto_recipe_sysroot')
+
+ if not os.path.exists(gopath):
+ logger.error(
+ '%s required to process specified source, but %s did not seem to populate it' % ('go', recipe))
+ return None
+
+ return bindir
+
+ def __resolve_repository_static(self, modulepath):
+ """Resolve the repository in a static manner
+
+ The method is based on the go implementation of
+ `repoRootFromVCSPaths` in
+ https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
+ """
+
+ url = urllib.parse.urlparse("https://" + modulepath)
+ req = urllib.request.Request(url.geturl())
+
+ try:
+ resp = urllib.request.urlopen(req)
+ # Some module paths are just redirects to github (or some other vcs
+ # host). Therefore, we check whether this module path redirects
+ # somewhere else
+ if resp.geturl() != url.geturl():
+ bb.debug(1, "%s is redirectred to %s" %
+ (url.geturl(), resp.geturl()))
+ url = urllib.parse.urlparse(resp.geturl())
+ modulepath = url.netloc + url.path
+
+ except URLError as url_err:
+ # This is probably because the module path
+ # contains the subdir and major path. Thus,
+ # we ignore this error for now
+ logger.debug(
+ "Failed to fetch page from [%s]: %s", url, str(url_err))
+
+ host, _, _ = modulepath.partition('/')
+
+ class vcs(Enum):
+ pathprefix = "pathprefix"
+ regexp = "regexp"
+ type = "type"
+ repo = "repo"
+ check = "check"
+ schemelessRepo = "schemelessRepo"
+
+ # GitHub
+ vcsGitHub = {}
+ vcsGitHub[vcs.pathprefix] = "github.com"
+ vcsGitHub[vcs.regexp] = re.compile(
+ r'^(?P<root>github\.com/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsGitHub[vcs.type] = "git"
+ vcsGitHub[vcs.repo] = "https://\\g<root>"
+
+ # Bitbucket
+ vcsBitbucket = {}
+ vcsBitbucket[vcs.pathprefix] = "bitbucket.org"
+ vcsBitbucket[vcs.regexp] = re.compile(
+ r'^(?P<root>bitbucket\.org/(?P<bitname>[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+))(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsBitbucket[vcs.type] = "git"
+ vcsBitbucket[vcs.repo] = "https://\\g<root>"
+
+ # IBM DevOps Services (JazzHub)
+ vcsIBMDevOps = {}
+ vcsIBMDevOps[vcs.pathprefix] = "hub.jazz.net/git"
+ vcsIBMDevOps[vcs.regexp] = re.compile(
+ r'^(?P<root>hub\.jazz\.net/git/[a-z0-9]+/[A-Za-z0-9_.\-]+)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsIBMDevOps[vcs.type] = "git"
+ vcsIBMDevOps[vcs.repo] = "https://\\g<root>"
+
+ # Git at Apache
+ vcsApacheGit = {}
+ vcsApacheGit[vcs.pathprefix] = "git.apache.org"
+ vcsApacheGit[vcs.regexp] = re.compile(
+ r'^(?P<root>git\.apache\.org/[a-z0-9_.\-]+\.git)(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsApacheGit[vcs.type] = "git"
+ vcsApacheGit[vcs.repo] = "https://\\g<root>"
+
+ # Git at OpenStack
+ vcsOpenStackGit = {}
+ vcsOpenStackGit[vcs.pathprefix] = "git.openstack.org"
+ vcsOpenStackGit[vcs.regexp] = re.compile(
+ r'^(?P<root>git\.openstack\.org/[A-Za-z0-9_.\-]+/[A-Za-z0-9_.\-]+)(\.git)?(/(?P<suffix>[A-Za-z0-9_.\-]+))*$')
+ vcsOpenStackGit[vcs.type] = "git"
+ vcsOpenStackGit[vcs.repo] = "https://\\g<root>"
+
+ # chiselapp.com for fossil
+ vcsChiselapp = {}
+ vcsChiselapp[vcs.pathprefix] = "chiselapp.com"
+ vcsChiselapp[vcs.regexp] = re.compile(
+ r'^(?P<root>chiselapp\.com/user/[A-Za-z0-9]+/repository/[A-Za-z0-9_.\-]+)$')
+ vcsChiselapp[vcs.type] = "fossil"
+ vcsChiselapp[vcs.repo] = "https://\\g<root>"
+
+ # General syntax for any server.
+ # Must be last.
+ vcsGeneralServer = {}
+ vcsGeneralServer[vcs.regexp] = re.compile(
+ "(?P<root>(?P<repo>([a-z0-9.\\-]+\\.)+[a-z0-9.\\-]+(:[0-9]+)?(/~?[A-Za-z0-9_.\\-]+)+?)\\.(?P<vcs>bzr|fossil|git|hg|svn))(/~?(?P<suffix>[A-Za-z0-9_.\\-]+))*$")
+ vcsGeneralServer[vcs.schemelessRepo] = True
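+ # For illustration, "example.org/user/repo.git/sub" would match with
+ # root='example.org/user/repo.git', vcs='git' and suffix='sub'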
+
+ vcsPaths = [vcsGitHub, vcsBitbucket, vcsIBMDevOps,
+ vcsApacheGit, vcsOpenStackGit, vcsChiselapp,
+ vcsGeneralServer]
+
+ if modulepath.startswith("example.net") or modulepath == "rsc.io":
+ logger.warning("Suspicious module path %s" % modulepath)
+ return None
+ if modulepath.startswith("http:") or modulepath.startswith("https:"):
+ logger.warning("Import path should not start with %s %s" %
+ ("http", "https"))
+ return None
+
+ rootpath = None
+ vcstype = None
+ repourl = None
+ suffix = None
+
+ for srv in vcsPaths:
+ m = srv[vcs.regexp].match(modulepath)
+ if vcs.pathprefix in srv:
+ if m and host == srv[vcs.pathprefix]:
+ rootpath = m.group('root')
+ vcstype = srv[vcs.type]
+ repourl = m.expand(srv[vcs.repo])
+ suffix = m.group('suffix')
+ break
+ elif m and srv[vcs.schemelessRepo]:
+ rootpath = m.group('root')
+ vcstype = m.group('vcs')
+ repourl = m.group('repo')
+ suffix = m.group('suffix')
+ break
+
+ return GoImport(rootpath, vcstype, repourl, suffix)
+
+ def __resolve_repository_dynamic(self, modulepath):
+ """Resolve the repository root in a dynamic manner.
+
+ The method is based on the go implementation of
+ `repoRootForImportDynamic` in
+ https://github.com/golang/go/blob/master/src/cmd/go/internal/vcs/vcs.go
+ """
+ url = urllib.parse.urlparse("https://" + modulepath)
+
+ class GoImportHTMLParser(HTMLParser):
+
+ def __init__(self):
+ super().__init__()
+ self.__srv = {}
+
+ def handle_starttag(self, tag, attrs):
+ if tag == 'meta' and list(
+ filter(lambda a: (a[0] == 'name' and a[1] == 'go-import'), attrs)):
+ content = list(
+ filter(lambda a: (a[0] == 'content'), attrs))
+ if content:
+ srv = content[0][1].split()
+ self.__srv[srv[0]] = srv
+
+ def go_import(self, modulepath):
+ if modulepath in self.__srv:
+ srv = self.__srv[modulepath]
+ return GoImport(srv[0], srv[1], srv[2], None)
+ return None
+
+ url = url.geturl() + "?go-get=1"
+ req = urllib.request.Request(url)
+
+ try:
+ body = urllib.request.urlopen(req).read()
+ except HTTPError as http_err:
+ logger.warning(
+ "Unclean status when fetching page from [%s]: %s", url, str(http_err))
+ body = http_err.fp.read()
+ except URLError as url_err:
+ logger.warning(
+ "Failed to fetch page from [%s]: %s", url, str(url_err))
+ return None
+
+ parser = GoImportHTMLParser()
+ parser.feed(body.decode('utf-8'))
+ parser.close()
+
+ return parser.go_import(modulepath)
+
+ def __resolve_from_golang_proxy(self, modulepath, version):
+ """
+ Resolves repository data from golang proxy
+ """
+ url = urllib.parse.urlparse("https://proxy.golang.org/"
+ + modulepath
+ + "/@v/"
+ + version
+ + ".info")
+
+ # Transform url to lower case; the golang proxy doesn't accept mixed case
+ req = urllib.request.Request(url.geturl().lower())
+
+ try:
+ resp = urllib.request.urlopen(req)
+ except URLError as url_err:
+ logger.warning(
+ "Failed to fetch page from [%s]: %s", url, str(url_err))
+ return None
+
+ golang_proxy_res = resp.read().decode('utf-8')
+ modinfo = json.loads(golang_proxy_res)
+
+ if modinfo and 'Origin' in modinfo:
+ origin = modinfo['Origin']
+ _root_url = urllib.parse.urlparse(origin['URL'])
+
+ # We normalize the repo URL since we don't want the scheme in it
+ _subdir = origin['Subdir'] if 'Subdir' in origin else None
+ _root, _, _ = self.__split_path_version(modulepath)
+ if _subdir:
+ _root = _root[:-len(_subdir)].strip('/')
+
+ _commit = origin['Hash']
+ _vcs = origin['VCS']
+ return (GoImport(_root, _vcs, _root_url.geturl(), None), _commit)
+
+ return None
+
+ def __resolve_repository(self, modulepath):
+ """
+ Resolves src uri from go module-path
+ """
+ repodata = self.__resolve_repository_static(modulepath)
+ if not repodata or not repodata.url:
+ repodata = self.__resolve_repository_dynamic(modulepath)
+ if not repodata or not repodata.url:
+ logger.error(
+ "Could not resolve repository for module path '%s'" % modulepath)
+ # There is no way to recover from this
+ sys.exit(14)
+ if repodata:
+ logger.debug(1, "Resolved download path for import '%s' => %s" % (
+ modulepath, repodata.url))
+ return repodata
+
+ def __split_path_version(self, path):
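+ # Split a module path into (prefix, pathMajor, ok), modelled on go's
+ # module.SplitPathVersion. For illustration:
+ # "example.com/mod/v2" -> ("example.com/mod", "/v2", True)
+ # "example.com/mod" -> ("example.com/mod", "", True)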
+ i = len(path)
+ dot = False
+ for j in range(i, 0, -1):
+ if path[j - 1] < '0' or path[j - 1] > '9':
+ break
+ if path[j - 1] == '.':
+ dot = True
+ break
+ i = j - 1
+
+ if i <= 1 or i == len(
+ path) or path[i - 1] != 'v' or path[i - 2] != '/':
+ return path, "", True
+
+ prefix, pathMajor = path[:i - 2], path[i - 2:]
+ if dot or len(
+ pathMajor) <= 2 or pathMajor[2] == '0' or pathMajor == "/v1":
+ return path, "", False
+
+ return prefix, pathMajor, True
+
+ def __get_path_major(self, pathMajor):
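+ # Strip the leading separator from a major-version suffix; for
+ # illustration, both "/v2" and ".v2-unstable" yield "v2"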
+ if not pathMajor:
+ return ""
+
+ if pathMajor[0] != '/' and pathMajor[0] != '.':
+ logger.error(
+ "pathMajor suffix %s passed to PathMajorPrefix lacks separator", pathMajor)
+
+ if pathMajor.startswith(".v") and pathMajor.endswith("-unstable"):
+ pathMajor = pathMajor[:-len("-unstable")]
+
+ return pathMajor[1:]
+
+ def __build_coderepo(self, repo, path):
+ codedir = ""
+ pathprefix, pathMajor, _ = self.__split_path_version(path)
+ if repo.root == path:
+ pathprefix = path
+ elif path.startswith(repo.root):
+ codedir = pathprefix[len(repo.root):].strip('/')
+
+ pseudoMajor = self.__get_path_major(pathMajor)
+
+ logger.debug("root='%s', codedir='%s', prefix='%s', pathMajor='%s', pseudoMajor='%s'",
+ repo.root, codedir, pathprefix, pathMajor, pseudoMajor)
+
+ return CodeRepo(path, repo.root, codedir,
+ pathMajor, pathprefix, pseudoMajor)
+
+ def __resolve_version(self, repo, path, version):
+ hash = None
+ coderoot = self.__build_coderepo(repo, path)
+
+ def vcs_fetch_all():
+ tmpdir = tempfile.mkdtemp()
+ clone_cmd = "%s clone --bare %s %s" % ('git', repo.url, tmpdir)
+ bb.process.run(clone_cmd)
+ log_cmd = "git log --all --pretty='%H %d' --decorate=short"
+ output, _ = bb.process.run(
+ log_cmd, shell=True, stderr=subprocess.PIPE, cwd=tmpdir)
+ bb.utils.prunedir(tmpdir)
+ return output.strip().split('\n')
+
+ def vcs_fetch_remote(tag):
+ # add * to grab ^{}
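+ # 'git ls-remote --tags' output lines look like
+ # <sha1>\trefs/tags/v1.2.3 plus, for annotated tags, a peeled
+ # entry <sha1>\trefs/tags/v1.2.3^{}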
+ refs = {}
+ ls_remote_cmd = "git ls-remote -q --tags {} {}*".format(
+ repo.url, tag)
+ output, _ = bb.process.run(ls_remote_cmd)
+ output = output.strip().split('\n')
+ for line in output:
+ f = line.split(maxsplit=1)
+ if len(f) != 2:
+ continue
+
+ for prefix in ["HEAD", "refs/heads/", "refs/tags/"]:
+ if f[1].startswith(prefix):
+ refs[f[1][len(prefix):]] = f[0]
+
+ for key, hash in refs.items():
+ if key.endswith(r"^{}"):
+ refs[key.strip(r"^{}")] = hash
+
+ return refs.get(tag)
+
+ m_pseudo_semver = re_pseudo_semver.match(version)
+
+ if m_pseudo_semver:
+ remote_refs = vcs_fetch_all()
+ short_commit = m_pseudo_semver.group('commithash')
+ for l in remote_refs:
+ r = l.split(maxsplit=1)
+ sha1 = r[0] if len(r) else None
+ if not sha1:
+ logger.error(
+ "Ups: could not resolve abbref commit for %s" % short_commit)
+
+ elif sha1.startswith(short_commit):
+ hash = sha1
+ break
+ else:
+ m_semver = re_semver.match(version)
+ if m_semver:
+
+ def get_sha1_remote(pattern):
+ rsha1 = None
+ for line in remote_refs:
+ # Split lines of the following format:
+ # 22e90d9b964610628c10f673ca5f85b8c2a2ca9a (tag: sometag)
+ lineparts = line.split(maxsplit=1)
+ sha1 = lineparts[0] if len(lineparts) else None
+ refstring = lineparts[1] if len(
+ lineparts) == 2 else None
+ if refstring:
+ # Normalize tag string and split in case of multiple
+ # regs e.g. (tag: speech/v1.10.0, tag: orchestration/v1.5.0 ...)
+ refs = refstring.strip('(), ').split(',')
+ for ref in refs:
+ if pattern.match(ref.strip()):
+ rsha1 = sha1
+ return rsha1
+
+ semver = "v" + m_semver.group('major') + "."\
+ + m_semver.group('minor') + "."\
+ + m_semver.group('patch') \
+ + (("-" + m_semver.group('prerelease'))
+ if m_semver.group('prerelease') else "")
+
+ tag = os.path.join(
+ coderoot.codeDir, semver) if coderoot.codeDir else semver
+
+ # probe tag using 'ls-remote', which is faster than fetching
+ # complete history
+ hash = vcs_fetch_remote(tag)
+ if not hash:
+ # backup: fetch complete history
+ remote_refs = vcs_fetch_all()
+ hash = get_sha1_remote(
+ re.compile(fr"(tag:|HEAD ->) ({tag})"))
+
+ logger.debug(
+ "Resolving commit for tag '%s' -> '%s'", tag, hash)
+ return hash
+
+ def __generate_srcuri_inline_fcn(self, path, version, replaces=None):
+ """Generate SRC_URI functions for go imports"""
+
+ logger.info("Resolving repository for module %s", path)
+ # First try to resolve repo and commit from golang proxy
+ # Most info is already there and we don't have to go through the
+ # repository or even perform the version resolve magic
+ golang_proxy_info = self.__resolve_from_golang_proxy(path, version)
+ if golang_proxy_info:
+ repo = golang_proxy_info[0]
+ commit = golang_proxy_info[1]
+ else:
+ # Fallback
+ # Resolve repository by 'hand'
+ repo = self.__resolve_repository(path)
+ commit = self.__resolve_version(repo, path, version)
+
+ url = urllib.parse.urlparse(repo.url)
+ repo_url = url.netloc + url.path
+
+ coderoot = self.__build_coderepo(repo, path)
+
+ inline_fcn = "${@go_src_uri("
+ inline_fcn += f"'{repo_url}','{version}'"
+ if repo_url != path:
+ inline_fcn += f",path='{path}'"
+ if coderoot.codeDir:
+ inline_fcn += f",subdir='{coderoot.codeDir}'"
+ if repo.vcs != 'git':
+ inline_fcn += f",vcs='{repo.vcs}'"
+ if replaces:
+ inline_fcn += f",replaces='{replaces}'"
+ if coderoot.pathMajor:
+ inline_fcn += f",pathmajor='{coderoot.pathMajor}'"
+ inline_fcn += ")}"
+
+ return inline_fcn, commit
+
+ def __go_handle_dependencies(self, go_mod, srctree, localfilesdir, extravalues, d):
+
+ import re
+ src_uris = []
+ src_revs = []
+
+ def generate_src_rev(path, version, commithash):
+ src_rev = f"# {path}@{version} => {commithash}\n"
+ # Oops... maybe someone manipulated the source repository and the
+ # version or commit could not be resolved. This is a sign of
+ # a) the supply chain was manipulated (bad)
+ # b) the implementation for the version resolving didn't work
+ # anymore (less bad)
+ if not commithash:
+ src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
+ src_rev += f"#!!! Could not resolve version !!!\n"
+ src_rev += f"#!!! Possible supply chain attack !!!\n"
+ src_rev += f"#!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!\n"
+ src_rev += f"SRCREV_{path.replace('/', '.')} = \"{commithash}\""
+
+ return src_rev
+
+ # We first go over the replacement list, because we are essentially
+ # interested only in the replaced paths
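+ # A 'go mod edit -json' replacement entry looks roughly like:
+ # {"Old": {"Path": "example.com/a"},
+ # "New": {"Path": "example.com/b", "Version": "v1.0.0"}}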
+ if go_mod['Replace']:
+ for replacement in go_mod['Replace']:
+ oldpath = replacement['Old']['Path']
+ path = replacement['New']['Path']
+ version = ''
+ if 'Version' in replacement['New']:
+ version = replacement['New']['Version']
+
+ if os.path.exists(os.path.join(srctree, path)):
+ # the module refers to the local path, remove it from requirement list
+ # because it's a local module
+ go_mod['Require'][:] = [v for v in go_mod['Require'] if v.get('Path') != oldpath]
+ else:
+ # Replace the path and the version, so we don't iterate replacement list anymore
+ for require in go_mod['Require']:
+ if require['Path'] == oldpath:
+ require.update({'Path': path, 'Version': version})
+ break
+
+ for require in go_mod['Require']:
+ path = require['Path']
+ version = require['Version']
+
+ inline_fcn, commithash = self.__generate_srcuri_inline_fcn(
+ path, version)
+ src_uris.append(inline_fcn)
+ src_revs.append(generate_src_rev(path, version, commithash))
+
+ # strip version part from module URL /vXX
+ baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
+ pn, _ = determine_from_url(baseurl)
+ go_mods_basename = "%s-modules.inc" % pn
+
+ go_mods_filename = os.path.join(localfilesdir, go_mods_basename)
+ with open(go_mods_filename, "w") as f:
+ # We introduce this indirection to make the tests a little easier
+ f.write("SRC_URI += \"${GO_DEPENDENCIES_SRC_URI}\"\n")
+ f.write("GO_DEPENDENCIES_SRC_URI = \"\\\n")
+ for uri in src_uris:
+ f.write(" " + uri + " \\\n")
+ f.write("\"\n\n")
+ for rev in src_revs:
+ f.write(rev + "\n")
+
+ extravalues['extrafiles'][go_mods_basename] = go_mods_filename
+
+ def __go_run_cmd(self, cmd, cwd, d):
+ return bb.process.run(cmd, env=dict(os.environ, PATH=d.getVar('PATH')),
+ shell=True, cwd=cwd)
+
+ def __go_native_version(self, d):
+ stdout, _ = self.__go_run_cmd("go version", None, d)
+ m = re.match(r".*\sgo((\d+).(\d+).(\d+))\s([\w\/]*)", stdout)
+ major = int(m.group(2))
+ minor = int(m.group(3))
+ patch = int(m.group(4))
+
+ return major, minor, patch
+
+ def __go_mod_patch(self, srctree, localfilesdir, extravalues, d):
+
+ patchfilename = "go.mod.patch"
+ go_native_version_major, go_native_version_minor, _ = self.__go_native_version(
+ d)
+ self.__go_run_cmd("go mod tidy -go=%d.%d" %
+ (go_native_version_major, go_native_version_minor), srctree, d)
+ stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
+
+ # Create patch in order to upgrade go version
+ self.__go_run_cmd("git diff go.mod > %s" % (patchfilename), srctree, d)
+ # Restore original state
+ self.__go_run_cmd("git checkout HEAD go.mod go.sum", srctree, d)
+
+ go_mod = json.loads(stdout)
+ tmpfile = os.path.join(localfilesdir, patchfilename)
+ shutil.move(os.path.join(srctree, patchfilename), tmpfile)
+
+ extravalues['extrafiles'][patchfilename] = tmpfile
+
+ return go_mod, patchfilename
+
+ def __go_mod_vendor(self, go_mod, srctree, localfilesdir, extravalues, d):
+ # Perform vendoring to retrieve the correct modules.txt
+ tmp_vendor_dir = tempfile.mkdtemp()
+
+ # -v causes go to print modules.txt to stderr
+ _, stderr = self.__go_run_cmd(
+ "go mod vendor -v -o %s" % (tmp_vendor_dir), srctree, d)
+
+ modules_txt_basename = "modules.txt"
+ modules_txt_filename = os.path.join(localfilesdir, modules_txt_basename)
+ with open(modules_txt_filename, "w") as f:
+ f.write(stderr)
+
+ extravalues['extrafiles'][modules_txt_basename] = modules_txt_filename
+
+ licenses = []
+ lic_files_chksum = []
+ licvalues = guess_license(tmp_vendor_dir, d)
+ shutil.rmtree(tmp_vendor_dir)
+
+ if licvalues:
+ for licvalue in licvalues:
+ license = licvalue[0]
+ lics = tidy_licenses(fixup_license(license))
+ lics = [lic for lic in lics if lic not in licenses]
+ if len(lics):
+ licenses.extend(lics)
+ lic_files_chksum.append(
+ 'file://src/${GO_IMPORT}/vendor/%s;md5=%s' % (licvalue[1], licvalue[2]))
+
+ # strip version part from module URL /vXX
+ baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
+ pn, _ = determine_from_url(baseurl)
+ licenses_basename = "%s-licenses.inc" % pn
+
+ licenses_filename = os.path.join(localfilesdir, licenses_basename)
+ with open(licenses_filename, "w") as f:
+ f.write("GO_MOD_LICENSES = \"%s\"\n\n" %
+ ' & '.join(sorted(licenses, key=str.casefold)))
+ # We introduce this indirection to make the tests a little easier
+ f.write("LIC_FILES_CHKSUM += \"${VENDORED_LIC_FILES_CHKSUM}\"\n")
+ f.write("VENDORED_LIC_FILES_CHKSUM = \"\\\n")
+ for lic in lic_files_chksum:
+ f.write(" " + lic + " \\\n")
+ f.write("\"\n")
+
+ extravalues['extrafiles'][licenses_basename] = licenses_filename
+
+ def process(self, srctree, classes, lines_before,
+ lines_after, handled, extravalues):
+
+ if 'buildsystem' in handled:
+ return False
+
+ files = RecipeHandler.checkfiles(srctree, ['go.mod'])
+ if not files:
+ return False
+
+ d = bb.data.createCopy(tinfoil.config_data)
+ go_bindir = self.__ensure_go()
+ if not go_bindir:
+ sys.exit(14)
+
+ d.prependVar('PATH', '%s:' % go_bindir)
+ handled.append('buildsystem')
+ classes.append("go-vendor")
+
+ stdout, _ = self.__go_run_cmd("go mod edit -json", srctree, d)
+
+ go_mod = json.loads(stdout)
+ go_import = go_mod['Module']['Path']
+ go_version_match = re.match(r"(\d+)\.(\d+)", go_mod['Go'])
+ go_version_major = int(go_version_match.group(1))
+ go_version_minor = int(go_version_match.group(2))
+ src_uris = []
+
+ localfilesdir = tempfile.mkdtemp(prefix='recipetool-go-')
+ extravalues.setdefault('extrafiles', {})
+
+ # Use an explicit name determined from the module name because it
+ # might differ from the actual URL for replaced modules
+ # strip version part from module URL /vXX
+ baseurl = re.sub(r'/v(\d+)$', '', go_mod['Module']['Path'])
+ pn, _ = determine_from_url(baseurl)
+
+ # go.mod files with version < 1.17 may not include all indirect
+ # dependencies. Thus, we have to upgrade the go version.
+ if go_version_major == 1 and go_version_minor < 17:
+ logger.warning(
+ "go.mod files generated by Go < 1.17 might have incomplete indirect dependencies.")
+ go_mod, patchfilename = self.__go_mod_patch(srctree, localfilesdir,
+ extravalues, d)
+ src_uris.append(
+ "file://%s;patchdir=src/${GO_IMPORT}" % (patchfilename))
+
+ # Check whether the module is vendored. If so, we have nothing to do.
+ # Otherwise we gather all dependencies and add them to the recipe
+ if not os.path.exists(os.path.join(srctree, "vendor")):
+
+ # Write additional $BPN-modules.inc file
+ self.__go_mod_vendor(go_mod, srctree, localfilesdir, extravalues, d)
+ lines_before.append("LICENSE += \" & ${GO_MOD_LICENSES}\"")
+ lines_before.append("require %s-licenses.inc" % (pn))
+
+ self.__rewrite_src_uri(lines_before, ["file://modules.txt"])
+
+ self.__go_handle_dependencies(go_mod, srctree, localfilesdir, extravalues, d)
+ lines_before.append("require %s-modules.inc" % (pn))
+
+ # Do generic license handling
+ handle_license_vars(srctree, lines_before, handled, extravalues, d)
+ self.__rewrite_lic_uri(lines_before)
+
+ lines_before.append("GO_IMPORT = \"{}\"".format(baseurl))
+ lines_before.append("SRCREV_FORMAT = \"${BPN}\"")
+
+ def __update_lines_before(self, updated, newlines, lines_before):
+ if updated:
+ del lines_before[:]
+ for line in newlines:
+ # Hack to avoid newlines that edit_metadata inserts
+ if line.endswith('\n'):
+ line = line[:-1]
+ lines_before.append(line)
+ return updated
+
+ def __rewrite_lic_uri(self, lines_before):
+
+ def varfunc(varname, origvalue, op, newlines):
+ if varname == 'LIC_FILES_CHKSUM':
+ new_licenses = []
+ licenses = origvalue.split('\\')
+ for license in licenses:
+ if not license:
+ logger.warning("No license file was detected for the main module!")
+ # the license list of the main recipe must be empty
+ # this can happen for example in case of CLOSED license
+ # Fall through to complete recipe generation
+ continue
+ license = license.strip()
+ uri, chksum = license.split(';', 1)
+ url = urllib.parse.urlparse(uri)
+ new_uri = os.path.join(
+ url.scheme + "://", "src", "${GO_IMPORT}", url.netloc + url.path) + ";" + chksum
+ new_licenses.append(new_uri)
+
+ return new_licenses, None, -1, True
+ return origvalue, None, 0, True
+
+ updated, newlines = bb.utils.edit_metadata(
+ lines_before, ['LIC_FILES_CHKSUM'], varfunc)
+ return self.__update_lines_before(updated, newlines, lines_before)
+
+ def __rewrite_src_uri(self, lines_before, additional_uris = []):
+
+ def varfunc(varname, origvalue, op, newlines):
+ if varname == 'SRC_URI':
+ src_uri = ["git://${GO_IMPORT};destsuffix=git/src/${GO_IMPORT};nobranch=1;name=${BPN};protocol=https"]
+ src_uri.extend(additional_uris)
+ return src_uri, None, -1, True
+ return origvalue, None, 0, True
+
+ updated, newlines = bb.utils.edit_metadata(lines_before, ['SRC_URI'], varfunc)
+ return self.__update_lines_before(updated, newlines, lines_before)
+
+
+def register_recipe_handlers(handlers):
+ handlers.append((GoRecipeHandler(), 60))
diff --git a/scripts/lib/recipetool/create_npm.py b/scripts/lib/recipetool/create_npm.py
index 3394a89970..113a89f6a6 100644
--- a/scripts/lib/recipetool/create_npm.py
+++ b/scripts/lib/recipetool/create_npm.py
@@ -13,6 +13,7 @@ import sys
import tempfile
import bb
from bb.fetch2.npm import NpmEnvironment
+from bb.fetch2.npm import npm_package
from bb.fetch2.npmsw import foreach_dependencies
from recipetool.create import RecipeHandler
from recipetool.create import get_license_md5sums
@@ -31,15 +32,6 @@ class NpmRecipeHandler(RecipeHandler):
"""Class to handle the npm recipe creation"""
@staticmethod
- def _npm_name(name):
- """Generate a Yocto friendly npm name"""
- name = re.sub("/", "-", name)
- name = name.lower()
- name = re.sub(r"[^\-a-z0-9]", "", name)
- name = name.strip("-")
- return name
-
- @staticmethod
def _get_registry(lines):
"""Get the registry value from the 'npm://registry' url"""
registry = None
@@ -142,11 +134,10 @@ class NpmRecipeHandler(RecipeHandler):
licfiles.append(os.path.relpath(readme, srctree))
# Handle the dependencies
- def _handle_dependency(name, params, deptree):
- suffix = "-".join([self._npm_name(dep) for dep in deptree])
- destdirs = [os.path.join("node_modules", dep) for dep in deptree]
- destdir = os.path.join(*destdirs)
- packages["${PN}-" + suffix] = destdir
+ def _handle_dependency(name, params, destdir):
+ deptree = destdir.split('node_modules/')
+ suffix = "-".join([npm_package(dep) for dep in deptree])
+ packages["${PN}" + suffix] = destdir
_licfiles_append_fallback_readme_files(destdir)
with open(shrinkwrap_file, "r") as f:
@@ -155,6 +146,23 @@ class NpmRecipeHandler(RecipeHandler):
foreach_dependencies(shrinkwrap, _handle_dependency, dev)
return licfiles, packages
+
+ # Handle the peer dependencies
+ def _handle_peer_dependency(self, shrinkwrap_file):
+ """Check if package has peer dependencies and show warning if it is the case"""
+ with open(shrinkwrap_file, "r") as f:
+ shrinkwrap = json.load(f)
+
+ packages = shrinkwrap.get("packages", {})
+ peer_deps = packages.get("", {}).get("peerDependencies", {})
+
+ for peer_dep in peer_deps:
+ peer_dep_yocto_name = npm_package(peer_dep)
+ bb.warn(peer_dep + " is a peer dependencie of the actual package. " +
+ "Please add this peer dependencie to the RDEPENDS variable as %s and generate its recipe with devtool"
+ % peer_dep_yocto_name)
+
+
def process(self, srctree, classes, lines_before, lines_after, handled, extravalues):
"""Handle the npm recipe creation"""
@@ -173,7 +181,7 @@ class NpmRecipeHandler(RecipeHandler):
if "name" not in data or "version" not in data:
return False
- extravalues["PN"] = self._npm_name(data["name"])
+ extravalues["PN"] = npm_package(data["name"])
extravalues["PV"] = data["version"]
if "description" in data:
@@ -242,7 +250,7 @@ class NpmRecipeHandler(RecipeHandler):
value = origvalue.replace("version=" + data["version"], "version=${PV}")
value = value.replace("version=latest", "version=${PV}")
values = [line.strip() for line in value.strip('\n').splitlines()]
- if "dependencies" in shrinkwrap:
+ if "dependencies" in shrinkwrap.get("packages", {}).get("", {}):
values.append(url_recipe)
return values, None, 4, False
@@ -292,6 +300,9 @@ class NpmRecipeHandler(RecipeHandler):
classes.append("npm")
handled.append("buildsystem")
+ # Check if package has peer dependencies and inform the user
+ self._handle_peer_dependency(shrinkwrap_file)
+
return True
def register_recipe_handlers(handlers):
diff --git a/scripts/lib/recipetool/setvar.py b/scripts/lib/recipetool/setvar.py
index f8e2ee75fb..b5ad335cae 100644
--- a/scripts/lib/recipetool/setvar.py
+++ b/scripts/lib/recipetool/setvar.py
@@ -49,6 +49,7 @@ def setvar(args):
for patch in patches:
for line in patch:
sys.stdout.write(line)
+ tinfoil.modified_files()
return 0