Diffstat (limited to 'meta/lib/oe/recipeutils.py')
-rw-r--r--   meta/lib/oe/recipeutils.py   362
1 file changed, 288 insertions, 74 deletions
diff --git a/meta/lib/oe/recipeutils.py b/meta/lib/oe/recipeutils.py
index 4e0859e6d9..de1fbdd3a8 100644
--- a/meta/lib/oe/recipeutils.py
+++ b/meta/lib/oe/recipeutils.py
@@ -4,6 +4,8 @@
#
# Copyright (C) 2013-2017 Intel Corporation
#
+# SPDX-License-Identifier: GPL-2.0-only
+#
import sys
import os
@@ -16,40 +18,40 @@ import shutil
import re
import fnmatch
import glob
-from collections import OrderedDict, defaultdict
+import bb.tinfoil
+from collections import OrderedDict, defaultdict
+from bb.utils import vercmp_string
# Help us to find places to insert values
-recipe_progression = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRCPV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()']
+recipe_progression = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION', 'LICENSE', 'LICENSE_FLAGS', 'LIC_FILES_CHKSUM', 'PROVIDES', 'DEPENDS', 'PR', 'PV', 'SRCREV', 'SRC_URI', 'S', 'do_fetch()', 'do_unpack()', 'do_patch()', 'EXTRA_OECONF', 'EXTRA_OECMAKE', 'EXTRA_OESCONS', 'do_configure()', 'EXTRA_OEMAKE', 'do_compile()', 'do_install()', 'do_populate_sysroot()', 'INITSCRIPT', 'USERADD', 'GROUPADD', 'PACKAGES', 'FILES', 'RDEPENDS', 'RRECOMMENDS', 'RSUGGESTS', 'RPROVIDES', 'RREPLACES', 'RCONFLICTS', 'ALLOW_EMPTY', 'populate_packages()', 'do_package()', 'do_deploy()', 'BBCLASSEXTEND']
# Variables that sometimes are a bit long but shouldn't be wrapped
-nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', 'SRC_URI\[(.+\.)?md5sum\]', 'SRC_URI\[(.+\.)?sha256sum\]']
+nowrap_vars = ['SUMMARY', 'HOMEPAGE', 'BUGTRACKER', r'SRC_URI\[(.+\.)?md5sum\]', r'SRC_URI\[(.+\.)?sha[0-9]+sum\]']
list_vars = ['SRC_URI', 'LIC_FILES_CHKSUM']
meta_vars = ['SUMMARY', 'DESCRIPTION', 'HOMEPAGE', 'BUGTRACKER', 'SECTION']
-def pn_to_recipe(cooker, pn, mc=''):
- """Convert a recipe name (PN) to the path to the recipe file"""
-
- best = cooker.findBestProvider(pn, mc)
- return best[3]
-
-
-def get_unavailable_reasons(cooker, pn):
- """If a recipe could not be found, find out why if possible"""
- import bb.taskdata
- taskdata = bb.taskdata.TaskData(None, skiplist=cooker.skiplist)
- return taskdata.get_reasons(pn)
-
-
-def parse_recipe(cooker, fn, appendfiles):
+def simplify_history(history, d):
"""
- Parse an individual recipe file, optionally with a list of
- bbappend files.
+ Eliminate any irrelevant events from a variable history
"""
- import bb.cache
- parser = bb.cache.NoCache(cooker.databuilder)
- envdata = parser.loadDataFull(fn, appendfiles)
- return envdata
+ ret_history = []
+ has_set = False
+ # Go backwards through the history and remove any immediate operations
+ # before the most recent set
+ for event in reversed(history):
+ if 'flag' in event or not 'file' in event:
+ continue
+ if event['op'] == 'set':
+ if has_set:
+ continue
+ has_set = True
+ elif event['op'] in ('append', 'prepend', 'postdot', 'predot'):
+ # Reminder: "append" and "prepend" mean += and =+ respectively, NOT :append / :prepend
+ if has_set:
+ continue
+ ret_history.insert(0, event)
+ return ret_history
def get_var_files(fn, varlist, d):
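
The intent of simplify_history() above is easiest to see on a concrete history. A hypothetical sketch (not part of the patch), assuming varhistory-style event dicts and any data store d:

    history = [
        {'op': 'set',    'file': 'foo.inc', 'detail': 'old value'},
        {'op': 'set',    'file': 'foo.bb',  'detail': 'base value'},
        {'op': 'append', 'file': 'foo.bb',  'detail': ' extra'},   # recorded for +=
    ]
    simplify_history(history, d)
    # -> [{'op': 'set', ... 'base value'}, {'op': 'append', ... ' extra'}]
    # i.e. only the most recent 'set' and the += / =+ / .= / =. events after it survive
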
@@ -58,11 +60,19 @@ def get_var_files(fn, varlist, d):
"""
varfiles = {}
for v in varlist:
- history = d.varhistory.variable(v)
files = []
- for event in history:
- if 'file' in event and not 'flag' in event:
- files.append(event['file'])
+ if '[' in v:
+ varsplit = v.split('[')
+ varflag = varsplit[1].split(']')[0]
+ history = d.varhistory.variable(varsplit[0])
+ for event in history:
+ if 'file' in event and event.get('flag', '') == varflag:
+ files.append(event['file'])
+ else:
+ history = d.varhistory.variable(v)
+ for event in history:
+ if 'file' in event and not 'flag' in event:
+ files.append(event['file'])
if files:
actualfile = files[-1]
else:
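
With the flag handling added above, entries such as SRC_URI[sha256sum] can be looked up directly. An illustrative call (recipe path and variables are hypothetical):

    varfiles = get_var_files('/srv/layers/meta-example/recipes-foo/foo/foo_1.0.bb',
                             ['LICENSE', 'SRC_URI[sha256sum]'], d)
    # -> maps each variable to the file that last touched it, e.g.
    # {'LICENSE': '.../foo_1.0.bb', 'SRC_URI[sha256sum]': '.../foo_1.0.bb'}
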
@@ -153,7 +163,7 @@ def patch_recipe_lines(fromlines, values, trailing_newline=True):
key = item[:-2]
else:
key = item
- restr = '%s(_[a-zA-Z0-9-_$(){}]+|\[[^\]]*\])?' % key
+ restr = r'%s(_[a-zA-Z0-9-_$(){}]+|\[[^\]]*\])?' % key
if item.endswith('()'):
recipe_progression_restrs.append(restr + '()')
else:
@@ -176,7 +186,14 @@ def patch_recipe_lines(fromlines, values, trailing_newline=True):
def outputvalue(name, lines, rewindcomments=False):
if values[name] is None:
return
- rawtext = '%s = "%s"%s' % (name, values[name], newline)
+ if isinstance(values[name], tuple):
+ op, value = values[name]
+ if op == '+=' and value.strip() == '':
+ return
+ else:
+ value = values[name]
+ op = '='
+ rawtext = '%s %s "%s"%s' % (name, op, value, newline)
addlines = []
nowrap = False
for nowrap_re in nowrap_vars_res:
@@ -186,10 +203,10 @@ def patch_recipe_lines(fromlines, values, trailing_newline=True):
if nowrap:
addlines.append(rawtext)
elif name in list_vars:
- splitvalue = split_var_value(values[name], assignment=False)
+ splitvalue = split_var_value(value, assignment=False)
if len(splitvalue) > 1:
linesplit = ' \\\n' + (' ' * (len(name) + 4))
- addlines.append('%s = "%s%s"%s' % (name, linesplit.join(splitvalue), linesplit, newline))
+ addlines.append('%s %s "%s%s"%s' % (name, op, linesplit.join(splitvalue), linesplit, newline))
else:
addlines.append(rawtext)
else:
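
outputvalue() now accepts either a plain string (written with '=') or an (operator, value) tuple. A hedged sketch of the values dict that patch_recipe_lines() can be given after this change (variable contents are illustrative):

    values = {
        'DESCRIPTION': 'A clearer description',            # emitted as DESCRIPTION = "..."
        'SRC_URI': ('+=', 'file://0001-fix-build.patch'),  # emitted as SRC_URI += "..."
    }
    patch_recipe_lines(fromlines, values)
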
@@ -321,12 +338,47 @@ def patch_recipe(d, fn, varvalues, patch=False, relpath='', redirect_output=None
"""Modify a list of variable values in the specified recipe. Handles inc files if
used by the recipe.
"""
+ overrides = d.getVar('OVERRIDES').split(':')
+ def override_applicable(hevent):
+ op = hevent['op']
+ if '[' in op:
+ opoverrides = op.split('[')[1].split(']')[0].split(':')
+ for opoverride in opoverrides:
+ if not opoverride in overrides:
+ return False
+ return True
+
varlist = varvalues.keys()
+ fn = os.path.abspath(fn)
varfiles = get_var_files(fn, varlist, d)
locs = localise_file_vars(fn, varfiles, varlist)
patches = []
for f,v in locs.items():
vals = {k: varvalues[k] for k in v}
+ f = os.path.abspath(f)
+ if f == fn:
+ extravals = {}
+ for var, value in vals.items():
+ if var in list_vars:
+ history = simplify_history(d.varhistory.variable(var), d)
+ recipe_set = False
+ for event in history:
+ if os.path.abspath(event['file']) == fn:
+ if event['op'] == 'set':
+ recipe_set = True
+ if not recipe_set:
+ for event in history:
+ if event['op'].startswith(':remove'):
+ continue
+ if not override_applicable(event):
+ continue
+ newvalue = value.replace(event['detail'], '')
+ if newvalue == value and os.path.abspath(event['file']) == fn and event['op'].startswith(':'):
+ op = event['op'].replace('[', ':').replace(']', '')
+ extravals[var + op] = None
+ value = newvalue
+ vals[var] = ('+=', value)
+ vals.update(extravals)
patchdata = patch_recipe_file(f, vals, patch, relpath, redirect_output)
if patch:
patches.append(patchdata)
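
With the logic above, list variables such as SRC_URI that were last set outside the recipe are rewritten as '+=' additions inside it. An illustrative call (paths and values are hypothetical):

    varvalues = {
        'SRCREV': '0123456789abcdef0123456789abcdef01234567',
        'SRC_URI': 'git://git.example.com/foo.git;protocol=https;branch=main',
    }
    patch_recipe(d, '/srv/layers/meta-example/recipes-foo/foo/foo_git.bb', varvalues)
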
@@ -357,12 +409,12 @@ def copy_recipe_files(d, tgt_dir, whole_dir=False, download=True, all_variants=F
fetch.download()
for pth in fetch.localpaths():
if pth not in localpaths:
- localpaths.append(pth)
+ localpaths.append(os.path.abspath(pth))
uri_values.append(srcuri)
fetch_urls(d)
if all_variants:
- # Get files for other variants e.g. in the case of a SRC_URI_append
+ # Get files for other variants e.g. in the case of a SRC_URI:append
localdata = bb.data.createCopy(d)
variants = (localdata.getVar('BBCLASSEXTEND') or '').split()
if variants:
@@ -408,7 +460,7 @@ def get_recipe_local_files(d, patches=False, archives=False):
# fetcher) though note that this only encompasses actual container formats
# i.e. that can contain multiple files as opposed to those that only
# contain a compressed stream (i.e. .tar.gz as opposed to just .gz)
- archive_exts = ['.tar', '.tgz', '.tar.gz', '.tar.Z', '.tbz', '.tbz2', '.tar.bz2', '.tar.xz', '.tar.lz', '.zip', '.jar', '.rpm', '.srpm', '.deb', '.ipk', '.tar.7z', '.7z']
+ archive_exts = ['.tar', '.tgz', '.tar.gz', '.tar.Z', '.tbz', '.tbz2', '.tar.bz2', '.txz', '.tar.xz', '.tar.lz', '.zip', '.jar', '.rpm', '.srpm', '.deb', '.ipk', '.tar.7z', '.7z']
ret = {}
for uri in uris:
if fetch.ud[uri].type == 'file':
@@ -432,7 +484,14 @@ def get_recipe_local_files(d, patches=False, archives=False):
unpack = fetch.ud[uri].parm.get('unpack', True)
if unpack:
continue
- ret[fname] = localpath
+ if os.path.isdir(localpath):
+ for root, dirs, files in os.walk(localpath):
+ for fname in files:
+ fileabspath = os.path.join(root,fname)
+ srcdir = os.path.dirname(localpath)
+ ret[os.path.relpath(fileabspath,srcdir)] = fileabspath
+ else:
+ ret[fname] = localpath
return ret
@@ -502,6 +561,23 @@ def get_bbfile_path(d, destdir, extrapathhint=None):
confdata = bb.cookerdata.parse_config_file(destlayerconf, confdata)
pn = d.getVar('PN')
+ # Parse BBFILES_DYNAMIC and append to BBFILES
+ bbfiles_dynamic = (confdata.getVar('BBFILES_DYNAMIC') or "").split()
+ collections = (confdata.getVar('BBFILE_COLLECTIONS') or "").split()
+ invalid = []
+ for entry in bbfiles_dynamic:
+ parts = entry.split(":", 1)
+ if len(parts) != 2:
+ invalid.append(entry)
+ continue
+ l, f = parts
+ invert = l[0] == "!"
+ if invert:
+ l = l[1:]
+ if (l in collections and not invert) or (l not in collections and invert):
+ confdata.appendVar("BBFILES", " " + f)
+ if invalid:
+ return None
bbfilespecs = (confdata.getVar('BBFILES') or '').split()
if destdir == destlayerdir:
for bbfilespec in bbfilespecs:
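
BBFILES_DYNAMIC entries take the form '<collection>:<filename-pattern>', optionally negated with a leading '!'. Illustrative values for the loop above (layer names are hypothetical):

    collections = ['core', 'openembedded-layer']
    bbfiles_dynamic = [
        'openembedded-layer:${LAYERDIR}/dynamic-layers/meta-oe/*/*/*.bb',  # appended: collection present
        '!meta-absent:${LAYERDIR}/dynamic-layers/meta-absent/*/*/*.bb',    # appended: negated and absent
        'broken-entry-without-separator',                                  # collected as invalid -> return None
    ]
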
@@ -588,19 +664,23 @@ def get_bbappend_path(d, destlayerdir, wildcardver=False):
return (appendpath, pathok)
-def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None):
+def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False, machine=None, extralines=None, removevalues=None, redirect_output=None, params=None, update_original_recipe=False):
"""
Writes a bbappend file for a recipe
Parameters:
rd: data dictionary for the recipe
destlayerdir: base directory of the layer to place the bbappend in
(subdirectory path from there will be determined automatically)
- srcfiles: dict of source files to add to SRC_URI, where the value
- is the full path to the file to be added, and the value is the
- original filename as it would appear in SRC_URI or None if it
- isn't already present. You may pass None for this parameter if
- you simply want to specify your own content via the extralines
- parameter.
+ srcfiles: dict of source files to add to SRC_URI, where the key
+ is the full path to the file to be added, and the value is a
+ dict with the following optional keys:
+ path: the original filename as it would appear in SRC_URI
+ or None if it isn't already present.
+ patchdir: the patchdir parameter
+ newname: the name to give to the newly added file. None to use
+ the default value: basename(path)
+ You may pass None for this parameter if you simply want to specify
+ your own content via the extralines parameter.
install: dict mapping entries in srcfiles to a tuple of two elements:
install path (*without* ${D} prefix) and permission value (as a
string, e.g. '0644').
@@ -618,18 +698,32 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
redirect_output:
If specified, redirects writing the output file to the
specified directory (for dry-run purposes)
+ params:
+ Parameters to use when adding entries to SRC_URI. If specified,
+ should be a list of dicts with the same length as srcfiles.
+ update_original_recipe:
+ Force updating the original recipe instead of creating/updating
+ a bbappend. destlayerdir must contain the original recipe
"""
if not removevalues:
removevalues = {}
- # Determine how the bbappend should be named
- appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver)
- if not appendpath:
- bb.error('Unable to determine layer directory containing %s' % recipefile)
- return (None, None)
- if not pathok:
- bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath)))
+ recipefile = rd.getVar('FILE')
+ if update_original_recipe:
+ if destlayerdir not in recipefile:
+ bb.error("destlayerdir %s doesn't contain the original recipe (%s), cannot update it" % (destlayerdir, recipefile))
+ return (None, None)
+
+ appendpath = recipefile
+ else:
+ # Determine how the bbappend should be named
+ appendpath, pathok = get_bbappend_path(rd, destlayerdir, wildcardver)
+ if not appendpath:
+ bb.error('Unable to determine layer directory containing %s' % recipefile)
+ return (None, None)
+ if not pathok:
+ bb.warn('Unable to determine correct subdirectory path for bbappend file - check that what %s adds to BBFILES also matches .bbappend files. Using %s for now, but until you fix this the bbappend will not be applied.' % (os.path.join(destlayerdir, 'conf', 'layer.conf'), os.path.dirname(appendpath)))
appenddir = os.path.dirname(appendpath)
if not redirect_output:
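
A hedged sketch of calling bbappend_recipe() with the new srcfiles/params form (all paths and parameters are illustrative):

    srcfiles = {
        '/tmp/devtool-workdir/0001-fix-build.patch': {
            'path': None,        # not currently in SRC_URI
            'patchdir': '.',     # optional patchdir SRC_URI parameter
            'newname': None,     # keep the basename as the SRC_URI file name
        },
    }
    params = [{'striplevel': '2'}]   # one dict of SRC_URI parameters per srcfiles entry
    bbappend_recipe(rd, '/srv/layers/meta-custom', srcfiles, params=params)
    # With update_original_recipe=True the recipe file itself is edited instead,
    # in which case destlayerdir must be the layer containing the recipe.
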
@@ -674,30 +768,48 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
bbappendlines.append((varname, op, value))
destsubdir = rd.getVar('PN')
- if srcfiles:
- bbappendlines.append(('FILESEXTRAPATHS_prepend', ':=', '${THISDIR}/${PN}:'))
+ if not update_original_recipe and srcfiles:
+ bbappendlines.append(('FILESEXTRAPATHS:prepend', ':=', '${THISDIR}/${PN}:'))
appendoverride = ''
if machine:
bbappendlines.append(('PACKAGE_ARCH', '=', '${MACHINE_ARCH}'))
- appendoverride = '_%s' % machine
+ appendoverride = ':%s' % machine
copyfiles = {}
if srcfiles:
instfunclines = []
- for newfile, origsrcfile in srcfiles.items():
- srcfile = origsrcfile
+ for i, (newfile, param) in enumerate(srcfiles.items()):
srcurientry = None
- if not srcfile:
- srcfile = os.path.basename(newfile)
+ if not 'path' in param or not param['path']:
+ if 'newname' in param and param['newname']:
+ srcfile = param['newname']
+ else:
+ srcfile = os.path.basename(newfile)
srcurientry = 'file://%s' % srcfile
+ oldentry = None
+ for uri in rd.getVar('SRC_URI').split():
+ if srcurientry in uri:
+ oldentry = uri
+ if params and params[i]:
+ srcurientry = '%s;%s' % (srcurientry, ';'.join('%s=%s' % (k,v) for k,v in params[i].items()))
# Double-check it's not there already
# FIXME do we care if the entry is added by another bbappend that might go away?
if not srcurientry in rd.getVar('SRC_URI').split():
if machine:
- appendline('SRC_URI_append%s' % appendoverride, '=', ' ' + srcurientry)
+ if oldentry:
+ appendline('SRC_URI:remove%s' % appendoverride, '=', ' ' + oldentry)
+ appendline('SRC_URI:append%s' % appendoverride, '=', ' ' + srcurientry)
else:
+ if oldentry:
+ if update_original_recipe:
+ removevalues['SRC_URI'] = oldentry
+ else:
+ appendline('SRC_URI:remove', '=', oldentry)
appendline('SRC_URI', '+=', srcurientry)
- copyfiles[newfile] = srcfile
+ param['path'] = srcfile
+ else:
+ srcfile = param['path']
+ copyfiles[newfile] = param
if install:
institem = install.pop(newfile, None)
if institem:
@@ -708,7 +820,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
instfunclines.append(instdirline)
instfunclines.append('install -m %s ${WORKDIR}/%s ${D}%s' % (perms, os.path.basename(srcfile), instdestpath))
if instfunclines:
- bbappendlines.append(('do_install_append%s()' % appendoverride, '', instfunclines))
+ bbappendlines.append(('do_install:append%s()' % appendoverride, '', instfunclines))
if redirect_output:
bb.note('Writing append file %s (dry-run)' % appendpath)
@@ -717,6 +829,8 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
# multiple times per operation when we're handling overrides)
if os.path.exists(appendpath) and not os.path.exists(outfile):
shutil.copy2(appendpath, outfile)
+ elif update_original_recipe:
+ outfile = recipefile
else:
bb.note('Writing append file %s' % appendpath)
outfile = appendpath
@@ -726,15 +840,15 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
extvars = {'destsubdir': destsubdir}
def appendfile_varfunc(varname, origvalue, op, newlines):
- if varname == 'FILESEXTRAPATHS_prepend':
+ if varname == 'FILESEXTRAPATHS:prepend':
if origvalue.startswith('${THISDIR}/'):
- popline('FILESEXTRAPATHS_prepend')
+ popline('FILESEXTRAPATHS:prepend')
extvars['destsubdir'] = rd.expand(origvalue.split('${THISDIR}/', 1)[1].rstrip(':'))
elif varname == 'PACKAGE_ARCH':
if machine:
popline('PACKAGE_ARCH')
return (machine, None, 4, False)
- elif varname.startswith('do_install_append'):
+ elif varname.startswith('do_install:append'):
func = popline(varname)
if func:
instfunclines = [line.strip() for line in origvalue.strip('\n').splitlines()]
@@ -746,7 +860,7 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
splitval = split_var_value(origvalue, assignment=False)
changed = False
removevar = varname
- if varname in ['SRC_URI', 'SRC_URI_append%s' % appendoverride]:
+ if varname in ['SRC_URI', 'SRC_URI:append%s' % appendoverride]:
removevar = 'SRC_URI'
line = popline(varname)
if line:
@@ -775,11 +889,11 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
newvalue = splitval
if len(newvalue) == 1:
# Ensure it's written out as one line
- if '_append' in varname:
+ if ':append' in varname:
newvalue = ' ' + newvalue[0]
else:
newvalue = newvalue[0]
- if not newvalue and (op in ['+=', '.='] or '_append' in varname):
+ if not newvalue and (op in ['+=', '.='] or ':append' in varname):
# There's no point appending nothing
newvalue = None
if varname.endswith('()'):
@@ -820,7 +934,12 @@ def bbappend_recipe(rd, destlayerdir, srcfiles, install=None, wildcardver=False,
outdir = redirect_output
else:
outdir = appenddir
- for newfile, srcfile in copyfiles.items():
+ for newfile, param in copyfiles.items():
+ srcfile = param['path']
+ patchdir = param.get('patchdir', ".")
+
+ if patchdir != ".":
+ newfile = os.path.join(os.path.split(newfile)[0], patchdir, os.path.split(newfile)[1])
filedest = os.path.join(outdir, destsubdir, os.path.basename(srcfile))
if os.path.abspath(newfile) != os.path.abspath(filedest):
if newfile.startswith(tempfile.gettempdir()):
@@ -864,10 +983,9 @@ def replace_dir_vars(path, d):
path = path.replace(dirpath, '${%s}' % dirvars[dirpath])
return path
-def get_recipe_pv_without_srcpv(pv, uri_type):
+def get_recipe_pv_with_pfx_sfx(pv, uri_type):
"""
- Get PV without SRCPV common in SCM's for now only
- support git.
+ Get PV separating prefix and suffix components.
Returns tuple with pv, prefix and suffix.
"""
@@ -875,7 +993,7 @@ def get_recipe_pv_without_srcpv(pv, uri_type):
sfx = ''
if uri_type == 'git':
- git_regex = re.compile("(?P<pfx>v?)(?P<ver>[^\+]*)((?P<sfx>\+(git)?r?(AUTOINC\+))(?P<rev>.*))?")
+ git_regex = re.compile(r"(?P<pfx>v?)(?P<ver>.*?)(?P<sfx>\+[^\+]*(git)?r?(AUTOINC\+)?)(?P<rev>.*)")
m = git_regex.match(pv)
if m:
@@ -883,7 +1001,7 @@ def get_recipe_pv_without_srcpv(pv, uri_type):
pfx = m.group('pfx')
sfx = m.group('sfx')
else:
- regex = re.compile("(?P<pfx>(v|r)?)(?P<ver>.*)")
+ regex = re.compile(r"(?P<pfx>(v|r)?)(?P<ver>.*)")
m = regex.match(pv)
if m:
pv = m.group('ver')
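
The non-git branch above simply peels off a leading 'v' or 'r'. A quick illustrative check (not part of the patch):

    import re
    regex = re.compile(r"(?P<pfx>(v|r)?)(?P<ver>.*)")
    m = regex.match('v1.2.3')
    # m.group('pfx') == 'v', m.group('ver') == '1.2.3'; the suffix stays ''
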
@@ -927,7 +1045,7 @@ def get_recipe_upstream_version(rd):
src_uri = src_uris.split()[0]
uri_type, _, _, _, _, _ = decodeurl(src_uri)
- (pv, pfx, sfx) = get_recipe_pv_without_srcpv(rd.getVar('PV'), uri_type)
+ (pv, pfx, sfx) = get_recipe_pv_with_pfx_sfx(rd.getVar('PV'), uri_type)
ru['current_version'] = pv
manual_upstream_version = rd.getVar("RECIPE_UPSTREAM_VERSION")
@@ -951,10 +1069,11 @@ def get_recipe_upstream_version(rd):
else:
ud = bb.fetch2.FetchData(src_uri, rd)
if rd.getVar("UPSTREAM_CHECK_COMMITS") == "1":
+ bb.fetch2.get_srcrev(rd)
revision = ud.method.latest_revision(ud, rd, 'default')
upversion = pv
if revision != rd.getVar("SRCREV"):
- upversion = upversion + "-new-commits-available"
+ upversion = upversion + "-new-commits-available"
else:
pupver = ud.method.latest_versionstring(ud, rd)
(upversion, revision) = pupver
@@ -969,3 +1088,98 @@ def get_recipe_upstream_version(rd):
ru['datetime'] = datetime.now()
return ru
+
+def _get_recipe_upgrade_status(data):
+ uv = get_recipe_upstream_version(data)
+
+ pn = data.getVar('PN')
+ cur_ver = uv['current_version']
+
+ upstream_version_unknown = data.getVar('UPSTREAM_VERSION_UNKNOWN')
+ if not uv['version']:
+ status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
+ else:
+ cmp = vercmp_string(uv['current_version'], uv['version'])
+ if cmp == -1:
+ status = "UPDATE" if not upstream_version_unknown else "KNOWN_BROKEN"
+ elif cmp == 0:
+ status = "MATCH" if not upstream_version_unknown else "KNOWN_BROKEN"
+ else:
+ status = "UNKNOWN" if upstream_version_unknown else "UNKNOWN_BROKEN"
+
+ next_ver = uv['version'] if uv['version'] else "N/A"
+ revision = uv['revision'] if uv['revision'] else "N/A"
+ maintainer = data.getVar('RECIPE_MAINTAINER')
+ no_upgrade_reason = data.getVar('RECIPE_NO_UPDATE_REASON')
+
+ return (pn, status, cur_ver, next_ver, maintainer, revision, no_upgrade_reason)
+
+def get_recipe_upgrade_status(recipes=None):
+ pkgs_list = []
+ data_copy_list = []
+ copy_vars = ('SRC_URI',
+ 'PV',
+ 'DL_DIR',
+ 'PN',
+ 'CACHE',
+ 'PERSISTENT_DIR',
+ 'BB_URI_HEADREVS',
+ 'UPSTREAM_CHECK_COMMITS',
+ 'UPSTREAM_CHECK_GITTAGREGEX',
+ 'UPSTREAM_CHECK_REGEX',
+ 'UPSTREAM_CHECK_URI',
+ 'UPSTREAM_VERSION_UNKNOWN',
+ 'RECIPE_MAINTAINER',
+ 'RECIPE_NO_UPDATE_REASON',
+ 'RECIPE_UPSTREAM_VERSION',
+ 'RECIPE_UPSTREAM_DATE',
+ 'CHECK_DATE',
+ 'FETCHCMD_bzr',
+ 'FETCHCMD_ccrc',
+ 'FETCHCMD_cvs',
+ 'FETCHCMD_git',
+ 'FETCHCMD_hg',
+ 'FETCHCMD_npm',
+ 'FETCHCMD_osc',
+ 'FETCHCMD_p4',
+ 'FETCHCMD_repo',
+ 'FETCHCMD_s3',
+ 'FETCHCMD_svn',
+ 'FETCHCMD_wget',
+ )
+
+ with bb.tinfoil.Tinfoil() as tinfoil:
+ tinfoil.prepare(config_only=False)
+
+ if not recipes:
+ recipes = tinfoil.all_recipe_files(variants=False)
+
+ for fn in recipes:
+ try:
+ if fn.startswith("/"):
+ data = tinfoil.parse_recipe_file(fn)
+ else:
+ data = tinfoil.parse_recipe(fn)
+ except bb.providers.NoProvider:
+ bb.note(" No provider for %s" % fn)
+ continue
+
+ unreliable = data.getVar('UPSTREAM_CHECK_UNRELIABLE')
+ if unreliable == "1":
+ bb.note(" Skip package %s as upstream check unreliable" % pn)
+ continue
+
+ data_copy = bb.data.init()
+ for var in copy_vars:
+ data_copy.setVar(var, data.getVar(var))
+ for k in data:
+ if k.startswith('SRCREV'):
+ data_copy.setVar(k, data.getVar(k))
+
+ data_copy_list.append(data_copy)
+
+ from concurrent.futures import ProcessPoolExecutor
+ with ProcessPoolExecutor(max_workers=utils.cpu_count()) as executor:
+ pkgs_list = executor.map(_get_recipe_upgrade_status, data_copy_list)
+
+ return pkgs_list
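
An illustrative consumer of the result, matching the tuple produced by _get_recipe_upgrade_status() above:

    for pn, status, cur_ver, next_ver, maintainer, revision, reason in get_recipe_upgrade_status():
        if status == 'UPDATE':
            print('%s: %s -> %s (maintainer: %s)' % (pn, cur_ver, next_ver, maintainer))
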