def multiprocess_launch(target, items, d, extraargs=None):
    """Run target(item, *extraargs) for each item in parallel subprocesses.

    Up to BB_NUMBER_THREADS (or the CPU count) processes run at once. The
    non-None return values of target are collected and returned as a list.
    If any subprocess raised an exception, no new work is started, already
    running processes are drained, and bb.fatal() is called with all of
    the collected errors/tracebacks.
    """

    class ProcessLaunch(multiprocessing.Process):
        """A Process which ships its result (or exception) back over a pipe."""

        def __init__(self, *args, **kwargs):
            multiprocessing.Process.__init__(self, *args, **kwargs)
            self._pconn, self._cconn = multiprocessing.Pipe()
            self._exception = None
            self._result = None

        def run(self):
            try:
                ret = self._target(*self._args, **self._kwargs)
                # Success: no exception, payload is the return value
                self._cconn.send((None, ret))
            except Exception as e:
                tb = traceback.format_exc()
                self._cconn.send((e, tb))

        def update(self):
            # The payload is a traceback string when an exception occurred,
            # otherwise it is the target's return value.
            if self._pconn.poll():
                (e, tb) = self._pconn.recv()
                if e is not None:
                    self._exception = (e, tb)
                else:
                    self._result = tb

        @property
        def exception(self):
            self.update()
            return self._exception

        @property
        def result(self):
            self.update()
            return self._result

    max_process = int(d.getVar("BB_NUMBER_THREADS") or os.cpu_count() or 1)
    launched = []
    errors = []
    results = []
    items = list(items)
    while (items and not errors) or launched:
        if not errors and items and len(launched) < max_process:
            args = (items.pop(),)
            if extraargs is not None:
                args = args + extraargs
            p = ProcessLaunch(target=target, args=args)
            p.start()
            launched.append(p)
        # Iterate a copy: removing from the list we're iterating would
        # silently skip the following element.
        for q in launched[:]:
            # Have to manually call update() to avoid deadlocks. The pipe can be full and
            # transfer stalled until we try and read the results object but the subprocess won't exit
            # as it still has data to write (https://bugs.python.org/issue8426)
            q.update()
            # The finished processes are joined when calling is_alive()
            if not q.is_alive():
                if q.exception:
                    errors.append(q.exception)
                if q.result:
                    results.append(q.result)
                launched.remove(q)
    # Paranoia doesn't hurt
    for p in launched:
        p.join()
    if errors:
        msg = ""
        for (e, tb) in errors:
            if isinstance(e, subprocess.CalledProcessError) and e.output:
                msg = msg + str(e) + "\n"
                msg = msg + "Subprocess output:"
                msg = msg + e.output.decode("utf-8", errors="ignore")
            else:
                msg = msg + str(e) + ": " + str(tb) + "\n"
        bb.fatal("Fatal errors occurred in subprocesses:\n%s" % msg)
    return results

def squashspaces(string):
    """Collapse every run of whitespace to a single space and strip the ends."""
    import re
    return re.sub(r"\s+", " ", string).strip()

def rprovides_map(pkgdata_dir, pkg_dict):
    """Build a map of runtime-provided component -> list of providing packages.

    For each package in pkg_dict, the pkgdata runtime-reverse file is parsed
    for RPROVIDES/FILERPROVIDES lines. Version constraints (tokens starting
    with '(') are excluded. Packages without a pkgdata file are skipped.
    """
    # Map file -> pkg provider
    rprov_map = {}
    for pkg in pkg_dict:
        path_to_pkgfile = os.path.join(pkgdata_dir, 'runtime-reverse', pkg)
        if not os.path.isfile(path_to_pkgfile):
            continue
        with open(path_to_pkgfile) as f:
            for line in f:
                if line.startswith('RPROVIDES') or line.startswith('FILERPROVIDES'):
                    # List all components provided by pkg.
                    # Exclude version strings, i.e. those starting with (
                    provides = [x for x in line.split()[1:] if not x.startswith('(')]
                    for prov in provides:
                        if prov in rprov_map:
                            rprov_map[prov].append(pkg)
                        else:
                            rprov_map[prov] = [pkg]
    return rprov_map

def format_pkg_list(pkg_dict, ret_format=None, pkgdata_dir=None):
    """Format a package dictionary as a sorted, newline-terminated listing.

    ret_format selects the fields emitted per package:
      "arch" -> name + arch; "file" -> name + filename + arch;
      "ver"  -> name + arch + version;
      "deps" -> one "pkg|dep" line per dependency, annotated with the
                RPROVIDES provider(s) when pkgdata_dir resolves one;
      anything else -> just the package name.
    Returns '' for an empty pkg_dict.
    """
    output = []
    if ret_format == "arch":
        for pkg in sorted(pkg_dict):
            output.append("%s %s" % (pkg, pkg_dict[pkg]["arch"]))
    elif ret_format == "file":
        for pkg in sorted(pkg_dict):
            output.append("%s %s %s" % (pkg, pkg_dict[pkg]["filename"], pkg_dict[pkg]["arch"]))
    elif ret_format == "ver":
        for pkg in sorted(pkg_dict):
            output.append("%s %s %s" % (pkg, pkg_dict[pkg]["arch"], pkg_dict[pkg]["ver"]))
    elif ret_format == "deps":
        rprov_map = rprovides_map(pkgdata_dir, pkg_dict)
        for pkg in sorted(pkg_dict):
            for dep in pkg_dict[pkg]["deps"]:
                if dep in rprov_map:
                    # There could be multiple providers within the image
                    for pkg_provider in rprov_map[dep]:
                        output.append("%s|%s * %s [RPROVIDES]" % (pkg, pkg_provider, dep))
                else:
                    output.append("%s|%s" % (pkg, dep))
    else:
        for pkg in sorted(pkg_dict):
            output.append(pkg)

    output_str = '\n'.join(output)

    if output_str:
        # make sure last line is newline terminated
        output_str += '\n'

    return output_str


# Helper function to get the host compiler version
# Do not assume the compiler is gcc
def get_host_compiler_version(d, taskcontextonly=False):
    """Return (compiler, "major.minor") for BUILD_CC on the host.

    When taskcontextonly is set, only runs inside a BitBake worker context
    (returns None otherwise, e.g. during parsing). Calls bb.fatal() if the
    compiler cannot be run or its version cannot be parsed.
    """
    import re, subprocess

    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
        return

    compiler = d.getVar("BUILD_CC")
    # Get rid of ccache since it is not present when parsing.
    if compiler.startswith('ccache '):
        compiler = compiler[7:]
    try:
        env = os.environ.copy()
        # datastore PATH does not contain session PATH as set by environment-setup-...
        # this breaks the install-buildtools use-case
        # env["PATH"] = d.getVar("PATH")
        output = subprocess.check_output("%s --version" % compiler, \
                    shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
    except subprocess.CalledProcessError as e:
        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))

    match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
    if not match:
        bb.fatal("Can't get compiler version from %s --version output" % compiler)

    version = match.group(1)
    return compiler, version

def host_gcc_version(d, taskcontextonly=False):
    """Return a "-<version>" suffix for old host gcc (4.8/4.9), else "".

    When taskcontextonly is set, only runs inside a BitBake worker context.
    Unlike get_host_compiler_version(), the datastore PATH is exported so
    the session compiler is found.
    """
    import re, subprocess

    if taskcontextonly and d.getVar('BB_WORKERCONTEXT') != '1':
        return

    compiler = d.getVar("BUILD_CC")
    # Get rid of ccache since it is not present when parsing.
    if compiler.startswith('ccache '):
        compiler = compiler[7:]
    try:
        env = os.environ.copy()
        env["PATH"] = d.getVar("PATH")
        output = subprocess.check_output("%s --version" % compiler, \
                    shell=True, env=env, stderr=subprocess.STDOUT).decode("utf-8")
    except subprocess.CalledProcessError as e:
        bb.fatal("Error running %s --version: %s" % (compiler, e.output.decode("utf-8")))

    match = re.match(r".* (\d+\.\d+)\.\d+.*", output.split('\n')[0])
    if not match:
        bb.fatal("Can't get compiler version from %s --version output" % compiler)

    version = match.group(1)
    return "-%s" % version if version in ("4.8", "4.9") else ""


def get_multilib_datastore(variant, d):
    """Return a copy of d configured for the given multilib variant.

    For a non-empty variant, the virtclass-multilib override and MLPREFIX
    are applied; for the default (empty) variant, any multilib overrides
    are stripped and DEFAULTTUNE is restored from its saved original.
    """
    localdata = bb.data.createCopy(d)
    if variant:
        overrides = localdata.getVar("OVERRIDES", False) + ":virtclass-multilib-" + variant
        localdata.setVar("OVERRIDES", overrides)
        localdata.setVar("MLPREFIX", variant + "-")
    else:
        origdefault = localdata.getVar("DEFAULTTUNE_MULTILIB_ORIGINAL")
        if origdefault:
            localdata.setVar("DEFAULTTUNE", origdefault)
        overrides = localdata.getVar("OVERRIDES", False).split(":")
        overrides = ":".join([x for x in overrides if not x.startswith("virtclass-multilib-")])
        localdata.setVar("OVERRIDES", overrides)
        localdata.setVar("MLPREFIX", "")
    return localdata

class ImageQAFailed(Exception):
    """Raised when an image QA function fails.

    description: human-readable failure detail (optional)
    name: the QA function that failed
    logfile: path to a log with more information, if any
    """
    def __init__(self, description, name=None, logfile=None):
        self.description = description
        self.name = name
        self.logfile = logfile

    def __str__(self):
        msg = 'Function failed: %s' % self.name
        if self.description:
            msg = msg + ' (%s)' % self.description

        return msg

def sh_quote(string):
    """Return string quoted for safe use in a shell command line."""
    import shlex
    return shlex.quote(string)

def directory_size(root, blocksize=4096):
    """ Calculate the size of the directory, taking into account hard links,
    rounding up every size to multiples of the blocksize.
    """
    def roundup(size):
        """ Round the size up to the nearest multiple of the block size. """
        import math
        return math.ceil(size / blocksize) * blocksize

    def getsize(filename):
        """ Get the size of the filename, not following symlinks, taking into
        account hard links.
        """
        stat = os.lstat(filename)
        if stat.st_ino not in inodes:
            inodes.add(stat.st_ino)
            return stat.st_size
        else:
            return 0

    inodes = set()
    total = 0
    # Renamed from 'root' to avoid shadowing the function parameter.
    for dirpath, dirs, files in os.walk(root):
        total += sum(roundup(getsize(os.path.join(dirpath, name))) for name in files)
        total += roundup(getsize(dirpath))
    return total