author     Khem Raj <raj.khem@gmail.com>  2011-02-23 10:48:51 -0800
committer  Khem Raj <raj.khem@gmail.com>  2011-02-23 10:48:51 -0800
commit     951cbf3f65f347c7a7bbcae193218f9187a15fbf (patch)
tree       96e9c74551e6931804992f8f49af05f732eb0fff /bitbake/lib/bb
parent     adbaae2179a6c3746e53f7fbb2ca0939e85a7ea9 (diff)
bitbake: Remove in-tree version

Bitbake should be used by checking it out from its own repo.

Signed-off-by: Khem Raj <raj.khem@gmail.com>
Diffstat (limited to 'bitbake/lib/bb')
-rw-r--r-- bitbake/lib/bb/COW.py | 323
-rw-r--r-- bitbake/lib/bb/__init__.py | 139
-rw-r--r-- bitbake/lib/bb/build.py | 472
-rw-r--r-- bitbake/lib/bb/cache.py | 632
-rw-r--r-- bitbake/lib/bb/codeparser.py | 336
-rw-r--r-- bitbake/lib/bb/command.py | 271
-rw-r--r-- bitbake/lib/bb/cooker.py | 1078
-rw-r--r-- bitbake/lib/bb/daemonize.py | 190
-rw-r--r-- bitbake/lib/bb/data.py | 338
-rw-r--r-- bitbake/lib/bb/data_smart.py | 428
-rw-r--r-- bitbake/lib/bb/event.py | 386
-rw-r--r-- bitbake/lib/bb/fetch/__init__.py | 836
-rw-r--r-- bitbake/lib/bb/fetch/bzr.py | 148
-rw-r--r-- bitbake/lib/bb/fetch/cvs.py | 172
-rw-r--r-- bitbake/lib/bb/fetch/git.py | 339
-rw-r--r-- bitbake/lib/bb/fetch/hg.py | 180
-rw-r--r-- bitbake/lib/bb/fetch/local.py | 73
-rw-r--r-- bitbake/lib/bb/fetch/osc.py | 143
-rw-r--r-- bitbake/lib/bb/fetch/perforce.py | 206
-rw-r--r-- bitbake/lib/bb/fetch/repo.py | 98
-rw-r--r-- bitbake/lib/bb/fetch/ssh.py | 118
-rw-r--r-- bitbake/lib/bb/fetch/svk.py | 104
-rw-r--r-- bitbake/lib/bb/fetch/svn.py | 204
-rw-r--r-- bitbake/lib/bb/fetch/wget.py | 93
-rw-r--r-- bitbake/lib/bb/fetch2/__init__.py | 1074
-rw-r--r-- bitbake/lib/bb/fetch2/bzr.py | 141
-rw-r--r-- bitbake/lib/bb/fetch2/cvs.py | 181
-rw-r--r-- bitbake/lib/bb/fetch2/git.py | 242
-rw-r--r-- bitbake/lib/bb/fetch2/hg.py | 174
-rw-r--r-- bitbake/lib/bb/fetch2/local.py | 80
-rw-r--r-- bitbake/lib/bb/fetch2/osc.py | 135
-rw-r--r-- bitbake/lib/bb/fetch2/perforce.py | 196
-rw-r--r-- bitbake/lib/bb/fetch2/repo.py | 98
-rw-r--r-- bitbake/lib/bb/fetch2/ssh.py | 120
-rw-r--r-- bitbake/lib/bb/fetch2/svk.py | 97
-rw-r--r-- bitbake/lib/bb/fetch2/svn.py | 180
-rw-r--r-- bitbake/lib/bb/fetch2/wget.py | 91
-rw-r--r-- bitbake/lib/bb/methodpool.py | 84
-rw-r--r-- bitbake/lib/bb/msg.py | 200
-rw-r--r-- bitbake/lib/bb/parse/__init__.py | 123
-rw-r--r-- bitbake/lib/bb/parse/ast.py | 446
-rw-r--r-- bitbake/lib/bb/parse/parse_py/BBHandler.py | 254
-rw-r--r-- bitbake/lib/bb/parse/parse_py/ConfHandler.py | 139
-rw-r--r-- bitbake/lib/bb/parse/parse_py/__init__.py | 33
-rw-r--r-- bitbake/lib/bb/persist_data.py | 194
-rw-r--r-- bitbake/lib/bb/process.py | 109
-rw-r--r-- bitbake/lib/bb/providers.py | 330
-rw-r--r-- bitbake/lib/bb/pysh/__init__.py | 0
-rw-r--r-- bitbake/lib/bb/pysh/builtin.py | 710
-rw-r--r-- bitbake/lib/bb/pysh/interp.py | 1367
-rw-r--r-- bitbake/lib/bb/pysh/lsprof.py | 116
-rw-r--r-- bitbake/lib/bb/pysh/pysh.py | 167
-rw-r--r-- bitbake/lib/bb/pysh/pyshlex.py | 888
-rw-r--r-- bitbake/lib/bb/pysh/pyshyacc.py | 779
-rw-r--r-- bitbake/lib/bb/pysh/sherrors.py | 41
-rw-r--r-- bitbake/lib/bb/pysh/subprocess_fix.py | 77
-rw-r--r-- bitbake/lib/bb/runqueue.py | 1663
-rw-r--r-- bitbake/lib/bb/server/__init__.py | 0
-rw-r--r-- bitbake/lib/bb/server/none.py | 195
-rw-r--r-- bitbake/lib/bb/server/xmlrpc.py | 260
-rw-r--r-- bitbake/lib/bb/shell.py | 820
-rw-r--r-- bitbake/lib/bb/siggen.py | 298
-rw-r--r-- bitbake/lib/bb/taskdata.py | 586
-rw-r--r-- bitbake/lib/bb/ui/__init__.py | 17
-rw-r--r-- bitbake/lib/bb/ui/crumbs/__init__.py | 17
-rw-r--r-- bitbake/lib/bb/ui/crumbs/buildmanager.py | 455
-rw-r--r-- bitbake/lib/bb/ui/crumbs/progress.py | 17
-rw-r--r-- bitbake/lib/bb/ui/crumbs/puccho.glade | 606
-rw-r--r-- bitbake/lib/bb/ui/crumbs/runningbuild.py | 311
-rw-r--r-- bitbake/lib/bb/ui/depexp.py | 307
-rw-r--r-- bitbake/lib/bb/ui/goggle.py | 110
-rw-r--r-- bitbake/lib/bb/ui/knotty.py | 248
-rw-r--r-- bitbake/lib/bb/ui/ncurses.py | 352
-rw-r--r-- bitbake/lib/bb/ui/puccho.py | 425
-rw-r--r-- bitbake/lib/bb/ui/uievent.py | 127
-rw-r--r-- bitbake/lib/bb/ui/uihelper.py | 42
-rw-r--r-- bitbake/lib/bb/utils.py | 845
77 files changed, 0 insertions, 23874 deletions
diff --git a/bitbake/lib/bb/COW.py b/bitbake/lib/bb/COW.py
deleted file mode 100644
index 6917ec378a..0000000000
--- a/bitbake/lib/bb/COW.py
+++ /dev/null
@@ -1,323 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# This is a copy on write dictionary and set which abuses classes to try and be nice and fast.
-#
-# Copyright (C) 2006 Tim Amsell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Please Note:
-# Be careful when using mutable types (ie Dict and Lists) - operations involving these are SLOW.
-# Assign a file to __warn__ to get warnings about slow operations.
-#
-
-from __future__ import print_function
-import copy
-import types
-ImmutableTypes = (
- types.NoneType,
- bool,
- complex,
- float,
- int,
- long,
- tuple,
- frozenset,
- basestring
-)
-
-MUTABLE = "__mutable__"
-
-class COWMeta(type):
- pass
-
-class COWDictMeta(COWMeta):
- __warn__ = False
- __hasmutable__ = False
- __marker__ = tuple()
-
- def __str__(cls):
- # FIXME: I have magic numbers!
- return "<COWDict Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) - 3)
- __repr__ = __str__
-
- def cow(cls):
- class C(cls):
- __count__ = cls.__count__ + 1
- return C
- copy = cow
- __call__ = cow
-
- def __setitem__(cls, key, value):
- if not isinstance(value, ImmutableTypes):
- if not isinstance(value, COWMeta):
- cls.__hasmutable__ = True
- key += MUTABLE
- setattr(cls, key, value)
-
- def __getmutable__(cls, key, readonly=False):
- nkey = key + MUTABLE
- try:
- return cls.__dict__[nkey]
- except KeyError:
- pass
-
- value = getattr(cls, nkey)
- if readonly:
- return value
-
- if not cls.__warn__ is False and not isinstance(value, COWMeta):
- print("Warning: Doing a copy because %s is a mutable type." % key, file=cls.__warn__)
- try:
- value = value.copy()
- except AttributeError as e:
- value = copy.copy(value)
- setattr(cls, nkey, value)
- return value
-
- __getmarker__ = []
- def __getreadonly__(cls, key, default=__getmarker__):
- """\
- Get a value (even if mutable) which you promise not to change.
- """
- return cls.__getitem__(key, default, True)
-
- def __getitem__(cls, key, default=__getmarker__, readonly=False):
- try:
- try:
- value = getattr(cls, key)
- except AttributeError:
- value = cls.__getmutable__(key, readonly)
-
- # This is for values which have been deleted
- if value is cls.__marker__:
- raise AttributeError("key %s does not exist." % key)
-
- return value
- except AttributeError as e:
- if not default is cls.__getmarker__:
- return default
-
- raise KeyError(str(e))
-
- def __delitem__(cls, key):
- cls.__setitem__(key, cls.__marker__)
-
- def __revertitem__(cls, key):
- if not cls.__dict__.has_key(key):
- key += MUTABLE
- delattr(cls, key)
-
- def __contains__(cls, key):
- return cls.has_key(key)
-
- def has_key(cls, key):
- value = cls.__getreadonly__(key, cls.__marker__)
- if value is cls.__marker__:
- return False
- return True
-
- def iter(cls, type, readonly=False):
- for key in dir(cls):
- if key.startswith("__"):
- continue
-
- if key.endswith(MUTABLE):
- key = key[:-len(MUTABLE)]
-
- if type == "keys":
- yield key
-
- try:
- if readonly:
- value = cls.__getreadonly__(key)
- else:
- value = cls[key]
- except KeyError:
- continue
-
- if type == "values":
- yield value
- if type == "items":
- yield (key, value)
- raise StopIteration()
-
- def iterkeys(cls):
- return cls.iter("keys")
- def itervalues(cls, readonly=False):
- if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
- print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
- return cls.iter("values", readonly)
- def iteritems(cls, readonly=False):
- if not cls.__warn__ is False and cls.__hasmutable__ and readonly is False:
- print("Warning: If you arn't going to change any of the values call with True.", file=cls.__warn__)
- return cls.iter("items", readonly)
-
-class COWSetMeta(COWDictMeta):
- def __str__(cls):
- # FIXME: I have magic numbers!
- return "<COWSet Level: %i Current Keys: %i>" % (cls.__count__, len(cls.__dict__) -3)
- __repr__ = __str__
-
- def cow(cls):
- class C(cls):
- __count__ = cls.__count__ + 1
- return C
-
- def add(cls, value):
- COWDictMeta.__setitem__(cls, repr(hash(value)), value)
-
- def remove(cls, value):
- COWDictMeta.__delitem__(cls, repr(hash(value)))
-
- def __in__(cls, value):
- return COWDictMeta.has_key(repr(hash(value)))
-
- def iterkeys(cls):
- raise TypeError("sets don't have keys")
-
- def iteritems(cls):
- raise TypeError("sets don't have 'items'")
-
-# These are the actual classes you use!
-class COWDictBase(object):
- __metaclass__ = COWDictMeta
- __count__ = 0
-
-class COWSetBase(object):
- __metaclass__ = COWSetMeta
- __count__ = 0
-
-if __name__ == "__main__":
- import sys
- COWDictBase.__warn__ = sys.stderr
- a = COWDictBase()
- print("a", a)
-
- a['a'] = 'a'
- a['b'] = 'b'
- a['dict'] = {}
-
- b = a.copy()
- print("b", b)
- b['c'] = 'b'
-
- print()
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems():
- print(x)
- print()
-
- b['dict']['a'] = 'b'
- b['a'] = 'c'
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems():
- print(x)
- print()
-
- try:
- b['dict2']
- except KeyError as e:
- print("Okay!")
-
- a['set'] = COWSetBase()
- a['set'].add("o1")
- a['set'].add("o1")
- a['set'].add("o2")
-
- print("a", a)
- for x in a['set'].itervalues():
- print(x)
- print("--")
- print("b", b)
- for x in b['set'].itervalues():
- print(x)
- print()
-
- b['set'].add('o3')
-
- print("a", a)
- for x in a['set'].itervalues():
- print(x)
- print("--")
- print("b", b)
- for x in b['set'].itervalues():
- print(x)
- print()
-
- a['set2'] = set()
- a['set2'].add("o1")
- a['set2'].add("o1")
- a['set2'].add("o2")
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- del b['b']
- try:
- print(b['b'])
- except KeyError:
- print("Yay! deleted key raises error")
-
- if b.has_key('b'):
- print("Boo!")
- else:
- print("Yay - has_key with delete works!")
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- b.__revertitem__('b')
-
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
-
- b.__revertitem__('dict')
- print("a", a)
- for x in a.iteritems():
- print(x)
- print("--")
- print("b", b)
- for x in b.iteritems(readonly=True):
- print(x)
- print()
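
The file removed above implements copy-on-write by abusing the class
machinery: values live as class attributes, and copying a dict means
creating an empty subclass, so unshadowed lookups fall through to the
parent layer. A minimal Python 3 sketch of that one trick (names are
illustrative, not the removed module's API):

    class COWLayer:
        @classmethod
        def copy(cls):
            # A new empty subclass: no data is duplicated.
            return type('COWLayer', (cls,), {})

        @classmethod
        def set(cls, key, value):
            # Shadows the key in this layer only.
            setattr(cls, key, value)

        @classmethod
        def get(cls, key):
            # Falls through the inheritance chain to older layers.
            return getattr(cls, key)

    base = COWLayer.copy()
    base.set('a', 'a')
    child = base.copy()
    child.set('a', 'c')
    assert base.get('a') == 'a' and child.get('a') == 'c'
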
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
deleted file mode 100644
index 4c7afc9c21..0000000000
--- a/bitbake/lib/bb/__init__.py
+++ /dev/null
@@ -1,139 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake Build System Python Library
-#
-# Copyright (C) 2003 Holger Schurig
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# Based on Gentoo's portage.py.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-__version__ = "1.11.0"
-
-import sys
-if sys.version_info < (2, 6, 0):
- raise RuntimeError("Sorry, python 2.6.0 or later is required for this version of bitbake")
-
-import os
-import logging
-import traceback
-
-class NullHandler(logging.Handler):
- def emit(self, record):
- pass
-
-Logger = logging.getLoggerClass()
-class BBLogger(Logger):
- def __init__(self, name):
- if name.split(".")[0] == "BitBake":
- self.debug = self.bbdebug
- Logger.__init__(self, name)
-
- def bbdebug(self, level, msg, *args, **kwargs):
- return self.log(logging.DEBUG - level + 1, msg, *args, **kwargs)
-
- def plain(self, msg, *args, **kwargs):
- return self.log(logging.INFO + 1, msg, *args, **kwargs)
-
- def verbose(self, msg, *args, **kwargs):
- return self.log(logging.INFO - 1, msg, *args, **kwargs)
-
- def exception(self, msg, *args, **kwargs):
- return self.critical("%s\n%s" % (msg, traceback.format_exc()), *args, **kwargs)
-
-logging.raiseExceptions = False
-logging.setLoggerClass(BBLogger)
-
-logger = logging.getLogger("BitBake")
-logger.addHandler(NullHandler())
-logger.setLevel(logging.INFO)
-
-# This has to be imported after the setLoggerClass, as the import of bb.msg
-# can result in construction of the various loggers.
-import bb.msg
-
-if "BBDEBUG" in os.environ:
- level = int(os.environ["BBDEBUG"])
- if level:
- bb.msg.set_debug_level(level)
-
-if True or os.environ.get("BBFETCH2"):
- from bb import fetch2 as fetch
- sys.modules['bb.fetch'] = sys.modules['bb.fetch2']
-
-# Messaging convenience functions
-def plain(*args):
- logger.plain(''.join(args))
-
-def debug(lvl, *args):
- logger.debug(lvl, ''.join(args))
-
-def note(*args):
- logger.info(''.join(args))
-
-def warn(*args):
- logger.warn(''.join(args))
-
-def error(*args):
- logger.error(''.join(args))
-
-def fatal(*args):
- logger.critical(''.join(args))
- sys.exit(1)
-
-
-def deprecated(func, name = None, advice = ""):
- """This is a decorator which can be used to mark functions
- as deprecated. It will result in a warning being emmitted
- when the function is used."""
- import warnings
-
- if advice:
- advice = ": %s" % advice
- if name is None:
- name = func.__name__
-
- def newFunc(*args, **kwargs):
- warnings.warn("Call to deprecated function %s%s." % (name,
- advice),
- category = PendingDeprecationWarning,
- stacklevel = 2)
- return func(*args, **kwargs)
- newFunc.__name__ = func.__name__
- newFunc.__doc__ = func.__doc__
- newFunc.__dict__.update(func.__dict__)
- return newFunc
-
-# For compatibility
-def deprecate_import(current, modulename, fromlist, renames = None):
- """Import objects from one module into another, wrapping them with a DeprecationWarning"""
- import sys
-
- module = __import__(modulename, fromlist = fromlist)
- for position, objname in enumerate(fromlist):
- obj = getattr(module, objname)
- newobj = deprecated(obj, "{0}.{1}".format(current, objname),
- "Please use {0}.{1} instead".format(modulename, objname))
- if renames:
- newname = renames[position]
- else:
- newname = objname
-
- setattr(sys.modules[current], newname, newobj)
-
-deprecate_import(__name__, "bb.fetch", ("MalformedUrl", "encodeurl", "decodeurl"))
-deprecate_import(__name__, "bb.utils", ("mkdirhier", "movefile", "copyfile", "which"))
-deprecate_import(__name__, "bb.utils", ["vercmp_string"], ["vercmp"])
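
The deprecated() helper removed above wraps a callable so that every
call emits a PendingDeprecationWarning, and deprecate_import() uses it
to re-export moved functions under their old names (e.g. bb.vercmp_string
pointing at bb.utils.vercmp). A rough Python 3 equivalent of the wrapper
using functools.wraps (a sketch, not the removed implementation):

    import functools
    import warnings

    def deprecated(func, name=None, advice=""):
        name = name or func.__name__
        suffix = ": %s" % advice if advice else ""

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            warnings.warn("Call to deprecated function %s%s." % (name, suffix),
                          category=PendingDeprecationWarning,
                          stacklevel=2)
            return func(*args, **kwargs)
        return wrapper
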
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
deleted file mode 100644
index a7664bd36d..0000000000
--- a/bitbake/lib/bb/build.py
+++ /dev/null
@@ -1,472 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake 'Build' implementation
-#
-# Core code for function execution and task handling in the
-# BitBake build tools.
-#
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# Based on Gentoo's portage.py.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-import bb.msg
-import bb.process
-from contextlib import nested
-from bb import data, event, mkdirhier, utils
-
-bblogger = logging.getLogger('BitBake')
-logger = logging.getLogger('BitBake.Build')
-
-NULL = open(os.devnull, 'r+')
-
-
-# When we execute a python function we'd like certain things
-# in all namespaces, hence we add them to __builtins__
-# If we do not do this and use the exec globals, they will
-# not be available to subfunctions.
-__builtins__['bb'] = bb
-__builtins__['os'] = os
-
-class FuncFailed(Exception):
- def __init__(self, name = None, logfile = None):
- self.logfile = logfile
- self.name = name
- if name:
- self.msg = "Function '%s' failed" % name
- else:
- self.msg = "Function failed"
-
- def __str__(self):
- if self.logfile and os.path.exists(self.logfile):
- msg = ("%s (see %s for further information)" %
- (self.msg, self.logfile))
- else:
- msg = self.msg
- return msg
-
-class TaskBase(event.Event):
- """Base class for task events"""
-
- def __init__(self, t, d ):
- self._task = t
- self._package = bb.data.getVar("PF", d, 1)
- event.Event.__init__(self)
- self._message = "package %s: task %s: %s" % (bb.data.getVar("PF", d, 1), t, bb.event.getName(self)[4:])
-
- def getTask(self):
- return self._task
-
- def setTask(self, task):
- self._task = task
-
- task = property(getTask, setTask, None, "task property")
-
-class TaskStarted(TaskBase):
- """Task execution started"""
-
-class TaskSucceeded(TaskBase):
- """Task execution completed"""
-
-class TaskFailed(TaskBase):
- """Task execution failed"""
-
- def __init__(self, task, logfile, metadata):
- self.logfile = logfile
- super(TaskFailed, self).__init__(task, metadata)
-
-class TaskInvalid(TaskBase):
-
- def __init__(self, task, metadata):
- super(TaskInvalid, self).__init__(task, metadata)
- self._message = "No such task '%s'" % task
-
-
-class LogTee(object):
- def __init__(self, logger, outfile):
- self.outfile = outfile
- self.logger = logger
- self.name = self.outfile.name
-
- def write(self, string):
- self.logger.plain(string)
- self.outfile.write(string)
-
- def __enter__(self):
- self.outfile.__enter__()
- return self
-
- def __exit__(self, *excinfo):
- self.outfile.__exit__(*excinfo)
-
- def __repr__(self):
- return '<LogTee {0}>'.format(self.name)
-
-
-def exec_func(func, d, dirs = None):
- """Execute an BB 'function'"""
-
- body = data.getVar(func, d)
- if not body:
- if body is None:
- logger.warn("Function %s doesn't exist", func)
- return
-
- flags = data.getVarFlags(func, d)
- cleandirs = flags.get('cleandirs')
- if cleandirs:
- for cdir in data.expand(cleandirs, d).split():
- bb.utils.remove(cdir, True)
-
- if dirs is None:
- dirs = flags.get('dirs')
- if dirs:
- dirs = data.expand(dirs, d).split()
-
- if dirs:
- for adir in dirs:
- bb.utils.mkdirhier(adir)
- adir = dirs[-1]
- else:
- adir = data.getVar('B', d, 1)
- if not os.path.exists(adir):
- adir = None
-
- ispython = flags.get('python')
- if flags.get('fakeroot') and not flags.get('task'):
- bb.fatal("Function %s specifies fakeroot but isn't a task?!" % func)
-
- lockflag = flags.get('lockfiles')
- if lockflag:
- lockfiles = [data.expand(f, d) for f in lockflag.split()]
- else:
- lockfiles = None
-
- tempdir = data.getVar('T', d, 1)
- runfile = os.path.join(tempdir, 'run.{0}.{1}'.format(func, os.getpid()))
-
- with bb.utils.fileslocked(lockfiles):
- if ispython:
- exec_func_python(func, d, runfile, cwd=adir)
- else:
- exec_func_shell(func, d, runfile, cwd=adir)
-
-_functionfmt = """
-def {function}(d):
-{body}
-
-{function}(d)
-"""
-logformatter = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
-def exec_func_python(func, d, runfile, cwd=None):
- """Execute a python BB 'function'"""
-
- bbfile = d.getVar('FILE', True)
- try:
- olddir = os.getcwd()
- except OSError:
- olddir = None
- code = _functionfmt.format(function=func, body=d.getVar(func, True))
- bb.utils.mkdirhier(os.path.dirname(runfile))
- with open(runfile, 'w') as script:
- script.write(code)
-
- if cwd:
- os.chdir(cwd)
-
- try:
- comp = utils.better_compile(code, func, bbfile)
- utils.better_exec(comp, {"d": d}, code, bbfile)
- except:
- if sys.exc_info()[0] in (bb.parse.SkipPackage, bb.build.FuncFailed):
- raise
-
- raise FuncFailed(func, None)
- finally:
- if olddir:
- os.chdir(olddir)
-
-def exec_func_shell(function, d, runfile, cwd=None):
- """Execute a shell function from the metadata
-
- Note on directory behavior. The 'dirs' varflag should contain a list
- of the directories you need created prior to execution. The last
- item in the list is where we will chdir/cd to.
- """
-
- # Don't let the emitted shell script override PWD
- d.delVarFlag('PWD', 'export')
-
- with open(runfile, 'w') as script:
- script.write('#!/bin/sh -e\n')
- if logger.isEnabledFor(logging.DEBUG):
- script.write("set -x\n")
- data.emit_func(function, script, d)
-
- script.write("%s\n" % function)
- os.fchmod(script.fileno(), 0775)
-
- env = {
- 'PATH': d.getVar('PATH', True),
- 'LC_ALL': 'C',
- }
-
- cmd = runfile
-
- if logger.isEnabledFor(logging.DEBUG):
- logfile = LogTee(logger, sys.stdout)
- else:
- logfile = sys.stdout
-
- try:
- bb.process.run(cmd, env=env, cwd=cwd, shell=False, stdin=NULL,
- log=logfile)
- except bb.process.CmdError:
- logfn = d.getVar('BB_LOGFILE', True)
- raise FuncFailed(function, logfn)
-
-def _task_data(fn, task, d):
- localdata = data.createCopy(d)
- localdata.setVar('BB_FILENAME', fn)
- localdata.setVar('BB_CURRENTTASK', task[3:])
- localdata.setVar('OVERRIDES', 'task-%s:%s' %
- (task[3:], d.getVar('OVERRIDES', False)))
- localdata.finalize()
- data.expandKeys(localdata)
- return localdata
-
-def _exec_task(fn, task, d, quieterr):
- """Execute a BB 'task'
-
- Execution of a task involves a bit more setup than executing a function,
- running it with its own local metadata, and with some useful variables set.
- """
- if not data.getVarFlag(task, 'task', d):
- event.fire(TaskInvalid(task, d), d)
- logger.error("No such task: %s" % task)
- return 1
-
- logger.debug(1, "Executing task %s", task)
-
- localdata = _task_data(fn, task, d)
- tempdir = localdata.getVar('T', True)
- if not tempdir:
- bb.fatal("T variable not set, unable to build")
-
- bb.utils.mkdirhier(tempdir)
- loglink = os.path.join(tempdir, 'log.{0}'.format(task))
- logfn = os.path.join(tempdir, 'log.{0}.{1}'.format(task, os.getpid()))
- if loglink:
- bb.utils.remove(loglink)
-
- try:
- os.symlink(logfn, loglink)
- except OSError:
- pass
-
- prefuncs = localdata.getVarFlag(task, 'prefuncs', expand=True)
- postfuncs = localdata.getVarFlag(task, 'postfuncs', expand=True)
-
- # Handle logfiles
- si = file('/dev/null', 'r')
- try:
- logfile = file(logfn, 'w')
- except OSError:
- logger.exception("Opening log file '%s'", logfn)
- pass
-
- # Dup the existing fds so we dont lose them
- osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()]
- oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()]
- ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()]
-
- # Replace those fds with our own
- os.dup2(si.fileno(), osi[1])
- os.dup2(logfile.fileno(), oso[1])
- os.dup2(logfile.fileno(), ose[1])
-
- # Ensure python logging goes to the logfile
- handler = logging.StreamHandler(logfile)
- handler.setFormatter(logformatter)
- bblogger.addHandler(handler)
-
- localdata.setVar('BB_LOGFILE', logfn)
-
- event.fire(TaskStarted(task, localdata), localdata)
- try:
- for func in (prefuncs or '').split():
- exec_func(func, localdata)
- exec_func(task, localdata)
- for func in (postfuncs or '').split():
- exec_func(func, localdata)
- except FuncFailed as exc:
- if not quieterr:
- logger.error(str(exc))
- event.fire(TaskFailed(task, logfn, localdata), localdata)
- return 1
- finally:
- sys.stdout.flush()
- sys.stderr.flush()
-
- bblogger.removeHandler(handler)
-
- # Restore the backup fds
- os.dup2(osi[0], osi[1])
- os.dup2(oso[0], oso[1])
- os.dup2(ose[0], ose[1])
-
- # Close the backup fds
- os.close(osi[0])
- os.close(oso[0])
- os.close(ose[0])
- si.close()
-
- logfile.close()
- if os.path.exists(logfn) and os.path.getsize(logfn) == 0:
- logger.debug(2, "Zero size logfn %s, removing", logfn)
- bb.utils.remove(logfn)
- bb.utils.remove(loglink)
- event.fire(TaskSucceeded(task, localdata), localdata)
-
- if not localdata.getVarFlag(task, 'nostamp') and not localdata.getVarFlag(task, 'selfstamp'):
- make_stamp(task, localdata)
-
- return 0
-
-def exec_task(fn, task, d):
- try:
- quieterr = False
- if d.getVarFlag(task, "quieterrors") is not None:
- quieterr = True
-
- return _exec_task(fn, task, d, quieterr)
- except Exception:
- from traceback import format_exc
- if not quieterr:
- logger.error("Build of %s failed" % (task))
- logger.error(format_exc())
- failedevent = TaskFailed(task, None, d)
- event.fire(failedevent, d)
- return 1
-
-def stamp_internal(taskname, d, file_name):
- """
- Internal stamp helper function
- Makes sure the stamp directory exists
- Returns the stamp path+filename
-
- In the bitbake core, d can be a CacheData and file_name will be set.
- When called in task context, d will be a data store, file_name will not be set
- """
- taskflagname = taskname
- if taskname.endswith("_setscene") and taskname != "do_setscene":
- taskflagname = taskname.replace("_setscene", "")
-
- if file_name:
- stamp = d.stamp[file_name]
- extrainfo = d.stamp_extrainfo[file_name].get(taskflagname) or ""
- else:
- stamp = d.getVar('STAMP', True)
- file_name = d.getVar('BB_FILENAME', True)
- extrainfo = d.getVarFlag(taskflagname, 'stamp-extra-info', True) or ""
-
- if not stamp:
- return
-
- stamp = bb.parse.siggen.stampfile(stamp, file_name, taskname, extrainfo)
-
- bb.utils.mkdirhier(os.path.dirname(stamp))
-
- return stamp
-
-def make_stamp(task, d, file_name = None):
- """
- Creates/updates a stamp for a given task
- (d can be a data dict or dataCache)
- """
- stamp = stamp_internal(task, d, file_name)
- # Remove the file and recreate to force timestamp
- # change on broken NFS filesystems
- if stamp:
- bb.utils.remove(stamp)
- f = open(stamp, "w")
- f.close()
-
-def del_stamp(task, d, file_name = None):
- """
- Removes a stamp for a given task
- (d can be a data dict or dataCache)
- """
- stamp = stamp_internal(task, d, file_name)
- bb.utils.remove(stamp)
-
-def stampfile(taskname, d, file_name = None):
- """
- Return the stamp for a given task
- (d can be a data dict or dataCache)
- """
- return stamp_internal(taskname, d, file_name)
-
-def add_tasks(tasklist, d):
- task_deps = data.getVar('_task_deps', d)
- if not task_deps:
- task_deps = {}
- if not 'tasks' in task_deps:
- task_deps['tasks'] = []
- if not 'parents' in task_deps:
- task_deps['parents'] = {}
-
- for task in tasklist:
- task = data.expand(task, d)
- data.setVarFlag(task, 'task', 1, d)
-
- if not task in task_deps['tasks']:
- task_deps['tasks'].append(task)
-
- flags = data.getVarFlags(task, d)
- def getTask(name):
- if not name in task_deps:
- task_deps[name] = {}
- if name in flags:
- deptask = data.expand(flags[name], d)
- task_deps[name][task] = deptask
- getTask('depends')
- getTask('deptask')
- getTask('rdeptask')
- getTask('recrdeptask')
- getTask('nostamp')
- getTask('fakeroot')
- getTask('noexec')
- task_deps['parents'][task] = []
- for dep in flags['deps']:
- dep = data.expand(dep, d)
- task_deps['parents'][task].append(dep)
-
- # don't assume holding a reference
- data.setVar('_task_deps', task_deps, d)
-
-def remove_task(task, kill, d):
- """Remove an BB 'task'.
-
- If kill is 1, also remove tasks that depend on this task."""
-
- data.delVarFlag(task, 'task', d)
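
Note the file-descriptor juggling in _exec_task() above: stdout and
stderr are redirected at the fd level with os.dup2 so that output from
spawned child processes lands in the task logfile too, something a
sys.stdout swap alone would miss. A stripped-down sketch of that
pattern (hypothetical helper, not BitBake API):

    import os
    import sys

    def run_logged(logpath, func):
        saved_out, saved_err = os.dup(1), os.dup(2)   # keep the real fds
        with open(logpath, 'w') as log:
            os.dup2(log.fileno(), 1)                  # stdout -> logfile
            os.dup2(log.fileno(), 2)                  # stderr -> logfile
            try:
                return func()
            finally:
                sys.stdout.flush()
                sys.stderr.flush()
                os.dup2(saved_out, 1)                 # restore originals
                os.dup2(saved_err, 2)
                os.close(saved_out)
                os.close(saved_err)
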
diff --git a/bitbake/lib/bb/cache.py b/bitbake/lib/bb/cache.py
deleted file mode 100644
index 7ea04ac1a5..0000000000
--- a/bitbake/lib/bb/cache.py
+++ /dev/null
@@ -1,632 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# BitBake 'Event' implementation
-#
-# Caching of bitbake variables before task execution
-
-# Copyright (C) 2006 Richard Purdie
-
-# but small sections based on code from bin/bitbake:
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-
-import os
-import logging
-from collections import defaultdict, namedtuple
-import bb.data
-import bb.utils
-
-logger = logging.getLogger("BitBake.Cache")
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info("Importing cPickle failed. "
- "Falling back to a very slow implementation.")
-
-__cache_version__ = "136"
-
-recipe_fields = (
- 'pn',
- 'pv',
- 'pr',
- 'pe',
- 'defaultpref',
- 'depends',
- 'provides',
- 'task_deps',
- 'stamp',
- 'stamp_extrainfo',
- 'broken',
- 'not_world',
- 'skipped',
- 'timestamp',
- 'packages',
- 'packages_dynamic',
- 'rdepends',
- 'rdepends_pkg',
- 'rprovides',
- 'rprovides_pkg',
- 'rrecommends',
- 'rrecommends_pkg',
- 'nocache',
- 'variants',
- 'file_depends',
- 'tasks',
- 'basetaskhashes',
- 'hashfilename',
-)
-
-
-class RecipeInfo(namedtuple('RecipeInfo', recipe_fields)):
- __slots__ = ()
-
- @classmethod
- def listvar(cls, var, metadata):
- return cls.getvar(var, metadata).split()
-
- @classmethod
- def intvar(cls, var, metadata):
- return int(cls.getvar(var, metadata) or 0)
-
- @classmethod
- def depvar(cls, var, metadata):
- return bb.utils.explode_deps(cls.getvar(var, metadata))
-
- @classmethod
- def pkgvar(cls, var, packages, metadata):
- return dict((pkg, cls.depvar("%s_%s" % (var, pkg), metadata))
- for pkg in packages)
-
- @classmethod
- def taskvar(cls, var, tasks, metadata):
- return dict((task, cls.getvar("%s_task-%s" % (var, task), metadata))
- for task in tasks)
-
- @classmethod
- def flaglist(cls, flag, varlist, metadata):
- return dict((var, metadata.getVarFlag(var, flag, True))
- for var in varlist)
-
- @classmethod
- def getvar(cls, var, metadata):
- return metadata.getVar(var, True) or ''
-
- @classmethod
- def make_optional(cls, default=None, **kwargs):
- """Construct the namedtuple from the specified keyword arguments,
- with every value considered optional, using the default value if
- it was not specified."""
- for field in cls._fields:
- kwargs[field] = kwargs.get(field, default)
- return cls(**kwargs)
-
- @classmethod
- def from_metadata(cls, filename, metadata):
- if cls.getvar('__SKIPPED', metadata):
- return cls.make_optional(skipped=True)
-
- tasks = metadata.getVar('__BBTASKS', False)
-
- pn = cls.getvar('PN', metadata)
- packages = cls.listvar('PACKAGES', metadata)
- if not pn in packages:
- packages.append(pn)
-
- return RecipeInfo(
- tasks = tasks,
- basetaskhashes = cls.taskvar('BB_BASEHASH', tasks, metadata),
- hashfilename = cls.getvar('BB_HASHFILENAME', metadata),
-
- file_depends = metadata.getVar('__depends', False),
- task_deps = metadata.getVar('_task_deps', False) or
- {'tasks': [], 'parents': {}},
- variants = cls.listvar('__VARIANTS', metadata) + [''],
-
- skipped = False,
- timestamp = bb.parse.cached_mtime(filename),
- packages = cls.listvar('PACKAGES', metadata),
- pn = pn,
- pe = cls.getvar('PE', metadata),
- pv = cls.getvar('PV', metadata),
- pr = cls.getvar('PR', metadata),
- nocache = cls.getvar('__BB_DONT_CACHE', metadata),
- defaultpref = cls.intvar('DEFAULT_PREFERENCE', metadata),
- broken = cls.getvar('BROKEN', metadata),
- not_world = cls.getvar('EXCLUDE_FROM_WORLD', metadata),
- stamp = cls.getvar('STAMP', metadata),
- stamp_extrainfo = cls.flaglist('stamp-extra-info', tasks, metadata),
- packages_dynamic = cls.listvar('PACKAGES_DYNAMIC', metadata),
- depends = cls.depvar('DEPENDS', metadata),
- provides = cls.depvar('PROVIDES', metadata),
- rdepends = cls.depvar('RDEPENDS', metadata),
- rprovides = cls.depvar('RPROVIDES', metadata),
- rrecommends = cls.depvar('RRECOMMENDS', metadata),
- rprovides_pkg = cls.pkgvar('RPROVIDES', packages, metadata),
- rdepends_pkg = cls.pkgvar('RDEPENDS', packages, metadata),
- rrecommends_pkg = cls.pkgvar('RRECOMMENDS', packages, metadata),
- )
-
-
-class Cache(object):
- """
- BitBake Cache implementation
- """
-
- def __init__(self, data):
- self.cachedir = bb.data.getVar("CACHE", data, True)
- self.clean = set()
- self.checked = set()
- self.depends_cache = {}
- self.data = None
- self.data_fn = None
- self.cacheclean = True
-
- if self.cachedir in [None, '']:
- self.has_cache = False
- logger.info("Not using a cache. "
- "Set CACHE = <directory> to enable.")
- return
-
- self.has_cache = True
- self.cachefile = os.path.join(self.cachedir, "bb_cache.dat")
-
- logger.debug(1, "Using cache in '%s'", self.cachedir)
- bb.utils.mkdirhier(self.cachedir)
-
- # If any of configuration.data's dependencies are newer than the
- # cache there isn't even any point in loading it...
- newest_mtime = 0
- deps = bb.data.getVar("__base_depends", data)
-
- old_mtimes = [old_mtime for _, old_mtime in deps]
- old_mtimes.append(newest_mtime)
- newest_mtime = max(old_mtimes)
-
- if bb.parse.cached_mtime_noerror(self.cachefile) >= newest_mtime:
- self.load_cachefile()
- elif os.path.isfile(self.cachefile):
- logger.info("Out of date cache found, rebuilding...")
-
- def load_cachefile(self):
- with open(self.cachefile, "rb") as cachefile:
- pickled = pickle.Unpickler(cachefile)
- try:
- cache_ver = pickled.load()
- bitbake_ver = pickled.load()
- except Exception:
- logger.info('Invalid cache, rebuilding...')
- return
-
- if cache_ver != __cache_version__:
- logger.info('Cache version mismatch, rebuilding...')
- return
- elif bitbake_ver != bb.__version__:
- logger.info('Bitbake version mismatch, rebuilding...')
- return
-
- cachesize = os.fstat(cachefile.fileno()).st_size
- bb.event.fire(bb.event.CacheLoadStarted(cachesize), self.data)
-
- previous_percent = 0
- while cachefile:
- try:
- key = pickled.load()
- value = pickled.load()
- except Exception:
- break
-
- self.depends_cache[key] = value
-
- # only fire events on even percentage boundaries
- current_progress = cachefile.tell()
- current_percent = 100 * current_progress / cachesize
- if current_percent > previous_percent:
- previous_percent = current_percent
- bb.event.fire(bb.event.CacheLoadProgress(current_progress),
- self.data)
-
- bb.event.fire(bb.event.CacheLoadCompleted(cachesize,
- len(self.depends_cache)),
- self.data)
-
- @staticmethod
- def virtualfn2realfn(virtualfn):
- """
- Convert a virtual file name to a real one + the associated subclass keyword
- """
-
- fn = virtualfn
- cls = ""
- if virtualfn.startswith('virtual:'):
- cls = virtualfn.split(':', 2)[1]
- fn = virtualfn.replace('virtual:' + cls + ':', '')
- return (fn, cls)
-
- @staticmethod
- def realfn2virtual(realfn, cls):
- """
- Convert a real filename + the associated subclass keyword to a virtual filename
- """
- if cls == "":
- return realfn
- return "virtual:" + cls + ":" + realfn
-
- @classmethod
- def loadDataFull(cls, virtualfn, appends, cfgData):
- """
- Return a complete set of data for fn.
- To do this, we need to parse the file.
- """
-
- (fn, virtual) = cls.virtualfn2realfn(virtualfn)
-
- logger.debug(1, "Parsing %s (full)", fn)
-
- bb_data = cls.load_bbfile(fn, appends, cfgData)
- return bb_data[virtual]
-
- @classmethod
- def parse(cls, filename, appends, configdata):
- """Parse the specified filename, returning the recipe information"""
- infos = []
- datastores = cls.load_bbfile(filename, appends, configdata)
- depends = set()
- for variant, data in sorted(datastores.iteritems(),
- key=lambda i: i[0],
- reverse=True):
- virtualfn = cls.realfn2virtual(filename, variant)
- depends |= (data.getVar("__depends", False) or set())
- if depends and not variant:
- data.setVar("__depends", depends)
- info = RecipeInfo.from_metadata(filename, data)
- infos.append((virtualfn, info))
- return infos
-
- def load(self, filename, appends, configdata):
- """Obtain the recipe information for the specified filename,
- using cached values if available, otherwise parsing.
-
- Note that if it does parse to obtain the info, it will not
- automatically add the information to the cache or to your
- CacheData. Use the add or add_info method to do so after
- running this, or use loadData instead."""
- cached = self.cacheValid(filename)
- if cached:
- infos = []
- info = self.depends_cache[filename]
- for variant in info.variants:
- virtualfn = self.realfn2virtual(filename, variant)
- infos.append((virtualfn, self.depends_cache[virtualfn]))
- else:
- logger.debug(1, "Parsing %s", filename)
- return self.parse(filename, appends, configdata)
-
- return cached, infos
-
- def loadData(self, fn, appends, cfgData, cacheData):
- """Load the recipe info for the specified filename,
- parsing and adding to the cache if necessary, and adding
- the recipe information to the supplied CacheData instance."""
- skipped, virtuals = 0, 0
-
- cached, infos = self.load(fn, appends, cfgData)
- for virtualfn, info in infos:
- if info.skipped:
- logger.debug(1, "Skipping %s", virtualfn)
- skipped += 1
- else:
- self.add_info(virtualfn, info, cacheData, not cached)
- virtuals += 1
-
- return cached, skipped, virtuals
-
- def cacheValid(self, fn):
- """
- Is the cache valid for fn?
- Fast version, no timestamps checked.
- """
- if fn not in self.checked:
- self.cacheValidUpdate(fn)
-
- # Is cache enabled?
- if not self.has_cache:
- return False
- if fn in self.clean:
- return True
- return False
-
- def cacheValidUpdate(self, fn):
- """
- Is the cache valid for fn?
- Make thorough (slower) checks including timestamps.
- """
- # Is cache enabled?
- if not self.has_cache:
- return False
-
- self.checked.add(fn)
-
- # File isn't in depends_cache
- if not fn in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", fn)
- return False
-
- mtime = bb.parse.cached_mtime_noerror(fn)
-
- # Check file still exists
- if mtime == 0:
- logger.debug(2, "Cache: %s no longer exists", fn)
- self.remove(fn)
- return False
-
- info = self.depends_cache[fn]
- # Check the file's timestamp
- if mtime != info.timestamp:
- logger.debug(2, "Cache: %s changed", fn)
- self.remove(fn)
- return False
-
- # Check dependencies are still valid
- depends = info.file_depends
- if depends:
- for f, old_mtime in depends:
- fmtime = bb.parse.cached_mtime_noerror(f)
- # Check if file still exists
- if old_mtime != 0 and fmtime == 0:
- logger.debug(2, "Cache: %s's dependency %s was removed",
- fn, f)
- self.remove(fn)
- return False
-
- if (fmtime != old_mtime):
- logger.debug(2, "Cache: %s's dependency %s changed",
- fn, f)
- self.remove(fn)
- return False
-
- invalid = False
- for cls in info.variants:
- virtualfn = self.realfn2virtual(fn, cls)
- self.clean.add(virtualfn)
- if virtualfn not in self.depends_cache:
- logger.debug(2, "Cache: %s is not cached", virtualfn)
- invalid = True
-
- # If any one of the variants is not present, mark as invalid for all
- if invalid:
- for cls in info.variants:
- virtualfn = self.realfn2virtual(fn, cls)
- if virtualfn in self.clean:
- logger.debug(2, "Cache: Removing %s from cache", virtualfn)
- self.clean.remove(virtualfn)
- if fn in self.clean:
- logger.debug(2, "Cache: Marking %s as not clean", fn)
- self.clean.remove(fn)
- return False
-
- self.clean.add(fn)
- return True
-
- def remove(self, fn):
- """
- Remove a fn from the cache
- Called from the parser in error cases
- """
- if fn in self.depends_cache:
- logger.debug(1, "Removing %s from cache", fn)
- del self.depends_cache[fn]
- if fn in self.clean:
- logger.debug(1, "Marking %s as unclean", fn)
- self.clean.remove(fn)
-
- def sync(self):
- """
- Save the cache
- Called from the parser when complete (or exiting)
- """
-
- if not self.has_cache:
- return
-
- if self.cacheclean:
- logger.debug(2, "Cache is clean, not saving.")
- return
-
- with open(self.cachefile, "wb") as cachefile:
- pickler = pickle.Pickler(cachefile, pickle.HIGHEST_PROTOCOL)
- pickler.dump(__cache_version__)
- pickler.dump(bb.__version__)
- for key, value in self.depends_cache.iteritems():
- pickler.dump(key)
- pickler.dump(value)
-
- del self.depends_cache
-
- @staticmethod
- def mtime(cachefile):
- return bb.parse.cached_mtime_noerror(cachefile)
-
- def add_info(self, filename, info, cacheData, parsed=None):
- cacheData.add_from_recipeinfo(filename, info)
- if not self.has_cache:
- return
-
- if 'SRCREVINACTION' not in info.pv and not info.nocache:
- if parsed:
- self.cacheclean = False
- self.depends_cache[filename] = info
-
- def add(self, file_name, data, cacheData, parsed=None):
- """
- Save data we need into the cache
- """
-
- realfn = self.virtualfn2realfn(file_name)[0]
- info = RecipeInfo.from_metadata(realfn, data)
- self.add_info(file_name, info, cacheData, parsed)
-
- @staticmethod
- def load_bbfile(bbfile, appends, config):
- """
- Load and parse one .bb build file
- Return the data and whether parsing resulted in the file being skipped
- """
- chdir_back = False
-
- from bb import data, parse
-
- # expand tmpdir to include this topdir
- data.setVar('TMPDIR', data.getVar('TMPDIR', config, 1) or "", config)
- bbfile_loc = os.path.abspath(os.path.dirname(bbfile))
- oldpath = os.path.abspath(os.getcwd())
- parse.cached_mtime_noerror(bbfile_loc)
- bb_data = data.init_db(config)
- # The ConfHandler first looks if there is a TOPDIR and if not
- # then it would call getcwd().
- # Previously, we chdir()ed to bbfile_loc, called the handler
- # and finally chdir()ed back, a couple of thousand times. We now
- # just fill in TOPDIR to point to bbfile_loc if there is no TOPDIR yet.
- if not data.getVar('TOPDIR', bb_data):
- chdir_back = True
- data.setVar('TOPDIR', bbfile_loc, bb_data)
- try:
- if appends:
- data.setVar('__BBAPPEND', " ".join(appends), bb_data)
- bb_data = parse.handle(bbfile, bb_data)
- if chdir_back:
- os.chdir(oldpath)
- return bb_data
- except:
- if chdir_back:
- os.chdir(oldpath)
- raise
-
-
-def init(cooker):
- """
- The Objective: Cache the minimum amount of data possible yet get to the
- stage of building packages (i.e. tryBuild) without reparsing any .bb files.
-
- To do this, we intercept getVar calls and only cache the variables we see
- being accessed. We rely on the cache getVar calls being made for all
- variables bitbake might need to use to reach this stage. For each cached
- file we need to track:
-
- * Its mtime
- * The mtimes of all its dependencies
- * Whether it caused a parse.SkipPackage exception
-
- Files causing parsing errors are evicted from the cache.
-
- """
- return Cache(cooker.configuration.data)
-
-
-class CacheData(object):
- """
- The data structures we compile from the cached data
- """
-
- def __init__(self):
- # Direct cache variables
- self.providers = defaultdict(list)
- self.rproviders = defaultdict(list)
- self.packages = defaultdict(list)
- self.packages_dynamic = defaultdict(list)
- self.possible_world = []
- self.pkg_pn = defaultdict(list)
- self.pkg_fn = {}
- self.pkg_pepvpr = {}
- self.pkg_dp = {}
- self.pn_provides = defaultdict(list)
- self.fn_provides = {}
- self.all_depends = []
- self.deps = defaultdict(list)
- self.rundeps = defaultdict(lambda: defaultdict(list))
- self.runrecs = defaultdict(lambda: defaultdict(list))
- self.task_queues = {}
- self.task_deps = {}
- self.stamp = {}
- self.stamp_extrainfo = {}
- self.preferred = {}
- self.tasks = {}
- self.basetaskhash = {}
- self.hashfn = {}
-
- # Indirect Cache variables (set elsewhere)
- self.ignored_dependencies = []
- self.world_target = set()
- self.bbfile_priority = {}
- self.bbfile_config_priorities = []
-
- def add_from_recipeinfo(self, fn, info):
- self.task_deps[fn] = info.task_deps
- self.pkg_fn[fn] = info.pn
- self.pkg_pn[info.pn].append(fn)
- self.pkg_pepvpr[fn] = (info.pe, info.pv, info.pr)
- self.pkg_dp[fn] = info.defaultpref
- self.stamp[fn] = info.stamp
- self.stamp_extrainfo[fn] = info.stamp_extrainfo
-
- provides = [info.pn]
- for provide in info.provides:
- if provide not in provides:
- provides.append(provide)
- self.fn_provides[fn] = provides
-
- for provide in provides:
- self.providers[provide].append(fn)
- if provide not in self.pn_provides[info.pn]:
- self.pn_provides[info.pn].append(provide)
-
- for dep in info.depends:
- if dep not in self.deps[fn]:
- self.deps[fn].append(dep)
- if dep not in self.all_depends:
- self.all_depends.append(dep)
-
- rprovides = info.rprovides
- for package in info.packages:
- self.packages[package].append(fn)
- rprovides += info.rprovides_pkg[package]
-
- for rprovide in rprovides:
- self.rproviders[rprovide].append(fn)
-
- for package in info.packages_dynamic:
- self.packages_dynamic[package].append(fn)
-
- # Build hash of runtime depends and rececommends
- for package in info.packages + [info.pn]:
- self.rundeps[fn][package] = list(info.rdepends) + info.rdepends_pkg[package]
- self.runrecs[fn][package] = list(info.rrecommends) + info.rrecommends_pkg[package]
-
- # Collect files we may need for possible world-dep
- # calculations
- if not info.broken and not info.not_world:
- self.possible_world.append(fn)
-
- self.hashfn[fn] = info.hashfilename
- for task, taskhash in info.basetaskhashes.iteritems():
- identifier = '%s.%s' % (fn, task)
- self.basetaskhash[identifier] = taskhash
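
The cache file handled above is a plain pickle stream whose first two
records are a cache-format version and the bitbake version; any mismatch
discards the whole file rather than attempting migration. A minimal
sketch of that versioned-pickle load (hypothetical names):

    import pickle

    CACHE_VERSION = "136"

    def load_cache(path, bitbake_version):
        try:
            with open(path, "rb") as f:
                unpickler = pickle.Unpickler(f)
                if unpickler.load() != CACHE_VERSION:
                    return {}          # format changed: rebuild
                if unpickler.load() != bitbake_version:
                    return {}          # different bitbake: rebuild
                entries = {}
                while True:
                    try:
                        key = unpickler.load()
                        entries[key] = unpickler.load()
                    except EOFError:
                        return entries
        except (OSError, pickle.UnpicklingError):
            return {}                  # missing or corrupt: rebuild
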
diff --git a/bitbake/lib/bb/codeparser.py b/bitbake/lib/bb/codeparser.py
deleted file mode 100644
index bfffcacc33..0000000000
--- a/bitbake/lib/bb/codeparser.py
+++ /dev/null
@@ -1,336 +0,0 @@
-import ast
-import codegen
-import logging
-import os.path
-import bb.utils, bb.data
-from itertools import chain
-from pysh import pyshyacc, pyshlex
-
-
-logger = logging.getLogger('BitBake.CodeParser')
-PARSERCACHE_VERSION = 2
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
-
-def check_indent(codestr):
- """If the code is indented, add a top level piece of code to 'remove' the indentation"""
-
- i = 0
- while codestr[i] in ["\n", " ", " "]:
- i = i + 1
-
- if i == 0:
- return codestr
-
- if codestr[i-1] is " " or codestr[i-1] is " ":
- return "if 1:\n" + codestr
-
- return codestr
-
-pythonparsecache = {}
-shellparsecache = {}
-
-def parser_cachefile(d):
- cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
- bb.data.getVar("CACHE", d, True))
- if cachedir in [None, '']:
- return None
- bb.utils.mkdirhier(cachedir)
- cachefile = os.path.join(cachedir, "bb_codeparser.dat")
- logger.debug(1, "Using cache in '%s' for codeparser cache", cachefile)
- return cachefile
-
-def parser_cache_init(d):
- global pythonparsecache
- global shellparsecache
-
- cachefile = parser_cachefile(d)
- if not cachefile:
- return
-
- try:
- p = pickle.Unpickler(file(cachefile, "rb"))
- data, version = p.load()
- except:
- return
-
- if version != PARSERCACHE_VERSION:
- return
-
- pythonparsecache = data[0]
- shellparsecache = data[1]
-
-def parser_cache_save(d):
- cachefile = parser_cachefile(d)
- if not cachefile:
- return
-
- p = pickle.Pickler(file(cachefile, "wb"), -1)
- p.dump([[pythonparsecache, shellparsecache], PARSERCACHE_VERSION])
-
-class PythonParser():
- class ValueVisitor():
- """Visitor to traverse a python abstract syntax tree and obtain
- the variables referenced via bitbake metadata APIs, and the external
- functions called.
- """
-
- getvars = ("d.getVar", "bb.data.getVar", "data.getVar")
- expands = ("d.expand", "bb.data.expand", "data.expand")
- execs = ("bb.build.exec_func", "bb.build.exec_task")
-
- @classmethod
- def _compare_name(cls, strparts, node):
- """Given a sequence of strings representing a python name,
- where the last component is the actual Name and the prior
- elements are Attribute nodes, determine if the supplied node
- matches.
- """
-
- if not strparts:
- return True
-
- current, rest = strparts[0], strparts[1:]
- if isinstance(node, ast.Attribute):
- if current == node.attr:
- return cls._compare_name(rest, node.value)
- elif isinstance(node, ast.Name):
- if current == node.id:
- return True
- return False
-
- @classmethod
- def compare_name(cls, value, node):
- """Convenience function for the _compare_node method, which
- can accept a string (which is split by '.' for you), or an
- iterable of strings, in which case it checks to see if any of
- them match, similar to isinstance.
- """
-
- if isinstance(value, basestring):
- return cls._compare_name(tuple(reversed(value.split("."))),
- node)
- else:
- return any(cls.compare_name(item, node) for item in value)
-
- def __init__(self, value):
- self.var_references = set()
- self.var_execs = set()
- self.direct_func_calls = set()
- self.var_expands = set()
- self.value = value
-
- @classmethod
- def warn(cls, func, arg):
- """Warn about calls of bitbake APIs which pass a non-literal
- argument for the variable name, as we're not able to track such
- a reference.
- """
-
- try:
- funcstr = codegen.to_source(func)
- argstr = codegen.to_source(arg)
- except TypeError:
- logger.debug(2, 'Failed to convert function and argument to source form')
- else:
- logger.debug(1, "Warning: in call to '%s', argument '%s' is "
- "not a literal", funcstr, argstr)
-
- def visit_Call(self, node):
- if self.compare_name(self.getvars, node.func):
- if isinstance(node.args[0], ast.Str):
- self.var_references.add(node.args[0].s)
- else:
- self.warn(node.func, node.args[0])
- elif self.compare_name(self.expands, node.func):
- if isinstance(node.args[0], ast.Str):
- self.warn(node.func, node.args[0])
- self.var_expands.update(node.args[0].s)
- elif isinstance(node.args[0], ast.Call) and \
- self.compare_name(self.getvars, node.args[0].func):
- pass
- else:
- self.warn(node.func, node.args[0])
- elif self.compare_name(self.execs, node.func):
- if isinstance(node.args[0], ast.Str):
- self.var_execs.add(node.args[0].s)
- else:
- self.warn(node.func, node.args[0])
- elif isinstance(node.func, ast.Name):
- self.direct_func_calls.add(node.func.id)
- elif isinstance(node.func, ast.Attribute):
- # We must have a qualified name. Therefore we need
- # to walk the chain of 'Attribute' nodes to determine
- # the qualification.
- attr_node = node.func.value
- identifier = node.func.attr
- while isinstance(attr_node, ast.Attribute):
- identifier = attr_node.attr + "." + identifier
- attr_node = attr_node.value
- if isinstance(attr_node, ast.Name):
- identifier = attr_node.id + "." + identifier
- self.direct_func_calls.add(identifier)
-
- def __init__(self):
- #self.funcdefs = set()
- self.execs = set()
- #self.external_cmds = set()
- self.references = set()
-
- def parse_python(self, node):
-
- h = hash(str(node))
-
- if h in pythonparsecache:
- self.references = pythonparsecache[h]["refs"]
- self.execs = pythonparsecache[h]["execs"]
- return
-
- code = compile(check_indent(str(node)), "<string>", "exec",
- ast.PyCF_ONLY_AST)
-
- visitor = self.ValueVisitor(code)
- for n in ast.walk(code):
- if n.__class__.__name__ == "Call":
- visitor.visit_Call(n)
-
- self.references.update(visitor.var_references)
- self.references.update(visitor.var_execs)
- self.execs = visitor.direct_func_calls
-
- pythonparsecache[h] = {}
- pythonparsecache[h]["refs"] = self.references
- pythonparsecache[h]["execs"] = self.execs
-
-class ShellParser():
- def __init__(self):
- self.funcdefs = set()
- self.allexecs = set()
- self.execs = set()
-
- def parse_shell(self, value):
- """Parse the supplied shell code in a string, returning the external
- commands it executes.
- """
-
- h = hash(str(value))
-
- if h in shellparsecache:
- self.execs = shellparsecache[h]["execs"]
- return self.execs
-
- try:
- tokens, _ = pyshyacc.parse(value, eof=True, debug=False)
- except pyshlex.NeedMore:
- raise ShellSyntaxError("Unexpected EOF")
-
- for token in tokens:
- self.process_tokens(token)
- self.execs = set(cmd for cmd in self.allexecs if cmd not in self.funcdefs)
-
- shellparsecache[h] = {}
- shellparsecache[h]["execs"] = self.execs
-
- return self.execs
-
- def process_tokens(self, tokens):
- """Process a supplied portion of the syntax tree as returned by
- pyshyacc.parse.
- """
-
- def function_definition(value):
- self.funcdefs.add(value.name)
- return [value.body], None
-
- def case_clause(value):
- # Element 0 of each item in the case is the list of patterns, and
- # Element 1 of each item in the case is the list of commands to be
- # executed when that pattern matches.
- words = chain(*[item[0] for item in value.items])
- cmds = chain(*[item[1] for item in value.items])
- return cmds, words
-
- def if_clause(value):
- main = chain(value.cond, value.if_cmds)
- rest = value.else_cmds
- if isinstance(rest, tuple) and rest[0] == "elif":
- return chain(main, if_clause(rest[1]))
- else:
- return chain(main, rest)
-
- def simple_command(value):
- return None, chain(value.words, (assign[1] for assign in value.assigns))
-
- token_handlers = {
- "and_or": lambda x: ((x.left, x.right), None),
- "async": lambda x: ([x], None),
- "brace_group": lambda x: (x.cmds, None),
- "for_clause": lambda x: (x.cmds, x.items),
- "function_definition": function_definition,
- "if_clause": lambda x: (if_clause(x), None),
- "pipeline": lambda x: (x.commands, None),
- "redirect_list": lambda x: ([x.cmd], None),
- "subshell": lambda x: (x.cmds, None),
- "while_clause": lambda x: (chain(x.condition, x.cmds), None),
- "until_clause": lambda x: (chain(x.condition, x.cmds), None),
- "simple_command": simple_command,
- "case_clause": case_clause,
- }
-
- for token in tokens:
- name, value = token
- try:
- more_tokens, words = token_handlers[name](value)
- except KeyError:
- raise NotImplementedError("Unsupported token type " + name)
-
- if more_tokens:
- self.process_tokens(more_tokens)
-
- if words:
- self.process_words(words)
-
- def process_words(self, words):
- """Process a set of 'words' in pyshyacc parlance, which includes
- extraction of executed commands from $() blocks, as well as grabbing
- the command name argument.
- """
-
- words = list(words)
- for word in list(words):
- wtree = pyshlex.make_wordtree(word[1])
- for part in wtree:
- if not isinstance(part, list):
- continue
-
- if part[0] in ('`', '$('):
- command = pyshlex.wordtree_as_string(part[1:-1])
- self.parse_shell(command)
-
- if word[0] in ("cmd_name", "cmd_word"):
- if word in words:
- words.remove(word)
-
- usetoken = False
- for word in words:
- if word[0] in ("cmd_name", "cmd_word") or \
- (usetoken and word[0] == "TOKEN"):
- if "=" in word[1]:
- usetoken = True
- continue
-
- cmd = word[1]
- if cmd.startswith("$"):
- logger.debug(1, "Warning: execution of non-literal "
- "command '%s'", cmd)
- elif cmd == "eval":
- command = " ".join(word for _, word in words[1:])
- self.parse_shell(command)
- else:
- self.allexecs.add(cmd)
- break
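
PythonParser above records which variables a piece of python task code
reads, but only when the variable name is a literal; non-literal names
get the "is not a literal" warning because they cannot be tracked
statically. The essence of the technique in a small Python 3 sketch
(assumes the ast.Constant node of Python 3.8+):

    import ast

    def find_getvar_refs(source):
        refs = set()
        for node in ast.walk(ast.parse(source)):
            if (isinstance(node, ast.Call)
                    and isinstance(node.func, ast.Attribute)
                    and node.func.attr == "getVar"
                    and node.args
                    and isinstance(node.args[0], ast.Constant)
                    and isinstance(node.args[0].value, str)):
                refs.add(node.args[0].value)
        return refs

    print(find_getvar_refs("pv = d.getVar('PV', True)"))   # {'PV'}
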
diff --git a/bitbake/lib/bb/command.py b/bitbake/lib/bb/command.py
deleted file mode 100644
index b88089298c..0000000000
--- a/bitbake/lib/bb/command.py
+++ /dev/null
@@ -1,271 +0,0 @@
-"""
-BitBake 'Command' module
-
-Provide an interface to interact with the bitbake server through 'commands'
-"""
-
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
-The bitbake server takes 'commands' from its UI/commandline.
-Commands are either synchronous or asynchronous.
-Async commands return data to the client in the form of events.
-Sync commands must only return data through the function return value
-and must not trigger events, directly or indirectly.
-Commands are queued in a CommandQueue
-"""
-
-import bb.event
-import bb.cooker
-import bb.data
-
-async_cmds = {}
-sync_cmds = {}
-
-
-class CommandCompleted(bb.event.Event):
- pass
-
-class CommandExit(bb.event.Event):
- def __init__(self, exitcode):
- bb.event.Event.__init__(self)
- self.exitcode = int(exitcode)
-
-class CommandFailed(CommandExit):
- def __init__(self, message):
- self.error = message
- CommandExit.__init__(self, 1)
-
-class Command:
- """
- A queue of asynchronous commands for bitbake
- """
- def __init__(self, cooker):
- self.cooker = cooker
- self.cmds_sync = CommandsSync()
- self.cmds_async = CommandsAsync()
-
- # FIXME Add lock for this
- self.currentAsyncCommand = None
-
- for attr in CommandsSync.__dict__:
- command = attr[:].lower()
- method = getattr(CommandsSync, attr)
- sync_cmds[command] = (method)
-
- for attr in CommandsAsync.__dict__:
- command = attr[:].lower()
- method = getattr(CommandsAsync, attr)
- async_cmds[command] = (method)
-
- def runCommand(self, commandline):
- try:
- command = commandline.pop(0)
- if command in CommandsSync.__dict__:
- # Can run synchronous commands straight away
- return getattr(CommandsSync, command)(self.cmds_sync, self, commandline)
- if self.currentAsyncCommand is not None:
- return "Busy (%s in progress)" % self.currentAsyncCommand[0]
- if command not in CommandsAsync.__dict__:
- return "No such command"
- self.currentAsyncCommand = (command, commandline)
- self.cooker.server.register_idle_function(self.cooker.runCommands, self.cooker)
- return True
- except:
- import traceback
- return traceback.format_exc()
-
- def runAsyncCommand(self):
- try:
- if self.currentAsyncCommand is not None:
- (command, options) = self.currentAsyncCommand
- commandmethod = getattr(CommandsAsync, command)
- needcache = getattr( commandmethod, "needcache" )
- if (needcache and self.cooker.state in
- (bb.cooker.state.initial, bb.cooker.state.parsing)):
- self.cooker.updateCache()
- return True
- else:
- commandmethod(self.cmds_async, self, options)
- return False
- else:
- return False
- except KeyboardInterrupt as exc:
- self.finishAsyncCommand("Interrupted")
- return False
- except SystemExit as exc:
- arg = exc.args[0]
- if isinstance(arg, basestring):
- self.finishAsyncCommand(arg)
- else:
- self.finishAsyncCommand("Exited with %s" % arg)
- return False
- except Exception:
- import traceback
- self.finishAsyncCommand(traceback.format_exc())
- return False
-
- def finishAsyncCommand(self, msg=None, code=None):
- if msg:
- bb.event.fire(CommandFailed(msg), self.cooker.configuration.event_data)
- elif code:
- bb.event.fire(CommandExit(code), self.cooker.configuration.event_data)
- else:
- bb.event.fire(CommandCompleted(), self.cooker.configuration.event_data)
- self.currentAsyncCommand = None
-
-
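
The registration loop in Command.__init__ and the needcache gating in runAsyncCommand reduce to a small pattern: command names are discovered by introspecting a class __dict__, and a 'needcache' attribute on each method tells the server whether metadata parsing must finish first. A sketch with invented names, not the real API:

    class Commands:
        def parseFiles(self, params):
            return "parsed with %s" % params
        parseFiles.needcache = True

    cmds = {}
    for attr in Commands.__dict__:
        if not attr.startswith("__"):
            cmds[attr.lower()] = getattr(Commands, attr)

    method = cmds["parsefiles"]
    if getattr(method, "needcache", False):
        pass  # the real server keeps calling updateCache() until the cache is ready
    print(method(Commands(), ["conf"]))
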
-class CommandsSync:
- """
- A class of synchronous commands
- These should run quickly so as not to hurt interactive performance.
-    These must not influence any running asynchronous command.
- """
-
- def stateShutdown(self, command, params):
- """
- Trigger cooker 'shutdown' mode
- """
- command.cooker.shutdown()
-
- def stateStop(self, command, params):
- """
- Stop the cooker
- """
- command.cooker.stop()
-
- def getCmdLineAction(self, command, params):
- """
- Get any command parsed from the commandline
- """
- return command.cooker.commandlineAction
-
- def getVariable(self, command, params):
- """
- Read the value of a variable from configuration.data
- """
- varname = params[0]
- expand = True
- if len(params) > 1:
- expand = params[1]
-
- return bb.data.getVar(varname, command.cooker.configuration.data, expand)
-
- def setVariable(self, command, params):
- """
- Set the value of variable in configuration.data
- """
- varname = params[0]
- value = params[1]
- bb.data.setVar(varname, value, command.cooker.configuration.data)
-
-
-class CommandsAsync:
- """
- A class of asynchronous commands
- These functions communicate via generated events.
- Any function that requires metadata parsing should be here.
- """
-
- def buildFile(self, command, params):
- """
- Build a single specified .bb file
- """
- bfile = params[0]
- task = params[1]
-
- command.cooker.buildFile(bfile, task)
- buildFile.needcache = False
-
- def buildTargets(self, command, params):
- """
- Build a set of targets
- """
- pkgs_to_build = params[0]
- task = params[1]
-
- command.cooker.buildTargets(pkgs_to_build, task)
- buildTargets.needcache = True
-
- def generateDepTreeEvent(self, command, params):
- """
- Generate an event containing the dependency information
- """
- pkgs_to_build = params[0]
- task = params[1]
-
- command.cooker.generateDepTreeEvent(pkgs_to_build, task)
- command.finishAsyncCommand()
- generateDepTreeEvent.needcache = True
-
- def generateDotGraph(self, command, params):
- """
- Dump dependency information to disk as .dot files
- """
- pkgs_to_build = params[0]
- task = params[1]
-
- command.cooker.generateDotGraphFiles(pkgs_to_build, task)
- command.finishAsyncCommand()
- generateDotGraph.needcache = True
-
- def showVersions(self, command, params):
- """
- Show the currently selected versions
- """
- command.cooker.showVersions()
- command.finishAsyncCommand()
- showVersions.needcache = True
-
- def showEnvironmentTarget(self, command, params):
- """
- Print the environment of a target recipe
- (needs the cache to work out which recipe to use)
- """
- pkg = params[0]
-
- command.cooker.showEnvironment(None, pkg)
- command.finishAsyncCommand()
- showEnvironmentTarget.needcache = True
-
- def showEnvironment(self, command, params):
- """
- Print the standard environment
- or if specified the environment for a specified recipe
- """
- bfile = params[0]
-
- command.cooker.showEnvironment(bfile)
- command.finishAsyncCommand()
- showEnvironment.needcache = False
-
- def parseFiles(self, command, params):
- """
- Parse the .bb files
- """
- command.cooker.updateCache()
- command.finishAsyncCommand()
- parseFiles.needcache = True
-
- def compareRevisions(self, command, params):
- """
-        Compare the saved source revisions with the latest upstream ones
- """
- if bb.fetch.fetcher_compare_revisions(command.cooker.configuration.data):
- command.finishAsyncCommand(code=1)
- else:
- command.finishAsyncCommand()
- compareRevisions.needcache = True
diff --git a/bitbake/lib/bb/cooker.py b/bitbake/lib/bb/cooker.py
deleted file mode 100644
index ff16daf83f..0000000000
--- a/bitbake/lib/bb/cooker.py
+++ /dev/null
@@ -1,1078 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-# Copyright (C) 2006 - 2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import print_function
-import sys, os, glob, os.path, re, time
-import atexit
-import itertools
-import logging
-import multiprocessing
-import signal
-import sre_constants
-import threading
-from cStringIO import StringIO
-from contextlib import closing
-import bb
-from bb import utils, data, parse, event, cache, providers, taskdata, command, runqueue
-
-logger = logging.getLogger("BitBake")
-collectlog = logging.getLogger("BitBake.Collection")
-buildlog = logging.getLogger("BitBake.Build")
-parselog = logging.getLogger("BitBake.Parsing")
-providerlog = logging.getLogger("BitBake.Provider")
-
-class MultipleMatches(Exception):
- """
- Exception raised when multiple file matches are found
- """
-
-class NothingToBuild(Exception):
- """
- Exception raised when there is nothing to build
- """
-
-class state:
- initial, parsing, running, shutdown, stop = range(5)
-
-#============================================================================#
-# BBCooker
-#============================================================================#
-class BBCooker:
- """
- Manages one bitbake build run
- """
-
- def __init__(self, configuration, server):
- self.status = None
- self.appendlist = {}
-
- if server:
- self.server = server.BitBakeServer(self)
-
- self.configuration = configuration
-
- self.configuration.data = bb.data.init()
-
- if not server:
- bb.data.setVar("BB_WORKERCONTEXT", "1", self.configuration.data)
-
- bb.data.inheritFromOS(self.configuration.data)
-
- self.parseConfigurationFiles(self.configuration.file)
-
- if not self.configuration.cmd:
- self.configuration.cmd = bb.data.getVar("BB_DEFAULT_TASK", self.configuration.data, True) or "build"
-
- bbpkgs = bb.data.getVar('BBPKGS', self.configuration.data, True)
- if bbpkgs and len(self.configuration.pkgs_to_build) == 0:
- self.configuration.pkgs_to_build.extend(bbpkgs.split())
-
- #
- # Special updated configuration we use for firing events
- #
- self.configuration.event_data = bb.data.createCopy(self.configuration.data)
- bb.data.update_data(self.configuration.event_data)
-
- # TOSTOP must not be set or our children will hang when they output
- fd = sys.stdout.fileno()
- if os.isatty(fd):
- import termios
- tcattr = termios.tcgetattr(fd)
- if tcattr[3] & termios.TOSTOP:
- buildlog.info("The terminal had the TOSTOP bit set, clearing...")
- tcattr[3] = tcattr[3] & ~termios.TOSTOP
- termios.tcsetattr(fd, termios.TCSANOW, tcattr)
-
- self.command = bb.command.Command(self)
- self.state = state.initial
-
- def parseConfiguration(self):
-
-
- # Change nice level if we're asked to
- nice = bb.data.getVar("BB_NICE_LEVEL", self.configuration.data, True)
- if nice:
- curnice = os.nice(0)
- nice = int(nice) - curnice
- buildlog.verbose("Renice to %s " % os.nice(nice))
-
- def parseCommandLine(self):
- # Parse any commandline into actions
- if self.configuration.show_environment:
- self.commandlineAction = None
-
- if 'world' in self.configuration.pkgs_to_build:
- buildlog.error("'world' is not a valid target for --environment.")
- elif len(self.configuration.pkgs_to_build) > 1:
- buildlog.error("Only one target can be used with the --environment option.")
- elif self.configuration.buildfile and len(self.configuration.pkgs_to_build) > 0:
- buildlog.error("No target should be used with the --environment and --buildfile options.")
- elif len(self.configuration.pkgs_to_build) > 0:
- self.commandlineAction = ["showEnvironmentTarget", self.configuration.pkgs_to_build]
- else:
- self.commandlineAction = ["showEnvironment", self.configuration.buildfile]
- elif self.configuration.buildfile is not None:
- self.commandlineAction = ["buildFile", self.configuration.buildfile, self.configuration.cmd]
- elif self.configuration.revisions_changed:
- self.commandlineAction = ["compareRevisions"]
- elif self.configuration.show_versions:
- self.commandlineAction = ["showVersions"]
- elif self.configuration.parse_only:
- self.commandlineAction = ["parseFiles"]
- elif self.configuration.dot_graph:
- if self.configuration.pkgs_to_build:
- self.commandlineAction = ["generateDotGraph", self.configuration.pkgs_to_build, self.configuration.cmd]
- else:
- self.commandlineAction = None
- buildlog.error("Please specify a package name for dependency graph generation.")
- else:
- if self.configuration.pkgs_to_build:
- self.commandlineAction = ["buildTargets", self.configuration.pkgs_to_build, self.configuration.cmd]
- else:
- self.commandlineAction = None
- buildlog.error("Nothing to do. Use 'bitbake world' to build everything, or run 'bitbake --help' for usage information.")
-
- def runCommands(self, server, data, abort):
- """
-        Run any queued asynchronous command.
-        This is done by the idle handler so it runs in the server's own
-        context rather than being tied to any UI.
- """
-
- return self.command.runAsyncCommand()
-
- def showVersions(self):
-
- # Need files parsed
- self.updateCache()
-
- pkg_pn = self.status.pkg_pn
- preferred_versions = {}
- latest_versions = {}
-
- # Sort by priority
- for pn in pkg_pn:
- (last_ver, last_file, pref_ver, pref_file) = bb.providers.findBestProvider(pn, self.configuration.data, self.status)
- preferred_versions[pn] = (pref_ver, pref_file)
- latest_versions[pn] = (last_ver, last_file)
-
- logger.plain("%-35s %25s %25s", "Package Name", "Latest Version", "Preferred Version")
- logger.plain("%-35s %25s %25s\n", "============", "==============", "=================")
-
- for p in sorted(pkg_pn):
- pref = preferred_versions[p]
- latest = latest_versions[p]
-
- prefstr = pref[0][0] + ":" + pref[0][1] + '-' + pref[0][2]
- lateststr = latest[0][0] + ":" + latest[0][1] + "-" + latest[0][2]
-
- if pref == latest:
- prefstr = ""
-
- logger.plain("%-35s %25s %25s", p, lateststr, prefstr)
-
- def showEnvironment(self, buildfile = None, pkgs_to_build = []):
- """
- Show the outer or per-package environment
- """
- fn = None
- envdata = None
-
- if buildfile:
- fn = self.matchFile(buildfile)
- elif len(pkgs_to_build) == 1:
- self.updateCache()
-
- localdata = data.createCopy(self.configuration.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
-
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
- taskdata.add_provider(localdata, self.status, pkgs_to_build[0])
- taskdata.add_unresolved(localdata, self.status)
-
- targetid = taskdata.getbuild_id(pkgs_to_build[0])
- fnid = taskdata.build_targets[targetid][0]
- fn = taskdata.fn_index[fnid]
- else:
- envdata = self.configuration.data
-
- if fn:
- try:
- envdata = bb.cache.Cache.loadDataFull(fn, self.get_file_appends(fn), self.configuration.data)
- except Exception, e:
- parselog.exception("Unable to read %s", fn)
- raise
-
- # emit variables and shell functions
- data.update_data(envdata)
- with closing(StringIO()) as env:
- data.emit_env(env, envdata, True)
- logger.plain(env.getvalue())
-
-        # emit the metadata which isn't valid shell
- data.expandKeys(envdata)
- for e in envdata.keys():
- if data.getVarFlag( e, 'python', envdata ):
- logger.plain("\npython %s () {\n%s}\n", e, data.getVar(e, envdata, 1))
-
- def generateDepTreeData(self, pkgs_to_build, task):
- """
- Create a dependency tree of pkgs_to_build, returning the data.
- """
-
- # Need files parsed
- self.updateCache()
-
- # If we are told to do the None task then query the default task
- if (task == None):
- task = self.configuration.cmd
-
- pkgs_to_build = self.checkPackages(pkgs_to_build)
-
- localdata = data.createCopy(self.configuration.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
-
- runlist = []
- for k in pkgs_to_build:
- taskdata.add_provider(localdata, self.status, k)
- runlist.append([k, "do_%s" % task])
- taskdata.add_unresolved(localdata, self.status)
-
- rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
- rq.rqdata.prepare()
-
- seen_fnids = []
- depend_tree = {}
- depend_tree["depends"] = {}
- depend_tree["tdepends"] = {}
- depend_tree["pn"] = {}
- depend_tree["rdepends-pn"] = {}
- depend_tree["packages"] = {}
- depend_tree["rdepends-pkg"] = {}
- depend_tree["rrecs-pkg"] = {}
-
- for task in xrange(len(rq.rqdata.runq_fnid)):
- taskname = rq.rqdata.runq_task[task]
- fnid = rq.rqdata.runq_fnid[task]
- fn = taskdata.fn_index[fnid]
- pn = self.status.pkg_fn[fn]
- version = "%s:%s-%s" % self.status.pkg_pepvpr[fn]
- if pn not in depend_tree["pn"]:
- depend_tree["pn"][pn] = {}
- depend_tree["pn"][pn]["filename"] = fn
- depend_tree["pn"][pn]["version"] = version
- for dep in rq.rqdata.runq_depends[task]:
- depfn = taskdata.fn_index[rq.rqdata.runq_fnid[dep]]
- deppn = self.status.pkg_fn[depfn]
- dotname = "%s.%s" % (pn, rq.rqdata.runq_task[task])
- if not dotname in depend_tree["tdepends"]:
- depend_tree["tdepends"][dotname] = []
- depend_tree["tdepends"][dotname].append("%s.%s" % (deppn, rq.rqdata.runq_task[dep]))
- if fnid not in seen_fnids:
- seen_fnids.append(fnid)
- packages = []
-
- depend_tree["depends"][pn] = []
- for dep in taskdata.depids[fnid]:
- depend_tree["depends"][pn].append(taskdata.build_names_index[dep])
-
- depend_tree["rdepends-pn"][pn] = []
- for rdep in taskdata.rdepids[fnid]:
- depend_tree["rdepends-pn"][pn].append(taskdata.run_names_index[rdep])
-
- rdepends = self.status.rundeps[fn]
- for package in rdepends:
- depend_tree["rdepends-pkg"][package] = []
- for rdepend in rdepends[package]:
- depend_tree["rdepends-pkg"][package].append(rdepend)
- packages.append(package)
-
- rrecs = self.status.runrecs[fn]
- for package in rrecs:
- depend_tree["rrecs-pkg"][package] = []
- for rdepend in rrecs[package]:
- depend_tree["rrecs-pkg"][package].append(rdepend)
- if not package in packages:
- packages.append(package)
-
- for package in packages:
- if package not in depend_tree["packages"]:
- depend_tree["packages"][package] = {}
- depend_tree["packages"][package]["pn"] = pn
- depend_tree["packages"][package]["filename"] = fn
- depend_tree["packages"][package]["version"] = version
-
- return depend_tree
-
-
- def generateDepTreeEvent(self, pkgs_to_build, task):
- """
- Create a task dependency graph of pkgs_to_build.
- Generate an event with the result
- """
- depgraph = self.generateDepTreeData(pkgs_to_build, task)
- bb.event.fire(bb.event.DepTreeGenerated(depgraph), self.configuration.data)
-
- def generateDotGraphFiles(self, pkgs_to_build, task):
- """
- Create a task dependency graph of pkgs_to_build.
- Save the result to a set of .dot files.
- """
-
- depgraph = self.generateDepTreeData(pkgs_to_build, task)
-
-        # Print a flattened form of the package dependency graph, merging subpackages into their main pn
- depends_file = file('pn-depends.dot', 'w' )
- print("digraph depends {", file=depends_file)
- for pn in depgraph["pn"]:
- fn = depgraph["pn"][pn]["filename"]
- version = depgraph["pn"][pn]["version"]
- print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
- for pn in depgraph["depends"]:
- for depend in depgraph["depends"][pn]:
- print('"%s" -> "%s"' % (pn, depend), file=depends_file)
- for pn in depgraph["rdepends-pn"]:
- for rdepend in depgraph["rdepends-pn"][pn]:
- print('"%s" -> "%s" [style=dashed]' % (pn, rdepend), file=depends_file)
- print("}", file=depends_file)
- logger.info("PN dependencies saved to 'pn-depends.dot'")
-
- depends_file = file('package-depends.dot', 'w' )
- print("digraph depends {", file=depends_file)
- for package in depgraph["packages"]:
- pn = depgraph["packages"][package]["pn"]
- fn = depgraph["packages"][package]["filename"]
- version = depgraph["packages"][package]["version"]
- if package == pn:
- print('"%s" [label="%s %s\\n%s"]' % (pn, pn, version, fn), file=depends_file)
- else:
- print('"%s" [label="%s(%s) %s\\n%s"]' % (package, package, pn, version, fn), file=depends_file)
- for depend in depgraph["depends"][pn]:
- print('"%s" -> "%s"' % (package, depend), file=depends_file)
- for package in depgraph["rdepends-pkg"]:
- for rdepend in depgraph["rdepends-pkg"][package]:
- print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
- for package in depgraph["rrecs-pkg"]:
- for rdepend in depgraph["rrecs-pkg"][package]:
- print('"%s" -> "%s" [style=dashed]' % (package, rdepend), file=depends_file)
- print("}", file=depends_file)
- logger.info("Package dependencies saved to 'package-depends.dot'")
-
- tdepends_file = file('task-depends.dot', 'w' )
- print("digraph depends {", file=tdepends_file)
- for task in depgraph["tdepends"]:
- (pn, taskname) = task.rsplit(".", 1)
- fn = depgraph["pn"][pn]["filename"]
- version = depgraph["pn"][pn]["version"]
- print('"%s.%s" [label="%s %s\\n%s\\n%s"]' % (pn, taskname, pn, taskname, version, fn), file=tdepends_file)
- for dep in depgraph["tdepends"][task]:
- print('"%s" -> "%s"' % (task, dep), file=tdepends_file)
- print("}", file=tdepends_file)
- logger.info("Task dependencies saved to 'task-depends.dot'")
-
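
All three writers above share one shape: walk a dict of edges and emit GraphViz syntax. The same idea standalone, with invented data:

    depends = {"busybox": ["virtual/libc"], "virtual/libc": []}

    with open("sketch-depends.dot", "w") as f:
        f.write("digraph depends {\n")
        for pn, deps in depends.items():
            f.write('"%s" [label="%s"]\n' % (pn, pn))
            for dep in deps:
                f.write('"%s" -> "%s"\n' % (pn, dep))
        f.write("}\n")
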
- def buildDepgraph( self ):
- all_depends = self.status.all_depends
- pn_provides = self.status.pn_provides
-
- localdata = data.createCopy(self.configuration.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
-
- matched = set()
- def calc_bbfile_priority(filename):
- for _, _, regex, pri in self.status.bbfile_config_priorities:
- if regex.match(filename):
- if not regex in matched:
- matched.add(regex)
- return pri
- return 0
-
- # Handle PREFERRED_PROVIDERS
- for p in (bb.data.getVar('PREFERRED_PROVIDERS', localdata, 1) or "").split():
- try:
- (providee, provider) = p.split(':')
- except:
- providerlog.critical("Malformed option in PREFERRED_PROVIDERS variable: %s" % p)
- continue
- if providee in self.status.preferred and self.status.preferred[providee] != provider:
- providerlog.error("conflicting preferences for %s: both %s and %s specified", providee, provider, self.status.preferred[providee])
- self.status.preferred[providee] = provider
-
- # Calculate priorities for each file
- for p in self.status.pkg_fn:
- self.status.bbfile_priority[p] = calc_bbfile_priority(p)
-
- for collection, pattern, regex, _ in self.status.bbfile_config_priorities:
- if not regex in matched:
- collectlog.warn("No bb files matched BBFILE_PATTERN_%s '%s'" % (collection, pattern))
-
- def buildWorldTargetList(self):
- """
- Build package list for "bitbake world"
- """
- all_depends = self.status.all_depends
- pn_provides = self.status.pn_provides
- parselog.debug(1, "collating packages for \"world\"")
- for f in self.status.possible_world:
- terminal = True
- pn = self.status.pkg_fn[f]
-
- for p in pn_provides[pn]:
- if p.startswith('virtual/'):
- parselog.debug(2, "World build skipping %s due to %s provider starting with virtual/", f, p)
- terminal = False
- break
- for pf in self.status.providers[p]:
- if self.status.pkg_fn[pf] != pn:
- parselog.debug(2, "World build skipping %s due to both us and %s providing %s", f, pf, p)
- terminal = False
- break
- if terminal:
- self.status.world_target.add(pn)
-
- # drop reference count now
- self.status.possible_world = None
- self.status.all_depends = None
-
- def interactiveMode( self ):
- """Drop off into a shell"""
- try:
- from bb import shell
- except ImportError:
- parselog.exception("Interactive mode not available")
- sys.exit(1)
- else:
- shell.start( self )
-
- def _findLayerConf(self):
- path = os.getcwd()
- while path != "/":
- bblayers = os.path.join(path, "conf", "bblayers.conf")
- if os.path.exists(bblayers):
- return bblayers
-
- path, _ = os.path.split(path)
-
- def parseConfigurationFiles(self, files):
- def _parse(f, data, include=False):
- try:
- return bb.parse.handle(f, data, include)
- except (IOError, bb.parse.ParseError) as exc:
- parselog.critical("Unable to parse %s: %s" % (f, exc))
- sys.exit(1)
-
- data = self.configuration.data
- bb.parse.init_parser(data)
- for f in files:
- data = _parse(f, data)
-
- layerconf = self._findLayerConf()
- if layerconf:
- parselog.debug(2, "Found bblayers.conf (%s)", layerconf)
- data = _parse(layerconf, data)
-
- layers = (bb.data.getVar('BBLAYERS', data, True) or "").split()
-
- data = bb.data.createCopy(data)
- for layer in layers:
- parselog.debug(2, "Adding layer %s", layer)
- bb.data.setVar('LAYERDIR', layer, data)
- data = _parse(os.path.join(layer, "conf", "layer.conf"), data)
- data.expandVarref('LAYERDIR')
-
- bb.data.delVar('LAYERDIR', data)
-
- if not data.getVar("BBPATH", True):
- raise SystemExit("The BBPATH variable is not set")
-
- data = _parse(os.path.join("conf", "bitbake.conf"), data)
-
- self.configuration.data = data
-
- # Handle any INHERITs and inherit the base class
- inherits = ["base"] + (bb.data.getVar('INHERIT', self.configuration.data, True ) or "").split()
- for inherit in inherits:
- self.configuration.data = _parse(os.path.join('classes', '%s.bbclass' % inherit), self.configuration.data, True )
-
-        # Normally we only register event handlers at the end of parsing .bb files
- # We register any handlers we've found so far here...
- for var in bb.data.getVar('__BBHANDLERS', self.configuration.data) or []:
- bb.event.register(var, bb.data.getVar(var, self.configuration.data))
-
- if bb.data.getVar("BB_WORKERCONTEXT", self.configuration.data) is None:
- bb.fetch.fetcher_init(self.configuration.data)
- bb.codeparser.parser_cache_init(self.configuration.data)
- bb.parse.init_parser(data)
- bb.event.fire(bb.event.ConfigParsed(), self.configuration.data)
-
- def handleCollections( self, collections ):
- """Handle collections"""
- if collections:
- collection_list = collections.split()
- for c in collection_list:
- regex = bb.data.getVar("BBFILE_PATTERN_%s" % c, self.configuration.data, 1)
- if regex == None:
- parselog.error("BBFILE_PATTERN_%s not defined" % c)
- continue
- priority = bb.data.getVar("BBFILE_PRIORITY_%s" % c, self.configuration.data, 1)
- if priority == None:
- parselog.error("BBFILE_PRIORITY_%s not defined" % c)
- continue
- try:
- cre = re.compile(regex)
- except re.error:
- parselog.error("BBFILE_PATTERN_%s \"%s\" is not a valid regular expression", c, regex)
- continue
- try:
- pri = int(priority)
- self.status.bbfile_config_priorities.append((c, regex, cre, pri))
- except ValueError:
- parselog.error("invalid value for BBFILE_PRIORITY_%s: \"%s\"", c, priority)
-
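
Together with calc_bbfile_priority() in buildDepgraph() above, collection handling amounts to: compile each BBFILE_PATTERN_<name>, pair it with BBFILE_PRIORITY_<name>, and give a recipe the priority of the first pattern that matches. A sketch with invented paths and values:

    import re

    config_priorities = [
        ("core", re.compile(r".*/meta/"), 5),
        ("local", re.compile(r".*/meta-local/"), 10),
    ]

    def bbfile_priority(filename):
        for _, regex, pri in config_priorities:
            if regex.match(filename):
                return pri
        return 0

    print(bbfile_priority("/src/meta-local/recipes/foo.bb"))  # 10
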
- def buildSetVars(self):
- """
- Setup any variables needed before starting a build
- """
- if not bb.data.getVar("BUILDNAME", self.configuration.data):
- bb.data.setVar("BUILDNAME", time.strftime('%Y%m%d%H%M'), self.configuration.data)
- bb.data.setVar("BUILDSTART", time.strftime('%m/%d/%Y %H:%M:%S', time.gmtime()), self.configuration.data)
-
- def matchFiles(self, buildfile):
- """
- Find the .bb files which match the expression in 'buildfile'.
- """
-
- bf = os.path.abspath(buildfile)
- filelist, masked = self.collect_bbfiles()
- try:
- os.stat(bf)
- return [bf]
- except OSError:
- regexp = re.compile(buildfile)
- matches = []
- for f in filelist:
- if regexp.search(f) and os.path.isfile(f):
- bf = f
- matches.append(f)
- return matches
-
- def matchFile(self, buildfile):
- """
- Find the .bb file which matches the expression in 'buildfile'.
-        Raise an error if multiple files match.
- """
- matches = self.matchFiles(buildfile)
- if len(matches) != 1:
- parselog.error("Unable to match %s (%s matches found):" % (buildfile, len(matches)))
- for f in matches:
- parselog.error(" %s" % f)
- raise MultipleMatches
- return matches[0]
-
- def buildFile(self, buildfile, task):
- """
- Build the file matching regexp buildfile
- """
-
- # Parse the configuration here. We need to do it explicitly here since
- # buildFile() doesn't use the cache
- self.parseConfiguration()
-
- # If we are told to do the None task then query the default task
- if (task == None):
- task = self.configuration.cmd
-
- (fn, cls) = bb.cache.Cache.virtualfn2realfn(buildfile)
- buildfile = self.matchFile(fn)
- fn = bb.cache.Cache.realfn2virtual(buildfile, cls)
-
- self.buildSetVars()
-
- self.status = bb.cache.CacheData()
- infos = bb.cache.Cache.parse(fn, self.get_file_appends(fn), \
- self.configuration.data)
- maininfo = None
- for vfn, info in infos:
- self.status.add_from_recipeinfo(vfn, info)
- if vfn == fn:
- maininfo = info
-
- # Tweak some variables
- item = maininfo.pn
- self.status.ignored_dependencies = set()
- self.status.bbfile_priority[fn] = 1
-
- # Remove external dependencies
- self.status.task_deps[fn]['depends'] = {}
- self.status.deps[fn] = []
- self.status.rundeps[fn] = []
- self.status.runrecs[fn] = []
-
- # Remove stamp for target if force mode active
- if self.configuration.force:
- logger.verbose("Remove stamp %s, %s", task, fn)
- bb.build.del_stamp('do_%s' % task, self.status, fn)
-
- # Setup taskdata structure
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
- taskdata.add_provider(self.configuration.data, self.status, item)
-
- buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
- bb.event.fire(bb.event.BuildStarted(buildname, [item]), self.configuration.event_data)
-
- # Clear locks
- bb.fetch.persistent_database_connection = {}
-
- # Execute the runqueue
- runlist = [[item, "do_%s" % task]]
-
- rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
-
- def buildFileIdle(server, rq, abort):
-
- if abort or self.state == state.stop:
- rq.finish_runqueue(True)
- elif self.state == state.shutdown:
- rq.finish_runqueue(False)
- failures = 0
- try:
- retval = rq.execute_runqueue()
- except runqueue.TaskFailure as exc:
- for fnid in exc.args:
- buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
- failures += len(exc.args)
- retval = False
- if not retval:
- bb.event.fire(bb.event.BuildCompleted(buildname, item, failures), self.configuration.event_data)
- self.command.finishAsyncCommand()
- return False
- if retval is True:
- return True
- return retval
-
- self.server.register_idle_function(buildFileIdle, rq)
-
- def buildTargets(self, targets, task):
- """
- Attempt to build the targets specified
- """
-
- # Need files parsed
- self.updateCache()
-
-        # If we are told to do the None task then query the default task
- if (task == None):
- task = self.configuration.cmd
-
- targets = self.checkPackages(targets)
-
- def buildTargetsIdle(server, rq, abort):
- if abort or self.state == state.stop:
- rq.finish_runqueue(True)
- elif self.state == state.shutdown:
- rq.finish_runqueue(False)
- failures = 0
- try:
- retval = rq.execute_runqueue()
- except runqueue.TaskFailure as exc:
- for fnid in exc.args:
- buildlog.error("'%s' failed" % taskdata.fn_index[fnid])
- failures += len(exc.args)
- retval = False
- if not retval:
- bb.event.fire(bb.event.BuildCompleted(buildname, targets, failures), self.configuration.event_data)
- self.command.finishAsyncCommand()
- return False
- if retval is True:
- return True
- return retval
-
- self.buildSetVars()
-
- buildname = bb.data.getVar("BUILDNAME", self.configuration.data)
- bb.event.fire(bb.event.BuildStarted(buildname, targets), self.configuration.event_data)
-
- localdata = data.createCopy(self.configuration.data)
- bb.data.update_data(localdata)
- bb.data.expandKeys(localdata)
-
- taskdata = bb.taskdata.TaskData(self.configuration.abort)
-
- runlist = []
- for k in targets:
- taskdata.add_provider(localdata, self.status, k)
- runlist.append([k, "do_%s" % task])
- taskdata.add_unresolved(localdata, self.status)
-
- # Clear locks
- bb.fetch.persistent_database_connection = {}
-
- rq = bb.runqueue.RunQueue(self, self.configuration.data, self.status, taskdata, runlist)
-
- self.server.register_idle_function(buildTargetsIdle, rq)
-
- def updateCache(self):
- if self.state == state.running:
- return
-
- if self.state != state.parsing:
- self.parseConfiguration ()
-
- # Import Psyco if available and not disabled
- import platform
- if platform.machine() in ['i386', 'i486', 'i586', 'i686']:
- if not self.configuration.disable_psyco:
- try:
- import psyco
- except ImportError:
- collectlog.info("Psyco JIT Compiler (http://psyco.sf.net) not available. Install it to increase performance.")
- else:
- psyco.bind( CookerParser.parse_next )
- else:
- collectlog.info("You have disabled Psyco. This decreases performance.")
-
- self.status = bb.cache.CacheData()
-
- ignore = bb.data.getVar("ASSUME_PROVIDED", self.configuration.data, 1) or ""
- self.status.ignored_dependencies = set(ignore.split())
-
- for dep in self.configuration.extra_assume_provided:
- self.status.ignored_dependencies.add(dep)
-
- self.handleCollections( bb.data.getVar("BBFILE_COLLECTIONS", self.configuration.data, 1) )
-
- (filelist, masked) = self.collect_bbfiles()
- bb.data.renameVar("__depends", "__base_depends", self.configuration.data)
-
- self.parser = CookerParser(self, filelist, masked)
- self.state = state.parsing
-
- if not self.parser.parse_next():
- collectlog.debug(1, "parsing complete")
- self.buildDepgraph()
- self.state = state.running
- return None
-
- return True
-
- def checkPackages(self, pkgs_to_build):
-
- if len(pkgs_to_build) == 0:
- raise NothingToBuild
-
- if 'world' in pkgs_to_build:
- self.buildWorldTargetList()
- pkgs_to_build.remove('world')
- for t in self.status.world_target:
- pkgs_to_build.append(t)
-
- return pkgs_to_build
-
- def get_bbfiles( self, path = os.getcwd() ):
-        """Get list of default .bb files by scanning the current directory"""
- contents = os.listdir(path)
- bbfiles = []
- for f in contents:
- (root, ext) = os.path.splitext(f)
- if ext == ".bb":
- bbfiles.append(os.path.abspath(os.path.join(os.getcwd(), f)))
- return bbfiles
-
- def find_bbfiles( self, path ):
- """Find all the .bb and .bbappend files in a directory"""
- from os.path import join
-
- found = []
- for dir, dirs, files in os.walk(path):
- for ignored in ('SCCS', 'CVS', '.svn'):
- if ignored in dirs:
- dirs.remove(ignored)
- found += [join(dir, f) for f in files if (f.endswith('.bb') or f.endswith('.bbappend'))]
-
- return found
-
- def collect_bbfiles( self ):
- """Collect all available .bb build files"""
- parsed, cached, skipped, masked = 0, 0, 0, 0
-
- collectlog.debug(1, "collecting .bb files")
-
- files = (data.getVar( "BBFILES", self.configuration.data, 1 ) or "").split()
- data.setVar("BBFILES", " ".join(files), self.configuration.data)
-
- if not len(files):
- files = self.get_bbfiles()
-
- if not len(files):
- collectlog.error("no recipe files to build, check your BBPATH and BBFILES?")
- bb.event.fire(CookerExit(), self.configuration.event_data)
-
- newfiles = set()
- for f in files:
- if os.path.isdir(f):
- dirfiles = self.find_bbfiles(f)
- newfiles.update(dirfiles)
- else:
- globbed = glob.glob(f)
- if not globbed and os.path.exists(f):
- globbed = [f]
- newfiles.update(globbed)
-
- bbmask = bb.data.getVar('BBMASK', self.configuration.data, 1)
-
- if bbmask:
- try:
- bbmask_compiled = re.compile(bbmask)
- except sre_constants.error:
- collectlog.critical("BBMASK is not a valid regular expression, ignoring.")
- return list(newfiles), 0
-
- bbfiles = []
- bbappend = []
- for f in newfiles:
- if bbmask and bbmask_compiled.search(f):
- collectlog.debug(1, "skipping masked file %s", f)
- masked += 1
- continue
- if f.endswith('.bb'):
- bbfiles.append(f)
- elif f.endswith('.bbappend'):
- bbappend.append(f)
- else:
- collectlog.debug(1, "skipping %s: unknown file extension", f)
-
- # Build a list of .bbappend files for each .bb file
- for f in bbappend:
- base = os.path.basename(f).replace('.bbappend', '.bb')
- if not base in self.appendlist:
- self.appendlist[base] = []
- self.appendlist[base].append(f)
-
- return (bbfiles, masked)
-
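
The .bbappend bookkeeping at the end of collect_bbfiles() is a plain basename-keyed grouping, which get_file_appends() below then consults. In isolation, with invented paths:

    import os
    from collections import defaultdict

    appendlist = defaultdict(list)
    for f in ["/l1/foo.bbappend", "/l2/foo.bbappend", "/l1/bar.bbappend"]:
        base = os.path.basename(f).replace(".bbappend", ".bb")
        appendlist[base].append(f)

    print(appendlist["foo.bb"])  # ['/l1/foo.bbappend', '/l2/foo.bbappend']
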
- def get_file_appends(self, fn):
- """
- Returns a list of .bbappend files to apply to fn
- NB: collect_bbfiles() must have been called prior to this
- """
- f = os.path.basename(fn)
- if f in self.appendlist:
- return self.appendlist[f]
- return []
-
- def pre_serve(self):
- # Empty the environment. The environment will be populated as
- # necessary from the data store.
- #bb.utils.empty_environment()
- return
-
- def post_serve(self):
- bb.event.fire(CookerExit(), self.configuration.event_data)
-
- def shutdown(self):
- self.state = state.shutdown
-
- def stop(self):
- self.state = state.stop
-
-def server_main(cooker, func, *args):
- cooker.pre_serve()
-
- if cooker.configuration.profile:
- try:
- import cProfile as profile
- except:
- import profile
- prof = profile.Profile()
-
- ret = profile.Profile.runcall(prof, func, *args)
-
- prof.dump_stats("profile.log")
-
- # Redirect stdout to capture profile information
- pout = open('profile.log.processed', 'w')
- so = sys.stdout.fileno()
- orig_so = os.dup(sys.stdout.fileno())
- os.dup2(pout.fileno(), so)
-
- import pstats
- p = pstats.Stats('profile.log')
- p.sort_stats('time')
- p.print_stats()
- p.print_callers()
- p.sort_stats('cumulative')
- p.print_stats()
-
- os.dup2(orig_so, so)
- pout.flush()
- pout.close()
-
- print("Raw profiling information saved to profile.log and processed statistics to profile.log.processed")
-
- else:
- ret = func(*args)
-
- cooker.post_serve()
-
- return ret
-
-class CookerExit(bb.event.Event):
- """
- Notify clients of the Cooker shutdown
- """
-
- def __init__(self):
- bb.event.Event.__init__(self)
-
-class ParsingFailure(Exception):
- def __init__(self, realexception, recipe):
- self.realexception = realexception
- self.recipe = recipe
- Exception.__init__(self, "Failure when parsing %s" % recipe)
- self.args = (realexception, recipe)
-
-def parse_file(task):
- filename, appends = task
- try:
- return True, bb.cache.Cache.parse(filename, appends, parse_file.cfg)
- except Exception, exc:
- exc.recipe = filename
- raise exc
-    # Need to turn BaseExceptions into Exceptions here so we gracefully
-    # shut down, rather than a worker thread simply exiting on its own in
-    # response to, for example, a SystemExit event.
- except BaseException, exc:
- raise ParsingFailure(exc, filename)
-
-class CookerParser(object):
- def __init__(self, cooker, filelist, masked):
- self.filelist = filelist
- self.cooker = cooker
- self.cfgdata = cooker.configuration.data
-
- # Accounting statistics
- self.parsed = 0
- self.cached = 0
- self.error = 0
- self.masked = masked
-
- self.skipped = 0
- self.virtuals = 0
- self.total = len(filelist)
-
- self.current = 0
- self.num_processes = int(self.cfgdata.getVar("BB_NUMBER_PARSE_THREADS", True) or
- multiprocessing.cpu_count())
-
- self.bb_cache = bb.cache.Cache(self.cfgdata)
- self.fromcache = []
- self.willparse = []
- for filename in self.filelist:
- appends = self.cooker.get_file_appends(filename)
- if not self.bb_cache.cacheValid(filename):
- self.willparse.append((filename, appends))
- else:
- self.fromcache.append((filename, appends))
- self.toparse = self.total - len(self.fromcache)
- self.progress_chunk = max(self.toparse / 100, 1)
-
- self.start()
-
- def start(self):
- def init(cfg):
- signal.signal(signal.SIGINT, signal.SIG_IGN)
- parse_file.cfg = cfg
-
- bb.event.fire(bb.event.ParseStarted(self.toparse), self.cfgdata)
-
- self.pool = multiprocessing.Pool(self.num_processes, init, [self.cfgdata])
- parsed = self.pool.imap(parse_file, self.willparse)
- self.pool.close()
-
- self.results = itertools.chain(self.load_cached(), parsed)
-
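
The worker setup above rests on a multiprocessing idiom worth spelling out: the pool initializer stashes shared configuration as an attribute on the worker function (parse_file.cfg), and imap() streams results back lazily so parse_next() can consume them one at a time. A runnable miniature with invented names:

    import multiprocessing

    def work(filename):
        # cfg was attached by init() inside each worker process
        return "%s parsed with %s" % (filename, work.cfg)

    def init(cfg):
        work.cfg = cfg

    if __name__ == "__main__":
        pool = multiprocessing.Pool(2, init, ["shared-config"])
        for result in pool.imap(work, ["a.bb", "b.bb"]):
            print(result)
        pool.close()
        pool.join()
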
- def shutdown(self, clean=True):
- if clean:
- event = bb.event.ParseCompleted(self.cached, self.parsed,
- self.skipped, self.masked,
- self.virtuals, self.error,
- self.total)
- bb.event.fire(event, self.cfgdata)
- else:
- self.pool.terminate()
- self.pool.join()
-
- sync = threading.Thread(target=self.bb_cache.sync)
- sync.start()
- atexit.register(lambda: sync.join())
-
-        codesync = threading.Thread(target=bb.codeparser.parser_cache_save, args=(self.cooker.configuration.data,))
- codesync.start()
- atexit.register(lambda: codesync.join())
-
- def load_cached(self):
- for filename, appends in self.fromcache:
- cached, infos = self.bb_cache.load(filename, appends, self.cfgdata)
- yield not cached, infos
-
- def parse_next(self):
- try:
- parsed, result = self.results.next()
- except StopIteration:
- self.shutdown()
- return False
- except KeyboardInterrupt:
- self.shutdown(clean=False)
- raise
- except Exception as exc:
- self.shutdown(clean=False)
- bb.fatal('Error parsing %s: %s' % (exc.recipe, exc))
-
- self.current += 1
- self.virtuals += len(result)
- if parsed:
- self.parsed += 1
- if self.parsed % self.progress_chunk == 0:
- bb.event.fire(bb.event.ParseProgress(self.parsed),
- self.cfgdata)
- else:
- self.cached += 1
-
- for virtualfn, info in result:
- if info.skipped:
- self.skipped += 1
- else:
- self.bb_cache.add_info(virtualfn, info, self.cooker.status,
- parsed=parsed)
- return True
-
- def reparse(self, filename):
- infos = self.bb_cache.parse(filename,
- self.cooker.get_file_appends(filename),
- self.cfgdata)
- for vfn, info in infos:
- self.cooker.status.add_from_recipeinfo(vfn, info)
diff --git a/bitbake/lib/bb/daemonize.py b/bitbake/lib/bb/daemonize.py
deleted file mode 100644
index f0714b3af6..0000000000
--- a/bitbake/lib/bb/daemonize.py
+++ /dev/null
@@ -1,190 +0,0 @@
-"""
-Python daemonizing helper
-
-Configurable daemon behaviors:
-
-    1.) The current working directory is set to "/".
-    2.) The current file creation mode mask is set to 0.
-    3.) All open file descriptors (up to 1024) are closed.
-    4.) The standard I/O streams are redirected to "/dev/null".
-
-A failed call to fork() now raises an exception.
-
-References:
- 1) Advanced Programming in the Unix Environment: W. Richard Stevens
- 2) Unix Programming Frequently Asked Questions:
- http://www.erlenstar.demon.co.uk/unix/faq_toc.html
-
-Modified to allow a function to be daemonized and return for
-bitbake use by Richard Purdie
-"""
-
-__author__ = "Chad J. Schroeder"
-__copyright__ = "Copyright (C) 2005 Chad J. Schroeder"
-__version__ = "0.2"
-
-# Standard Python modules.
-import os # Miscellaneous OS interfaces.
-import sys # System-specific parameters and functions.
-
-# Default daemon parameters.
-# File mode creation mask of the daemon.
-# For BitBake's children, we do want to inherit the parent umask.
-UMASK = None
-
-# Default maximum for the number of available file descriptors.
-MAXFD = 1024
-
-# The standard I/O file descriptors are redirected to /dev/null by default.
-if (hasattr(os, "devnull")):
- REDIRECT_TO = os.devnull
-else:
- REDIRECT_TO = "/dev/null"
-
-def createDaemon(function, logfile):
- """
- Detach a process from the controlling terminal and run it in the
- background as a daemon, returning control to the caller.
- """
-
- try:
- # Fork a child process so the parent can exit. This returns control to
- # the command-line or shell. It also guarantees that the child will not
- # be a process group leader, since the child receives a new process ID
- # and inherits the parent's process group ID. This step is required
-        # to ensure that the next call to os.setsid is successful.
- pid = os.fork()
- except OSError as e:
- raise Exception("%s [%d]" % (e.strerror, e.errno))
-
- if (pid == 0): # The first child.
- # To become the session leader of this new session and the process group
- # leader of the new process group, we call os.setsid(). The process is
- # also guaranteed not to have a controlling terminal.
- os.setsid()
-
- # Is ignoring SIGHUP necessary?
- #
- # It's often suggested that the SIGHUP signal should be ignored before
- # the second fork to avoid premature termination of the process. The
- # reason is that when the first child terminates, all processes, e.g.
- # the second child, in the orphaned group will be sent a SIGHUP.
- #
- # "However, as part of the session management system, there are exactly
- # two cases where SIGHUP is sent on the death of a process:
- #
- # 1) When the process that dies is the session leader of a session that
- # is attached to a terminal device, SIGHUP is sent to all processes
- # in the foreground process group of that terminal device.
- # 2) When the death of a process causes a process group to become
- # orphaned, and one or more processes in the orphaned group are
- # stopped, then SIGHUP and SIGCONT are sent to all members of the
- # orphaned group." [2]
- #
- # The first case can be ignored since the child is guaranteed not to have
- # a controlling terminal. The second case isn't so easy to dismiss.
- # The process group is orphaned when the first child terminates and
- # POSIX.1 requires that every STOPPED process in an orphaned process
- # group be sent a SIGHUP signal followed by a SIGCONT signal. Since the
- # second child is not STOPPED though, we can safely forego ignoring the
- # SIGHUP signal. In any case, there are no ill-effects if it is ignored.
- #
- # import signal # Set handlers for asynchronous events.
- # signal.signal(signal.SIGHUP, signal.SIG_IGN)
-
- try:
- # Fork a second child and exit immediately to prevent zombies. This
- # causes the second child process to be orphaned, making the init
- # process responsible for its cleanup. And, since the first child is
- # a session leader without a controlling terminal, it's possible for
- # it to acquire one by opening a terminal in the future (System V-
- # based systems). This second fork guarantees that the child is no
- # longer a session leader, preventing the daemon from ever acquiring
- # a controlling terminal.
- pid = os.fork() # Fork a second child.
- except OSError as e:
- raise Exception("%s [%d]" % (e.strerror, e.errno))
-
- if (pid == 0): # The second child.
- # We probably don't want the file mode creation mask inherited from
- # the parent, so we give the child complete control over permissions.
- if UMASK is not None:
- os.umask(UMASK)
- else:
- # Parent (the first child) of the second child.
- os._exit(0)
- else:
- # exit() or _exit()?
- # _exit is like exit(), but it doesn't call any functions registered
- # with atexit (and on_exit) or any registered signal handlers. It also
- # closes any open file descriptors. Using exit() may cause all stdio
- # streams to be flushed twice and any temporary files may be unexpectedly
- # removed. It's therefore recommended that child branches of a fork()
- # and the parent branch(es) of a daemon use _exit().
- return
-
- # Close all open file descriptors. This prevents the child from keeping
- # open any file descriptors inherited from the parent. There is a variety
- # of methods to accomplish this task. Three are listed below.
- #
- # Try the system configuration variable, SC_OPEN_MAX, to obtain the maximum
-    # number of open file descriptors to close. If it doesn't exist, use
- # the default value (configurable).
- #
- # try:
- # maxfd = os.sysconf("SC_OPEN_MAX")
- # except (AttributeError, ValueError):
- # maxfd = MAXFD
- #
- # OR
- #
- # if (os.sysconf_names.has_key("SC_OPEN_MAX")):
- # maxfd = os.sysconf("SC_OPEN_MAX")
- # else:
- # maxfd = MAXFD
- #
- # OR
- #
- # Use the getrlimit method to retrieve the maximum file descriptor number
-    # that can be opened by this process. If there is no limit on the
- # resource, use the default value.
- #
- import resource # Resource usage information.
- maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
- if (maxfd == resource.RLIM_INFINITY):
- maxfd = MAXFD
-
- # Iterate through and close all file descriptors.
-# for fd in range(0, maxfd):
-# try:
-# os.close(fd)
-# except OSError: # ERROR, fd wasn't open to begin with (ignored)
-# pass
-
- # Redirect the standard I/O file descriptors to the specified file. Since
- # the daemon has no controlling terminal, most daemons redirect stdin,
- # stdout, and stderr to /dev/null. This is done to prevent side-effects
- # from reads and writes to the standard I/O file descriptors.
-
- # This call to open is guaranteed to return the lowest file descriptor,
- # which will be 0 (stdin), since it was closed above.
-# os.open(REDIRECT_TO, os.O_RDWR) # standard input (0)
-
- # Duplicate standard input to standard output and standard error.
-# os.dup2(0, 1) # standard output (1)
-# os.dup2(0, 2) # standard error (2)
-
-
- si = file('/dev/null', 'r')
- so = file(logfile, 'w')
- se = so
-
-
- # Replace those fds with our own
- os.dup2(si.fileno(), sys.stdin.fileno())
- os.dup2(so.fileno(), sys.stdout.fileno())
- os.dup2(se.fileno(), sys.stderr.fileno())
-
- function()
-
- os._exit(0)
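
Stripped of the commentary, the control flow of createDaemon() is the classic double fork. A condensed sketch, with the error handling and I/O redirection omitted:

    import os

    def daemonize(function):
        if os.fork() != 0:     # parent: return control to the caller
            return
        os.setsid()            # first child: new session, no controlling tty
        if os.fork() != 0:     # first child exits; init adopts the daemon
            os._exit(0)
        function()             # second child runs the daemon body
        os._exit(0)
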
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py
deleted file mode 100644
index 50f2218a70..0000000000
--- a/bitbake/lib/bb/data.py
+++ /dev/null
@@ -1,338 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Data' implementations
-
-Functions for interacting with the data structure used by the
-BitBake build tools.
-
-The expandData and update_data are the most expensive
-operations. At night the cookie monster came by and
-suggested 'give me cookies on setting the variables and
-things will work out'. Taking this suggestion into account
-applying the skills from the not yet passed 'Entwurf und
-Analyse von Algorithmen' lecture and the cookie
-monster seems to be right. We will track setVar more carefully
-to have faster update_data and expandKeys operations.
-
-This is a trade-off between speed and memory again but
-the speed is more critical here.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2005 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import sys, os, re
-if sys.argv[0][-5:] == "pydoc":
- path = os.path.dirname(os.path.dirname(sys.argv[1]))
-else:
- path = os.path.dirname(os.path.dirname(sys.argv[0]))
-sys.path.insert(0, path)
-from itertools import groupby
-
-from bb import data_smart
-from bb import codeparser
-import bb
-
-_dict_type = data_smart.DataSmart
-
-def init():
- """Return a new object representing the Bitbake data"""
- return _dict_type()
-
-def init_db(parent = None):
- """Return a new object representing the Bitbake data,
- optionally based on an existing object"""
- if parent:
- return parent.createCopy()
- else:
- return _dict_type()
-
-def createCopy(source):
-    """Link the source set to the destination.
-    If a value is not found in the destination set,
-    the search continues in the source set.
-    Values from the source are copy-on-write, i.e. any attempt
-    to modify one of them will end up putting the modified value
-    in the destination set.
-    """
- return source.createCopy()
-
-def initVar(var, d):
- """Non-destructive var init for data structure"""
- d.initVar(var)
-
-
-def setVar(var, value, d):
- """Set a variable to a given value"""
- d.setVar(var, value)
-
-
-def getVar(var, d, exp = 0):
- """Gets the value of a variable"""
- return d.getVar(var, exp)
-
-
-def renameVar(key, newkey, d):
- """Renames a variable from key to newkey"""
- d.renameVar(key, newkey)
-
-def delVar(var, d):
- """Removes a variable from the data set"""
- d.delVar(var)
-
-def setVarFlag(var, flag, flagvalue, d):
- """Set a flag for a given variable to a given value"""
- d.setVarFlag(var, flag, flagvalue)
-
-def getVarFlag(var, flag, d):
- """Gets given flag from given var"""
- return d.getVarFlag(var, flag)
-
-def delVarFlag(var, flag, d):
- """Removes a given flag from the variable's flags"""
- d.delVarFlag(var, flag)
-
-def setVarFlags(var, flags, d):
- """Set the flags for a given variable
-
- Note:
- setVarFlags will not clear previous
- flags. Think of this method as
- addVarFlags
- """
- d.setVarFlags(var, flags)
-
-def getVarFlags(var, d):
- """Gets a variable's flags"""
- return d.getVarFlags(var)
-
-def delVarFlags(var, d):
- """Removes a variable's flags"""
- d.delVarFlags(var)
-
-def keys(d):
- """Return a list of keys in d"""
- return d.keys()
-
-
-__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
-
-def expand(s, d, varname = None):
- """Variable expansion using the data store"""
- return d.expand(s, varname)
-
-def expandKeys(alterdata, readdata = None):
- if readdata == None:
- readdata = alterdata
-
- todolist = {}
- for key in keys(alterdata):
- if not '${' in key:
- continue
-
- ekey = expand(key, readdata)
- if key == ekey:
- continue
- todolist[key] = ekey
-
- # These two for loops are split for performance to maximise the
- # usefulness of the expand cache
-
- for key in todolist:
- ekey = todolist[key]
- renameVar(key, ekey, alterdata)
-
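
A short usage sketch of the module-level API defined above (assumes a BitBake checkout of this era on sys.path; expansion repeats until the value stops changing):

    from bb import data

    d = data.init()
    data.setVar("PN", "example", d)
    data.setVar("WORKDIR", "/tmp/${PN}", d)
    print(data.expand("${WORKDIR}", d))  # /tmp/example
    print(data.getVar("WORKDIR", d, 1))  # same, via exp=1
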
-def inheritFromOS(d):
- """Inherit variables from the environment."""
- exportlist = bb.utils.preserved_envvars_exported()
- for s in os.environ.keys():
- try:
- setVar(s, os.environ[s], d)
- if s in exportlist:
- setVarFlag(s, "export", True, d)
- except TypeError:
- pass
-
-def emit_var(var, o=sys.__stdout__, d = init(), all=False):
- """Emit a variable to be sourced by a shell."""
- if getVarFlag(var, "python", d):
- return 0
-
- export = getVarFlag(var, "export", d)
- unexport = getVarFlag(var, "unexport", d)
- func = getVarFlag(var, "func", d)
- if not all and not export and not unexport and not func:
- return 0
-
- try:
- if all:
- oval = getVar(var, d, 0)
- val = getVar(var, d, 1)
- except (KeyboardInterrupt, bb.build.FuncFailed):
- raise
- except Exception, exc:
- o.write('# expansion of %s threw %s: %s\n' % (var, exc.__class__.__name__, str(exc)))
- return 0
-
- if all:
- commentVal = re.sub('\n', '\n#', str(oval))
- o.write('# %s=%s\n' % (var, commentVal))
-
- if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all:
- return 0
-
- varExpanded = expand(var, d)
-
- if unexport:
- o.write('unset %s\n' % varExpanded)
- return 0
-
- if not val:
- return 0
-
- val = str(val)
-
- if func:
- # NOTE: should probably check for unbalanced {} within the var
- o.write("%s() {\n%s\n}\n" % (varExpanded, val))
- return 1
-
- if export:
- o.write('export ')
-
- # if we're going to output this within doublequotes,
- # to a shell, we need to escape the quotes in the var
- alter = re.sub('"', '\\"', val.strip())
- alter = re.sub('\n', ' \\\n', alter)
- o.write('%s="%s"\n' % (varExpanded, alter))
- return 0
-
-def emit_env(o=sys.__stdout__, d = init(), all=False):
- """Emits all items in the data store in a format such that it can be sourced by a shell."""
-
- isfunc = lambda key: bool(d.getVarFlag(key, "func"))
- keys = sorted((key for key in d.keys() if not key.startswith("__")), key=isfunc)
- grouped = groupby(keys, isfunc)
- for isfunc, keys in grouped:
- for key in keys:
- emit_var(key, o, d, all and not isfunc) and o.write('\n')
-
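
The quoting in emit_var() is the subtle part: values are emitted inside double quotes for the shell, so embedded quotes must be escaped and newlines continued. A tiny standalone analogue for a plain exported variable:

    import re, sys

    def emit(name, value, out=sys.stdout):
        escaped = re.sub('"', '\\"', value.strip())
        escaped = re.sub('\n', ' \\\n', escaped)
        out.write('export %s="%s"\n' % (name, escaped))

    emit("CFLAGS", '-O2 -DMSG="hi"')  # export CFLAGS="-O2 -DMSG=\"hi\""
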
-def export_vars(d):
- keys = (key for key in d.keys() if d.getVarFlag(key, "export"))
- ret = {}
- for k in keys:
- try:
- v = d.getVar(k, True)
- if v:
- ret[k] = v
- except (KeyboardInterrupt, bb.build.FuncFailed):
- raise
- except Exception, exc:
- pass
- return ret
-
-def export_envvars(v, d):
- for s in os.environ.keys():
- if s not in v:
- v[s] = os.environ[s]
- return v
-
-def emit_func(func, o=sys.__stdout__, d = init()):
-    """Emits the given shell function and everything it depends on, in a format such that it can be sourced by a shell."""
-
- keys = (key for key in d.keys() if not key.startswith("__") and not d.getVarFlag(key, "func"))
- for key in keys:
- emit_var(key, o, d, False) and o.write('\n')
-
- emit_var(func, o, d, False) and o.write('\n')
- newdeps = bb.codeparser.ShellParser().parse_shell(d.getVar(func, True))
- seen = set()
- while newdeps:
- deps = newdeps
- seen |= deps
- newdeps = set()
- for dep in deps:
- if bb.data.getVarFlag(dep, "func", d):
- emit_var(dep, o, d, False) and o.write('\n')
- newdeps |= bb.codeparser.ShellParser().parse_shell(d.getVar(dep, True))
- newdeps -= seen
-
-def update_data(d):
- """Performs final steps upon the datastore, including application of overrides"""
- d.finalize()
-
-def build_dependencies(key, keys, shelldeps, d):
- deps = set()
- try:
- if d.getVarFlag(key, "func"):
- if d.getVarFlag(key, "python"):
- parsedvar = d.expandWithRefs(d.getVar(key, False), key)
- parser = bb.codeparser.PythonParser()
- parser.parse_python(parsedvar.value)
- deps = deps | parser.references
- else:
- parsedvar = d.expandWithRefs(d.getVar(key, False), key)
- parser = bb.codeparser.ShellParser()
- parser.parse_shell(parsedvar.value)
- deps = deps | shelldeps
- deps = deps | parsedvar.references
- deps = deps | (keys & parser.execs) | (keys & parsedvar.execs)
- else:
- parser = d.expandWithRefs(d.getVar(key, False), key)
- deps |= parser.references
- deps = deps | (keys & parser.execs)
- deps |= set((d.getVarFlag(key, "vardeps", True) or "").split())
- deps -= set((d.getVarFlag(key, "vardepsexclude", True) or "").split())
- except:
- bb.note("Error expanding variable %s" % key)
- raise
- return deps
- #bb.note("Variable %s references %s and calls %s" % (key, str(deps), str(execs)))
- #d.setVarFlag(key, "vardeps", deps)
-
-def generate_dependencies(d):
-
- keys = set(key for key in d.keys() if not key.startswith("__"))
- shelldeps = set(key for key in keys if d.getVarFlag(key, "export") and not d.getVarFlag(key, "unexport"))
-
- deps = {}
-
- tasklist = bb.data.getVar('__BBTASKS', d) or []
- for task in tasklist:
- deps[task] = build_dependencies(task, keys, shelldeps, d)
- newdeps = deps[task]
- seen = set()
- while newdeps:
- nextdeps = newdeps
- seen |= nextdeps
- newdeps = set()
- for dep in nextdeps:
- if dep not in deps:
- deps[dep] = build_dependencies(dep, keys, shelldeps, d)
- newdeps |= deps[dep]
- newdeps -= seen
- #print "For %s: %s" % (task, str(taskdeps[task]))
- return tasklist, deps
-
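generate_dependencies() walks the dependency graph with the same seen/frontier loop used by emit_func() above; the pattern in isolation, with hypothetical task names:

    def transitive_closure(start, get_deps):
        """Collect start plus everything reachable through get_deps()."""
        seen = set()
        frontier = set(start)
        while frontier:
            seen |= frontier
            nextfrontier = set()
            for item in frontier:
                nextfrontier |= set(get_deps(item) or [])
            frontier = nextfrontier - seen
        return seen

    deps = {"do_compile": ["do_configure"], "do_configure": ["do_patch"]}
    assert transitive_closure(["do_compile"], deps.get) == \
        set(["do_compile", "do_configure", "do_patch"])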
-def inherits_class(klass, d):
- val = getVar('__inherit_cache', d) or []
- if os.path.join('classes', '%s.bbclass' % klass) in val:
- return True
- return False
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py
deleted file mode 100644
index df9798ad58..0000000000
--- a/bitbake/lib/bb/data_smart.py
+++ /dev/null
@@ -1,428 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake Smart Dictionary Implementation
-
-Functions for interacting with the data structure used by the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004, 2005 Seb Frankengul
-# Copyright (C) 2005, 2006 Holger Hans Peter Freyther
-# Copyright (C) 2005 Uli Luckas
-# Copyright (C) 2005 ROAD GmbH
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import copy, re
-from collections import MutableMapping
-import logging
-import bb, bb.codeparser
-from bb import utils
-from bb.COW import COWDictBase
-
-logger = logging.getLogger("BitBake.Data")
-
-__setvar_keyword__ = ["_append", "_prepend"]
-__setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend)(_(?P<add>.*))?')
-__expand_var_regexp__ = re.compile(r"\${[^{}]+}")
-__expand_python_regexp__ = re.compile(r"\${@.+?}")
-
-
-class VariableParse:
- def __init__(self, varname, d, val = None):
- self.varname = varname
- self.d = d
- self.value = val
-
- self.references = set()
- self.execs = set()
-
- def var_sub(self, match):
- key = match.group()[2:-1]
- if self.varname and key:
- if self.varname == key:
- raise Exception("variable %s references itself!" % self.varname)
- var = self.d.getVar(key, 1)
- if var is not None:
- self.references.add(key)
- return var
- else:
- return match.group()
-
- def python_sub(self, match):
- code = match.group()[3:-1]
- codeobj = compile(code.strip(), self.varname or "<expansion>", "eval")
-
- parser = bb.codeparser.PythonParser()
- parser.parse_python(code)
- self.references |= parser.references
- self.execs |= parser.execs
-
- value = utils.better_eval(codeobj, DataContext(self.d))
- return str(value)
-
-
-class DataContext(dict):
- def __init__(self, metadata, **kwargs):
- self.metadata = metadata
- dict.__init__(self, **kwargs)
- self['d'] = metadata
-
- def __missing__(self, key):
- value = self.metadata.getVar(key, True)
- if value is None or self.metadata.getVarFlag(key, 'func'):
- raise KeyError(key)
- else:
- return value
-
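A sketch of how DataContext backs ${@...} python expansion: bare names in an expression fall through __missing__ to the datastore. The values here are hypothetical and DataSmart is the class defined below:

    d = DataSmart()
    d.setVar("PV", "1.18.5")
    d.setVar("MAJOR", "${@PV.split('.')[0]}")
    # python_sub() compiles the expression and evaluates it with a
    # DataContext, so the bare name PV resolves through getVar()
    assert d.getVar("MAJOR", True) == "1"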
-class ExpansionError(Exception):
- def __init__(self, varname, expression, exception):
- self.expression = expression
- self.variablename = varname
- self.exception = exception
- self.msg = "Failure expanding variable %s, expression was %s which triggered exception %s: %s" % (varname, expression, type(exception).__name__, exception)
- Exception.__init__(self, self.msg)
- self.args = (varname, expression, exception)
- def __str__(self):
- return self.msg
-
-class DataSmart(MutableMapping):
- def __init__(self, special = COWDictBase.copy(), seen = COWDictBase.copy() ):
- self.dict = {}
-
- # cookie monster tribute
- self._special_values = special
- self._seen_overrides = seen
-
- self.expand_cache = {}
-
- def expandWithRefs(self, s, varname):
-
- if not isinstance(s, basestring): # sanity check
- return VariableParse(varname, self, s)
-
- if varname and varname in self.expand_cache:
- return self.expand_cache[varname]
-
- varparse = VariableParse(varname, self)
-
- while s.find('${') != -1:
- olds = s
- try:
- s = __expand_var_regexp__.sub(varparse.var_sub, s)
- s = __expand_python_regexp__.sub(varparse.python_sub, s)
- if s == olds:
- break
- except ExpansionError:
- raise
- except Exception as exc:
- raise ExpansionError(varname, s, exc)
-
- varparse.value = s
-
- if varname:
- self.expand_cache[varname] = varparse
-
- return varparse
-
- def expand(self, s, varname):
- return self.expandWithRefs(s, varname).value
-
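A usage sketch for expand()/expandWithRefs(), with hypothetical variables:

    d = DataSmart()
    d.setVar("PN", "busybox")
    d.setVar("PV", "1.18.5")
    d.setVar("P", "${PN}-${PV}")

    assert d.expand("${P}", None) == "busybox-1.18.5"

    vp = d.expandWithRefs("${P}", None)
    assert vp.value == "busybox-1.18.5"
    assert vp.references == set(["P"])  # only the directly referenced name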
-
- def finalize(self):
- """Performs final steps upon the datastore, including application of overrides"""
-
- overrides = (self.getVar("OVERRIDES", True) or "").split(":") or []
-
- #
-        # We used to iterate over each variable, apply the override
-        # and then expand the line. That relied on the keys arriving
-        # in an order we can no longer guarantee, so this rework still
-        # needs test cases to pin the behaviour down.
-
- #
- # First we apply all overrides
- # Then we will handle _append and _prepend
- #
-
- for o in overrides:
- # calculate '_'+override
- l = len(o) + 1
-
- # see if one should even try
- if o not in self._seen_overrides:
- continue
-
- vars = self._seen_overrides[o]
- for var in vars:
- name = var[:-l]
- try:
- self.setVar(name, self.getVar(var, False))
- except Exception:
- logger.info("Untracked delVar")
-
- # now on to the appends and prepends
- for op in __setvar_keyword__:
- if op in self._special_values:
- appends = self._special_values[op] or []
- for append in appends:
- keep = []
- for (a, o) in self.getVarFlag(append, op) or []:
-                        if o and o not in overrides:
-                            keep.append((a, o))
-                            continue
-
-                        if op == "_append":
-                            sval = self.getVar(append, False) or ""
-                            sval += a
-                            self.setVar(append, sval)
-                        elif op == "_prepend":
-                            sval = a + (self.getVar(append, False) or "")
-                            self.setVar(append, sval)
-
- # We save overrides that may be applied at some later stage
- if keep:
- self.setVarFlag(append, op, keep)
- else:
- self.delVarFlag(append, op)
-
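A sketch of what finalize() produces for a datastore carrying an override and an append (hypothetical values):

    d = DataSmart()
    d.setVar("OVERRIDES", "arm:local")
    d.setVar("CFLAGS", "-O2")
    d.setVar("CFLAGS_arm", "-O2 -mthumb")
    d.setVar("CFLAGS_append", " -g")
    d.finalize()
    # the override is applied first, then the append
    assert d.getVar("CFLAGS", True) == "-O2 -mthumb -g"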
- def initVar(self, var):
- self.expand_cache = {}
- if not var in self.dict:
- self.dict[var] = {}
-
- def _findVar(self, var):
- dest = self.dict
- while dest:
- if var in dest:
- return dest[var]
-
- if "_data" not in dest:
- break
- dest = dest["_data"]
-
- def _makeShadowCopy(self, var):
- if var in self.dict:
- return
-
- local_var = self._findVar(var)
-
- if local_var:
- self.dict[var] = copy.copy(local_var)
- else:
- self.initVar(var)
-
- def setVar(self, var, value):
- self.expand_cache = {}
- match = __setvar_regexp__.match(var)
- if match and match.group("keyword") in __setvar_keyword__:
- base = match.group('base')
- keyword = match.group("keyword")
- override = match.group('add')
- l = self.getVarFlag(base, keyword) or []
- l.append([value, override])
- self.setVarFlag(base, keyword, l)
-
- # todo make sure keyword is not __doc__ or __module__
- # pay the cookie monster
- try:
- self._special_values[keyword].add( base )
- except KeyError:
- self._special_values[keyword] = set()
- self._special_values[keyword].add( base )
-
- return
-
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- # more cookies for the cookie monster
- if '_' in var:
- override = var[var.rfind('_')+1:]
- if override not in self._seen_overrides:
- self._seen_overrides[override] = set()
- self._seen_overrides[override].add( var )
-
- # setting var
- self.dict[var]["content"] = value
-
- def getVar(self, var, exp):
- value = self.getVarFlag(var, "content")
-
- if exp and value:
- return self.expand(value, var)
- return value
-
- def renameVar(self, key, newkey):
- """
- Rename the variable key to newkey
- """
- val = self.getVar(key, 0)
- if val is not None:
- self.setVar(newkey, val)
-
- for i in ('_append', '_prepend'):
- src = self.getVarFlag(key, i)
- if src is None:
- continue
-
- dest = self.getVarFlag(newkey, i) or []
- dest.extend(src)
- self.setVarFlag(newkey, i, dest)
-
- if i in self._special_values and key in self._special_values[i]:
- self._special_values[i].remove(key)
- self._special_values[i].add(newkey)
-
- self.delVar(key)
-
- def delVar(self, var):
- self.expand_cache = {}
- self.dict[var] = {}
-
- def setVarFlag(self, var, flag, flagvalue):
- if not var in self.dict:
- self._makeShadowCopy(var)
- self.dict[var][flag] = flagvalue
-
- def getVarFlag(self, var, flag, expand=False):
- local_var = self._findVar(var)
- value = None
- if local_var:
- if flag in local_var:
- value = copy.copy(local_var[flag])
- elif flag == "content" and "defaultval" in local_var:
- value = copy.copy(local_var["defaultval"])
- if expand and value:
- value = self.expand(value, None)
- return value
-
- def delVarFlag(self, var, flag):
- local_var = self._findVar(var)
- if not local_var:
- return
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- if var in self.dict and flag in self.dict[var]:
- del self.dict[var][flag]
-
- def setVarFlags(self, var, flags):
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- for i in flags:
- if i == "content":
- continue
- self.dict[var][i] = flags[i]
-
- def getVarFlags(self, var):
- local_var = self._findVar(var)
- flags = {}
-
- if local_var:
- for i in local_var:
- if i == "content":
- continue
- flags[i] = local_var[i]
-
- if len(flags) == 0:
- return None
- return flags
-
-
- def delVarFlags(self, var):
- if not var in self.dict:
- self._makeShadowCopy(var)
-
- if var in self.dict:
- content = None
-
- # try to save the content
- if "content" in self.dict[var]:
- content = self.dict[var]["content"]
- self.dict[var] = {}
- self.dict[var]["content"] = content
- else:
- del self.dict[var]
-
-
- def createCopy(self):
- """
- Create a copy of self by setting _data to self
- """
- # we really want this to be a DataSmart...
- data = DataSmart(seen=self._seen_overrides.copy(), special=self._special_values.copy())
- data.dict["_data"] = self.dict
-
- return data
-
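createCopy() gives copy-on-write behaviour: reads fall through the "_data" link to the parent, writes shadow the key in the child only (sketch):

    parent = DataSmart()
    parent.setVar("A", "1")
    child = parent.createCopy()
    assert child.getVar("A", False) == "1"   # read falls through to parent
    child.setVar("A", "2")                   # shadow copy made in the child
    assert child.getVar("A", False) == "2"
    assert parent.getVar("A", False) == "1"  # parent is untouched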
- def expandVarref(self, variable, parents=False):
- """Find all references to variable in the data and expand it
- in place, optionally descending to parent datastores."""
-
- if parents:
- keys = iter(self)
- else:
- keys = self.localkeys()
-
- ref = '${%s}' % variable
- value = self.getVar(variable, False)
- for key in keys:
- referrervalue = self.getVar(key, False)
- if referrervalue and ref in referrervalue:
- self.setVar(key, referrervalue.replace(ref, value))
-
- def localkeys(self):
- for key in self.dict:
- if key != '_data':
- yield key
-
- def __iter__(self):
- seen = set()
- def _keys(d):
- if "_data" in d:
- for key in _keys(d["_data"]):
- yield key
-
- for key in d:
- if key != "_data":
- if not key in seen:
- seen.add(key)
- yield key
- return _keys(self.dict)
-
- def __len__(self):
- return len(frozenset(self))
-
- def __getitem__(self, item):
- value = self.getVar(item, False)
- if value is None:
- raise KeyError(item)
- else:
- return value
-
- def __setitem__(self, var, value):
- self.setVar(var, value)
-
- def __delitem__(self, var):
- self.delVar(var)
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py
deleted file mode 100644
index 3467ddd613..0000000000
--- a/bitbake/lib/bb/event.py
+++ /dev/null
@@ -1,386 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Event' implementation
-
-Classes and functions for manipulating 'events' in the
-BitBake build tools.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os, sys
-import warnings
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
-import logging
-import atexit
-import bb.utils
-
-# This is the pid for which we should generate the event. This is set when
-# the runqueue forks off.
-worker_pid = 0
-worker_pipe = None
-
-class Event(object):
- """Base class for events"""
-
- def __init__(self):
- self.pid = worker_pid
-
-NotHandled = 0
-Handled = 1
-
-Registered = 10
-AlreadyRegistered = 14
-
-# Internal
-_handlers = {}
-_ui_handlers = {}
-_ui_handler_seq = 0
-
-# For compatibility
-bb.utils._context["NotHandled"] = NotHandled
-bb.utils._context["Handled"] = Handled
-
-def fire_class_handlers(event, d):
- if isinstance(event, logging.LogRecord):
- return
-
- for handler in _handlers:
- h = _handlers[handler]
- event.data = d
- if type(h).__name__ == "code":
- locals = {"e": event}
- bb.utils.simple_exec(h, locals)
- ret = bb.utils.better_eval("tmpHandler(e)", locals)
- if ret is not None:
- warnings.warn("Using Handled/NotHandled in event handlers is deprecated",
- DeprecationWarning, stacklevel = 2)
- else:
- h(event)
- del event.data
-
-ui_queue = []
-@atexit.register
-def print_ui_queue():
- """If we're exiting before a UI has been spawned, display any queued
- LogRecords to the console."""
- logger = logging.getLogger("BitBake")
- if not _ui_handlers:
- from bb.msg import BBLogFormatter
- console = logging.StreamHandler(sys.stdout)
- console.setFormatter(BBLogFormatter("%(levelname)s: %(message)s"))
- logger.handlers = [console]
- while ui_queue:
- event = ui_queue.pop()
- if isinstance(event, logging.LogRecord):
- logger.handle(event)
-
-def fire_ui_handlers(event, d):
- if not _ui_handlers:
- # No UI handlers registered yet, queue up the messages
- ui_queue.append(event)
- return
-
- errors = []
- for h in _ui_handlers:
- #print "Sending event %s" % event
- try:
- # We use pickle here since it better handles object instances
- # which xmlrpc's marshaller does not. Events *must* be serializable
- # by pickle.
- _ui_handlers[h].event.send((pickle.dumps(event)))
- except:
- errors.append(h)
- for h in errors:
- del _ui_handlers[h]
-
-def fire(event, d):
- """Fire off an Event"""
-
- # We can fire class handlers in the worker process context and this is
- # desired so they get the task based datastore.
- # UI handlers need to be fired in the server context so we defer this. They
- # don't have a datastore so the datastore context isn't a problem.
-
- fire_class_handlers(event, d)
- if worker_pid != 0:
- worker_fire(event, d)
- else:
- fire_ui_handlers(event, d)
-
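A minimal sketch of registering a class handler and firing an event, assuming a bitbake tree (the handler name is hypothetical):

    import bb.event

    def on_config_parsed(e):
        if isinstance(e, bb.event.ConfigParsed):
            pass  # react to the configuration being parsed

    bb.event.register("on_config_parsed", on_config_parsed)
    # class handlers run immediately; with no UI handlers registered the
    # event is also queued for a future UI
    bb.event.fire(bb.event.ConfigParsed(), None)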
-def worker_fire(event, d):
- data = "<event>" + pickle.dumps(event) + "</event>"
- worker_pipe.write(data)
-
-def fire_from_worker(event, d):
- if not event.startswith("<event>") or not event.endswith("</event>"):
- print("Error, not an event %s" % event)
- return
- event = pickle.loads(event[7:-8])
- fire_ui_handlers(event, d)
-
-def register(name, handler):
- """Register an Event handler"""
-
- # already registered
- if name in _handlers:
- return AlreadyRegistered
-
- if handler is not None:
- # handle string containing python code
- if isinstance(handler, basestring):
- tmp = "def tmpHandler(e):\n%s" % handler
- comp = bb.utils.better_compile(tmp, "tmpHandler(e)", "bb.event._registerCode")
- _handlers[name] = comp
- else:
- _handlers[name] = handler
-
- return Registered
-
-def remove(name, handler):
- """Remove an Event handler"""
- _handlers.pop(name)
-
-def register_UIHhandler(handler):
- bb.event._ui_handler_seq = bb.event._ui_handler_seq + 1
- _ui_handlers[_ui_handler_seq] = handler
- return _ui_handler_seq
-
-def unregister_UIHhandler(handlerNum):
- if handlerNum in _ui_handlers:
- del _ui_handlers[handlerNum]
- return
-
-def getName(e):
- """Returns the name of a class or class instance"""
-    if getattr(e, "__name__", None) is None:
- return e.__class__.__name__
- else:
- return e.__name__
-
-class ConfigParsed(Event):
- """Configuration Parsing Complete"""
-
-class RecipeParsed(Event):
- """ Recipe Parsing Complete """
-
- def __init__(self, fn):
- self.fn = fn
- Event.__init__(self)
-
-class StampUpdate(Event):
- """Trigger for any adjustment of the stamp files to happen"""
-
- def __init__(self, targets, stampfns):
- self._targets = targets
- self._stampfns = stampfns
- Event.__init__(self)
-
- def getStampPrefix(self):
- return self._stampfns
-
- def getTargets(self):
- return self._targets
-
- stampPrefix = property(getStampPrefix)
- targets = property(getTargets)
-
-class BuildBase(Event):
- """Base class for bbmake run events"""
-
- def __init__(self, n, p, failures = 0):
- self._name = n
- self._pkgs = p
- Event.__init__(self)
- self._failures = failures
-
- def getPkgs(self):
- return self._pkgs
-
- def setPkgs(self, pkgs):
- self._pkgs = pkgs
-
- def getName(self):
- return self._name
-
- def setName(self, name):
- self._name = name
-
- def getCfg(self):
- return self.data
-
- def setCfg(self, cfg):
- self.data = cfg
-
- def getFailures(self):
- """
- Return the number of failed packages
- """
- return self._failures
-
- pkgs = property(getPkgs, setPkgs, None, "pkgs property")
- name = property(getName, setName, None, "name property")
- cfg = property(getCfg, setCfg, None, "cfg property")
-
-
-class BuildStarted(BuildBase):
- """bbmake build run started"""
-
-
-class BuildCompleted(BuildBase):
- """bbmake build run completed"""
-
-
-class NoProvider(Event):
- """No Provider for an Event"""
-
- def __init__(self, item, runtime=False, dependees=None):
- Event.__init__(self)
- self._item = item
- self._runtime = runtime
- self._dependees = dependees
-
- def getItem(self):
- return self._item
-
- def isRuntime(self):
- return self._runtime
-
-class MultipleProviders(Event):
- """Multiple Providers"""
-
- def __init__(self, item, candidates, runtime = False):
- Event.__init__(self)
- self._item = item
- self._candidates = candidates
- self._is_runtime = runtime
-
- def isRuntime(self):
- """
- Is this a runtime issue?
- """
- return self._is_runtime
-
- def getItem(self):
-        """
-        The name of the item to be built
-        """
- return self._item
-
- def getCandidates(self):
- """
- Get the possible Candidates for a PROVIDER.
- """
- return self._candidates
-
-class ParseStarted(Event):
- """Recipe parsing for the runqueue has begun"""
- def __init__(self, total):
- Event.__init__(self)
- self.total = total
-
-class ParseCompleted(Event):
- """Recipe parsing for the runqueue has completed"""
-
- def __init__(self, cached, parsed, skipped, masked, virtuals, errors, total):
- Event.__init__(self)
- self.cached = cached
- self.parsed = parsed
- self.skipped = skipped
- self.virtuals = virtuals
- self.masked = masked
- self.errors = errors
- self.sofar = cached + parsed
- self.total = total
-
-class ParseProgress(Event):
- """Recipe parsing progress"""
-
-    def __init__(self, current):
-        Event.__init__(self)
-        self.current = current
-
-class CacheLoadStarted(Event):
- """Loading of the dependency cache has begun"""
- def __init__(self, total):
- Event.__init__(self)
- self.total = total
-
-class CacheLoadProgress(Event):
- """Cache loading progress"""
- def __init__(self, current):
- Event.__init__(self)
- self.current = current
-
-class CacheLoadCompleted(Event):
- """Cache loading is complete"""
- def __init__(self, total, num_entries):
- Event.__init__(self)
- self.total = total
- self.num_entries = num_entries
-
-
-class DepTreeGenerated(Event):
- """
- Event when a dependency tree has been generated
- """
-
- def __init__(self, depgraph):
- Event.__init__(self)
- self._depgraph = depgraph
-
-class MsgBase(Event):
- """Base class for messages"""
-
- def __init__(self, msg):
- self._message = msg
- Event.__init__(self)
-
-class MsgDebug(MsgBase):
- """Debug Message"""
-
-class MsgNote(MsgBase):
- """Note Message"""
-
-class MsgWarn(MsgBase):
- """Warning Message"""
-
-class MsgError(MsgBase):
- """Error Message"""
-
-class MsgFatal(MsgBase):
- """Fatal Message"""
-
-class MsgPlain(MsgBase):
- """General output"""
-
-class LogHandler(logging.Handler):
- """Dispatch logging messages as bitbake events"""
-
- def emit(self, record):
- fire(record, None)
-
- def filter(self, record):
- record.taskpid = worker_pid
- return True
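A sketch of LogHandler in use, assuming a bitbake tree: records logged through a standard logger are dispatched as events via fire():

    import logging
    import bb.event

    log = logging.getLogger("BitBake.Example")
    log.addHandler(bb.event.LogHandler())
    # filter() tags the record with taskpid; with no UI registered the
    # record is queued and printed at exit by print_ui_queue()
    log.warn("sent through the event system")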
diff --git a/bitbake/lib/bb/fetch/__init__.py b/bitbake/lib/bb/fetch/__init__.py
deleted file mode 100644
index 2f92d87d96..0000000000
--- a/bitbake/lib/bb/fetch/__init__.py
+++ /dev/null
@@ -1,836 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from __future__ import absolute_import
-from __future__ import print_function
-import os, re
-import logging
-import bb
-from bb import data
-from bb import persist_data
-from bb import utils
-
-__version__ = "1"
-
-logger = logging.getLogger("BitBake.Fetch")
-
-class MalformedUrl(Exception):
- """Exception raised when encountering an invalid url"""
-
-class FetchError(Exception):
- """Exception raised when a download fails"""
-
-class NoMethodError(Exception):
- """Exception raised when there is no method to obtain a supplied url or set of urls"""
-
-class MissingParameterError(Exception):
- """Exception raised when a fetch method is missing a critical parameter in the url"""
-
-class ParameterError(Exception):
-    """Exception raised when a url cannot be processed due to invalid parameters."""
-
-class MD5SumError(Exception):
- """Exception raised when a MD5SUM of a file does not match the expected one"""
-
-class InvalidSRCREV(Exception):
- """Exception raised when an invalid SRCREV is encountered"""
-
-def decodeurl(url):
-    """Decodes a URL into the tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
- if not m:
- raise MalformedUrl(url)
-
- type = m.group('type')
- location = m.group('location')
- if not location:
- raise MalformedUrl(url)
- user = m.group('user')
- parm = m.group('parm')
-
- locidx = location.find('/')
- if locidx != -1 and type.lower() != 'file':
- host = location[:locidx]
- path = location[locidx:]
- else:
- host = ""
- path = location
- if user:
- m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
- if m:
- user = m.group('user')
- pswd = m.group('pswd')
- else:
- user = ''
- pswd = ''
-
- p = {}
- if parm:
- for s in parm.split(';'):
- s1, s2 = s.split('=')
- p[s1] = s2
-
- return (type, host, path, user, pswd, p)
-
-def encodeurl(decoded):
- """Encodes a URL from tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- (type, host, path, user, pswd, p) = decoded
-
- if not type or not path:
- raise MissingParameterError("Type or path url components missing when encoding %s" % decoded)
- url = '%s://' % type
- if user:
- url += "%s" % user
- if pswd:
- url += ":%s" % pswd
- url += "@"
- if host:
- url += "%s" % host
- url += "%s" % path
- if p:
- for parm in p:
- url += ";%s=%s" % (parm, p[parm])
-
- return url
-
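A round-trip sketch for decodeurl()/encodeurl() with a hypothetical URL (a single parameter, since dict ordering makes multi-parameter round-trips unstable):

    u = "git://git.example.com/repo.git;protocol=http"
    scheme, host, path, user, pswd, parm = decodeurl(u)
    # scheme == "git", host == "git.example.com", path == "/repo.git"
    # user == "", pswd == "", parm == {"protocol": "http"}
    assert encodeurl((scheme, host, path, user, pswd, parm)) == u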
-def uri_replace(uri, uri_find, uri_replace, d):
-    if not uri or not uri_find or not uri_replace:
-        logger.debug(1, "uri_replace: passed an undefined value, not replacing")
-        return uri
- uri_decoded = list(decodeurl(uri))
- uri_find_decoded = list(decodeurl(uri_find))
- uri_replace_decoded = list(decodeurl(uri_replace))
- result_decoded = ['', '', '', '', '', {}]
- for i in uri_find_decoded:
- loc = uri_find_decoded.index(i)
- result_decoded[loc] = uri_decoded[loc]
- if isinstance(i, basestring):
- if (re.match(i, uri_decoded[loc])):
- result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
- if uri_find_decoded.index(i) == 2:
- if d:
- localfn = bb.fetch.localpath(uri, d)
- if localfn:
- result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(bb.fetch.localpath(uri, d)))
- else:
- return uri
- return encodeurl(result_decoded)
-
-methods = []
-urldata_cache = {}
-saved_headrevs = {}
-
-def fetcher_init(d):
- """
- Called to initialize the fetchers once the configuration data is known.
- Calls before this must not hit the cache.
- """
- pd = persist_data.persist(d)
- # When to drop SCM head revisions controlled by user policy
- srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, 1) or "clear"
- if srcrev_policy == "cache":
- logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
- elif srcrev_policy == "clear":
- logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
- try:
- bb.fetch.saved_headrevs = pd['BB_URI_HEADREVS'].items()
- except:
- pass
- del pd['BB_URI_HEADREVS']
- else:
- raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
-
- for m in methods:
- if hasattr(m, "init"):
- m.init(d)
-
-def fetcher_compare_revisions(d):
- """
-    Compare the revisions in the persistent cache with current values and
- return true/false on whether they've changed.
- """
-
- pd = persist_data.persist(d)
- data = pd['BB_URI_HEADREVS'].items()
- data2 = bb.fetch.saved_headrevs
-
-    for key in data:
-        if key not in data2 or data2[key] != data[key]:
-            logger.debug(1, "%s changed", key)
-            return True
-        else:
-            logger.debug(2, "%s did not change", key)
-    return False
-
-# Function call order is usually:
-# 1. init
-# 2. go
-# 3. localpaths
-# localpath can be called at any time
-
-def init(urls, d, setup = True):
- urldata = {}
-
- fn = bb.data.getVar('FILE', d, 1)
- if fn in urldata_cache:
- urldata = urldata_cache[fn]
-
- for url in urls:
- if url not in urldata:
- urldata[url] = FetchData(url, d)
-
- if setup:
- for url in urldata:
- if not urldata[url].setup:
- urldata[url].setup_localpath(d)
-
- urldata_cache[fn] = urldata
- return urldata
-
-def mirror_from_string(data):
- return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
-
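mirror_from_string() turns a PREMIRRORS/MIRRORS-style value into (find, replace) pairs; a sketch with hypothetical mirror URLs (note the literal backslash-n separator the metadata uses between entries):

    premirrors = "git://.*/.* http://mirror.example.com/sources/\\n" \
                 "ftp://.*/.* http://mirror.example.com/sources/"
    assert mirror_from_string(premirrors) == [
        ["git://.*/.*", "http://mirror.example.com/sources/"],
        ["ftp://.*/.*", "http://mirror.example.com/sources/"],
    ]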
-def verify_checksum(u, ud, d):
-    """
-    Verify the MD5 and SHA256 checksums of a downloaded file.
-
-    Raises FetchError if either checksum fails to match. If the recipe
-    provides no checksums, BB_STRICT_CHECKSUM decides the outcome:
-    "1" raises FetchError, anything else logs a warning and continues.
-    """
-
- if not ud.type in ["http", "https", "ftp", "ftps"]:
- return
-
- md5data = bb.utils.md5_file(ud.localpath)
- sha256data = bb.utils.sha256_file(ud.localpath)
-
-    if ud.md5_expected is None or ud.sha256_expected is None:
- logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
- ud.localpath, ud.md5_name, md5data,
- ud.sha256_name, sha256data)
- if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
- raise FetchError("No checksum specified for %s." % u)
- return
-
- if (ud.md5_expected != md5data or ud.sha256_expected != sha256data):
- logger.error('The checksums for "%s" did not match.\n'
- ' MD5: expected "%s", got "%s"\n'
- ' SHA256: expected "%s", got "%s"\n',
- ud.localpath, ud.md5_expected, md5data,
- ud.sha256_expected, sha256data)
- raise FetchError("%s checksum mismatch." % u)
-
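The expected sums read above come from recipe varflags such as SRC_URI[md5sum]; the hashing itself lives in bb.utils. A rough hashlib-based sketch of that hashing step (not the bb.utils implementation):

    import hashlib

    def md5_file(filename):
        # hash in chunks so large downloads don't need to fit in memory;
        # sha256_file is the same idea with hashlib.sha256()
        m = hashlib.md5()
        f = open(filename, "rb")
        for chunk in iter(lambda: f.read(8192), ""):
            m.update(chunk)
        f.close()
        return m.hexdigest()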
-def go(d, urls = None):
- """
- Fetch all urls
- init must have previously been called
- """
- if not urls:
- urls = d.getVar("SRC_URI", 1).split()
- urldata = init(urls, d, True)
-
- for u in urls:
- ud = urldata[u]
- m = ud.method
- localpath = ""
-
- if not ud.localfile:
- continue
-
- lf = bb.utils.lockfile(ud.lockfile)
-
- if m.try_premirror(u, ud, d):
- # First try fetching uri, u, from PREMIRRORS
- mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
- localpath = try_mirrors(d, u, mirrors, False, m.forcefetch(u, ud, d))
- elif os.path.exists(ud.localfile):
- localpath = ud.localfile
-
- # Need to re-test forcefetch() which will return true if our copy is too old
- if m.forcefetch(u, ud, d) or not localpath:
- # Next try fetching from the original uri, u
- try:
- m.go(u, ud, d)
- localpath = ud.localpath
- except FetchError:
- # Remove any incomplete file
- bb.utils.remove(ud.localpath)
- # Finally, try fetching uri, u, from MIRRORS
- mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
- localpath = try_mirrors (d, u, mirrors)
- if not localpath or not os.path.exists(localpath):
- raise FetchError("Unable to fetch URL %s from any source." % u)
-
- ud.localpath = localpath
-
- if os.path.exists(ud.md5):
- # Touch the md5 file to show active use of the download
- try:
- os.utime(ud.md5, None)
- except:
- # Errors aren't fatal here
- pass
- else:
- # Only check the checksums if we've not seen this item before
- verify_checksum(u, ud, d)
- Fetch.write_md5sum(u, ud, d)
-
- bb.utils.unlockfile(lf)
-
-def checkstatus(d, urls = None):
- """
- Check all urls exist upstream
- init must have previously been called
- """
- urldata = init([], d, True)
-
- if not urls:
- urls = urldata
-
- for u in urls:
- ud = urldata[u]
- m = ud.method
- logger.debug(1, "Testing URL %s", u)
- # First try checking uri, u, from PREMIRRORS
- mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
- ret = try_mirrors(d, u, mirrors, True)
- if not ret:
- # Next try checking from the original uri, u
- try:
- ret = m.checkstatus(u, ud, d)
- except:
- # Finally, try checking uri, u, from MIRRORS
- mirrors = mirror_from_string(bb.data.getVar('MIRRORS', d, True))
- ret = try_mirrors (d, u, mirrors, True)
-
- if not ret:
- raise FetchError("URL %s doesn't work" % u)
-
-def localpaths(d):
- """
- Return a list of the local filenames, assuming successful fetch
- """
- local = []
- urldata = init([], d, True)
-
- for u in urldata:
- ud = urldata[u]
- local.append(ud.localpath)
-
- return local
-
-srcrev_internal_call = False
-
-def get_autorev(d):
- return get_srcrev(d)
-
-def get_srcrev(d):
- """
- Return the version string for the current package
- (usually to be used as PV)
- Most packages usually only have one SCM so we just pass on the call.
- In the multi SCM case, we build a value based on SRCREV_FORMAT which must
- have been set.
- """
-
- #
- # Ugly code alert. localpath in the fetchers will try to evaluate SRCREV which
- # could translate into a call to here. If it does, we need to catch this
- # and provide some way so it knows get_srcrev is active instead of being
- # some number etc. hence the srcrev_internal_call tracking and the magic
- # "SRCREVINACTION" return value.
- #
- # Neater solutions welcome!
- #
- if bb.fetch.srcrev_internal_call:
- return "SRCREVINACTION"
-
- scms = []
-
- # Only call setup_localpath on URIs which supports_srcrev()
- urldata = init(bb.data.getVar('SRC_URI', d, 1).split(), d, False)
- for u in urldata:
- ud = urldata[u]
- if ud.method.supports_srcrev():
- if not ud.setup:
- ud.setup_localpath(d)
- scms.append(u)
-
- if len(scms) == 0:
- logger.error("SRCREV was used yet no valid SCM was found in SRC_URI")
- raise ParameterError
-
- if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
- bb.data.setVar('__BB_DONT_CACHE', '1', d)
-
- if len(scms) == 1:
- return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d)
-
- #
-    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
- #
- format = bb.data.getVar('SRCREV_FORMAT', d, 1)
- if not format:
- logger.error("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
- raise ParameterError
-
- for scm in scms:
- if 'name' in urldata[scm].parm:
- name = urldata[scm].parm["name"]
- rev = urldata[scm].method.sortable_revision(scm, urldata[scm], d)
- format = format.replace(name, rev)
-
- return format
-
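An illustration of the SRCREV_FORMAT substitution above, with hypothetical SCM names and revisions:

    fmt = "machine_meta"                      # SRCREV_FORMAT
    revs = {"machine": "123", "meta": "456"}  # name -> sortable revision
    for name, rev in revs.items():
        fmt = fmt.replace(name, rev)
    assert fmt == "123_456"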
-def localpath(url, d, cache = True):
- """
- Called from the parser with cache=False since the cache isn't ready
-    at this point. Also called from classes in OE, e.g. patch.bbclass
- """
- ud = init([url], d)
- if ud[url].method:
- return ud[url].localpath
- return url
-
-def runfetchcmd(cmd, d, quiet = False):
- """
- Run cmd returning the command output
- Raise an error if interrupted or cmd fails
- Optionally echo command output to stdout
- """
-
- # Need to export PATH as binary could be in metadata paths
- # rather than host provided
- # Also include some other variables.
-    # FIXME: Should really include all export variables?
- exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
- 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
- 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
- 'KRB5CCNAME', 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']
-
- for var in exportvars:
- val = data.getVar(var, d, True)
- if val:
- cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
-
- logger.debug(1, "Running %s", cmd)
-
- # redirect stderr to stdout
- stdout_handle = os.popen(cmd + " 2>&1", "r")
- output = ""
-
- while True:
- line = stdout_handle.readline()
- if not line:
- break
- if not quiet:
- print(line, end=' ')
- output += line
-
-    status = stdout_handle.close() or 0
-    # close() returns a wait()-style status: the signal number lives in
-    # the low bits, the exit code in the high byte
-    signal = status & 0x7f
-    exitstatus = status >> 8
-
-    if signal:
-        raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
-    elif exitstatus != 0:
-        raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))
-
- return output
-
-def try_mirrors(d, uri, mirrors, check = False, force = False):
- """
- Try to use a mirrored version of the sources.
- This method will be automatically called before the fetchers go.
-
-    d is a bb.data instance
- uri is the original uri we're trying to download
- mirrors is the list of mirrors we're going to try
- """
- fpath = os.path.join(data.getVar("DL_DIR", d, 1), os.path.basename(uri))
- if not check and os.access(fpath, os.R_OK) and not force:
- logger.debug(1, "%s already exists, skipping checkout.", fpath)
- return fpath
-
- ld = d.createCopy()
- for (find, replace) in mirrors:
- newuri = uri_replace(uri, find, replace, ld)
- if newuri != uri:
- try:
- ud = FetchData(newuri, ld)
- except bb.fetch.NoMethodError:
- logger.debug(1, "No method for %s", uri)
- continue
-
- ud.setup_localpath(ld)
-
- try:
- if check:
- found = ud.method.checkstatus(newuri, ud, ld)
- if found:
- return found
- else:
- ud.method.go(newuri, ud, ld)
- return ud.localpath
- except (bb.fetch.MissingParameterError,
- bb.fetch.FetchError,
- bb.fetch.MD5SumError):
- import sys
- (type, value, traceback) = sys.exc_info()
- logger.debug(2, "Mirror fetch failure: %s", value)
- bb.utils.remove(ud.localpath)
- continue
- return None
-
-
-class FetchData(object):
- """
- A class which represents the fetcher state for a given URI.
- """
- def __init__(self, url, d):
- self.localfile = ""
- (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
- self.date = Fetch.getSRCDate(self, d)
- self.url = url
- if not self.user and "user" in self.parm:
- self.user = self.parm["user"]
- if not self.pswd and "pswd" in self.parm:
- self.pswd = self.parm["pswd"]
- self.setup = False
-
- if "name" in self.parm:
- self.md5_name = "%s.md5sum" % self.parm["name"]
- self.sha256_name = "%s.sha256sum" % self.parm["name"]
- else:
- self.md5_name = "md5sum"
- self.sha256_name = "sha256sum"
- self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
- self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
-
- for m in methods:
- if m.supports(url, self, d):
- self.method = m
- return
- raise NoMethodError("Missing implementation for url %s" % url)
-
- def setup_localpath(self, d):
- self.setup = True
- if "localpath" in self.parm:
- # if user sets localpath for file, use it instead.
- self.localpath = self.parm["localpath"]
- self.basename = os.path.basename(self.localpath)
- else:
- premirrors = bb.data.getVar('PREMIRRORS', d, True)
- local = ""
- if premirrors and self.url:
- aurl = self.url.split(";")[0]
- mirrors = mirror_from_string(premirrors)
- for (find, replace) in mirrors:
- if replace.startswith("file://"):
- path = aurl.split("://")[1]
- path = path.split(";")[0]
- local = replace.split("://")[1] + os.path.basename(path)
- if local == aurl or not os.path.exists(local) or os.path.isdir(local):
- local = ""
- self.localpath = local
- if not local:
- try:
- bb.fetch.srcrev_internal_call = True
- self.localpath = self.method.localpath(self.url, self, d)
- finally:
- bb.fetch.srcrev_internal_call = False
- # We have to clear data's internal caches since the cached value of SRCREV is now wrong.
- # Horrible...
- bb.data.delVar("ISHOULDNEVEREXIST", d)
-
- if self.localpath is not None:
- # Note: These files should always be in DL_DIR whereas localpath may not be.
- basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
- self.md5 = basepath + '.md5'
- self.lockfile = basepath + '.lock'
-
-
-class Fetch(object):
- """Base class for 'fetch'ing data"""
-
-    def __init__(self, urls = None):
-        self.urls = urls or []
-
- def supports(self, url, urldata, d):
- """
- Check to see if this fetch class supports a given url.
- """
- return 0
-
- def localpath(self, url, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- Can also setup variables in urldata for use in go (saving code duplication
- and duplicate code execution)
- """
-        return url
-
-    def _strip_leading_slashes(self, relpath):
- """
- Remove leading slash as os.path.join can't cope
- """
- while os.path.isabs(relpath):
- relpath = relpath[1:]
- return relpath
-
- def setUrls(self, urls):
- self.__urls = urls
-
- def getUrls(self):
- return self.__urls
-
- urls = property(getUrls, setUrls, None, "Urls property")
-
- def forcefetch(self, url, urldata, d):
- """
- Force a fetch, even if localpath exists?
- """
- return False
-
- def supports_srcrev(self):
- """
- The fetcher supports auto source revisions (SRCREV)
- """
- return False
-
- def go(self, url, urldata, d):
- """
- Fetch urls
- Assumes localpath was called first
- """
- raise NoMethodError("Missing implementation for url")
-
- def try_premirror(self, url, urldata, d):
- """
- Should premirrors be used?
- """
- if urldata.method.forcefetch(url, urldata, d):
- return True
- elif os.path.exists(urldata.md5) and os.path.exists(urldata.localfile):
- return False
- else:
- return True
-
- def checkstatus(self, url, urldata, d):
- """
- Check the status of a URL
- Assumes localpath was called first
- """
- logger.info("URL %s could not be checked for status since no method exists.", url)
- return True
-
- def getSRCDate(urldata, d):
- """
- Return the SRC Date for the component
-
-        d the datastore to query
- """
- if "srcdate" in urldata.parm:
- return urldata.parm['srcdate']
-
- pn = data.getVar("PN", d, 1)
-
- if pn:
- return data.getVar("SRCDATE_%s" % pn, d, 1) or data.getVar("CVSDATE_%s" % pn, d, 1) or data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
-
- return data.getVar("SRCDATE", d, 1) or data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1)
- getSRCDate = staticmethod(getSRCDate)
-
- def srcrev_internal_helper(ud, d):
- """
- Return:
- a) a source revision if specified
- b) True if auto srcrev is in action
- c) False otherwise
- """
-
- if 'rev' in ud.parm:
- return ud.parm['rev']
-
- if 'tag' in ud.parm:
- return ud.parm['tag']
-
- rev = None
- if 'name' in ud.parm:
- pn = data.getVar("PN", d, 1)
- rev = data.getVar("SRCREV_%s_pn-%s" % (ud.parm['name'], pn), d, 1)
- if not rev:
- rev = data.getVar("SRCREV_pn-%s_%s" % (pn, ud.parm['name']), d, 1)
- if not rev:
- rev = data.getVar("SRCREV_%s" % (ud.parm['name']), d, 1)
- if not rev:
- rev = data.getVar("SRCREV", d, 1)
- if rev == "INVALID":
- raise InvalidSRCREV("Please set SRCREV to a valid value")
- if not rev:
- return False
- if rev == "SRCREVINACTION":
- return True
- return rev
-
- srcrev_internal_helper = staticmethod(srcrev_internal_helper)
-
- def localcount_internal_helper(ud, d):
- """
- Return:
- a) a locked localcount if specified
- b) None otherwise
- """
-
- localcount = None
- if 'name' in ud.parm:
- pn = data.getVar("PN", d, 1)
- localcount = data.getVar("LOCALCOUNT_" + ud.parm['name'], d, 1)
- if not localcount:
- localcount = data.getVar("LOCALCOUNT", d, 1)
- return localcount
-
- localcount_internal_helper = staticmethod(localcount_internal_helper)
-
- def verify_md5sum(ud, got_sum):
- """
- Verify the md5sum we wanted with the one we got
- """
- wanted_sum = ud.parm.get('md5sum')
- if not wanted_sum:
- return True
-
- return wanted_sum == got_sum
- verify_md5sum = staticmethod(verify_md5sum)
-
- def write_md5sum(url, ud, d):
- md5data = bb.utils.md5_file(ud.localpath)
- # verify the md5sum
- if not Fetch.verify_md5sum(ud, md5data):
- raise MD5SumError(url)
-
-        md5out = open(ud.md5, 'w')
- md5out.write(md5data)
- md5out.close()
- write_md5sum = staticmethod(write_md5sum)
-
- def latest_revision(self, url, ud, d):
- """
- Look in the cache for the latest revision, if not present ask the SCM.
- """
- if not hasattr(self, "_latest_revision"):
- raise ParameterError
-
- pd = persist_data.persist(d)
- revs = pd['BB_URI_HEADREVS']
- key = self.generate_revision_key(url, ud, d)
- rev = revs[key]
-        if rev is not None:
- return str(rev)
-
- revs[key] = rev = self._latest_revision(url, ud, d)
- return rev
-
- def sortable_revision(self, url, ud, d):
-        """
-        Return a sortable revision of the form "<count>+<revision>",
-        maintaining a per-URI local count that grows as the upstream
-        revision changes.
-        """
- if hasattr(self, "_sortable_revision"):
- return self._sortable_revision(url, ud, d)
-
- pd = persist_data.persist(d)
- localcounts = pd['BB_URI_LOCALCOUNT']
- key = self.generate_revision_key(url, ud, d)
-
- latest_rev = self._build_revision(url, ud, d)
- last_rev = localcounts[key + '_rev']
- uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
- count = None
- if uselocalcount:
- count = Fetch.localcount_internal_helper(ud, d)
- if count is None:
- count = localcounts[key + '_count']
-
- if last_rev == latest_rev:
- return str(count + "+" + latest_rev)
-
- buildindex_provided = hasattr(self, "_sortable_buildindex")
- if buildindex_provided:
- count = self._sortable_buildindex(url, ud, d, latest_rev)
-
- if count is None:
- count = "0"
- elif uselocalcount or buildindex_provided:
- count = str(count)
- else:
- count = str(int(count) + 1)
-
- localcounts[key + '_rev'] = latest_rev
- localcounts[key + '_count'] = count
-
- return str(count + "+" + latest_rev)
-
- def generate_revision_key(self, url, ud, d):
- key = self._revision_key(url, ud, d)
- return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
-
-from . import cvs
-from . import git
-from . import local
-from . import svn
-from . import wget
-from . import svk
-from . import ssh
-from . import perforce
-from . import bzr
-from . import hg
-from . import osc
-from . import repo
-
-methods.append(local.Local())
-methods.append(wget.Wget())
-methods.append(svn.Svn())
-methods.append(git.Git())
-methods.append(cvs.Cvs())
-methods.append(svk.Svk())
-methods.append(ssh.SSH())
-methods.append(perforce.Perforce())
-methods.append(bzr.Bzr())
-methods.append(hg.Hg())
-methods.append(osc.Osc())
-methods.append(repo.Repo())
diff --git a/bitbake/lib/bb/fetch/bzr.py b/bitbake/lib/bb/fetch/bzr.py
deleted file mode 100644
index afaf799900..0000000000
--- a/bitbake/lib/bb/fetch/bzr.py
+++ /dev/null
@@ -1,148 +0,0 @@
-"""
-BitBake 'Fetch' implementation for bzr.
-
-"""
-
-# Copyright (C) 2007 Ross Burton
-# Copyright (C) 2007 Richard Purdie
-#
-# Classes for obtaining upstream sources for the
-# BitBake build tools.
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch, FetchError, runfetchcmd, logger
-
-class Bzr(Fetch):
- def supports(self, url, ud, d):
- return ud.type in ['bzr']
-
-    def localpath(self, url, ud, d):
-
- # Create paths to bzr checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
-
- revision = Fetch.srcrev_internal_helper(ud, d)
- if revision is True:
- ud.revision = self.latest_revision(url, ud, d)
- elif revision:
- ud.revision = revision
-
- if not ud.revision:
- ud.revision = self.latest_revision(url, ud, d)
-
- ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def _buildbzrcommand(self, ud, d, command):
- """
-        Build up a bzr command line based on ud
- command is "fetch", "update", "revno"
- """
-
- basecmd = data.expand('${FETCHCMD_bzr}', d)
-
- proto = ud.parm.get('proto', 'http')
-
- bzrroot = ud.host + ud.path
-
- options = []
-
-        if command == "revno":
-            bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
-        else:
-            if ud.revision:
-                options.append("-r %s" % ud.revision)
-
-            if command == "fetch":
-                bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
-            elif command == "update":
-                bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
-            else:
-                raise FetchError("Invalid bzr command %s" % command)
-
- return bzrcmd
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
- bzrcmd = self._buildbzrcommand(ud, d, "update")
- logger.debug(1, "BZR Update %s", loc)
- os.chdir(os.path.join (ud.pkgdir, os.path.basename(ud.path)))
- runfetchcmd(bzrcmd, d)
- else:
- bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
- bzrcmd = self._buildbzrcommand(ud, d, "fetch")
- logger.debug(1, "BZR Checkout %s", loc)
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", bzrcmd)
- runfetchcmd(bzrcmd, d)
-
- os.chdir(ud.pkgdir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
-
- # tar them up to a defined filename
- try:
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d)
- except:
- t, v, tb = sys.exc_info()
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise t, v, tb
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, url, ud, d):
- """
- Return a unique key for the url
- """
- return "bzr:" + ud.pkgdir
-
- def _latest_revision(self, url, ud, d):
- """
- Return the latest upstream revision number
- """
- logger.debug(2, "BZR fetcher hitting network for %s", url)
-
- output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
-
- return output.strip()
-
- def _sortable_revision(self, url, ud, d):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return self._build_revision(url, ud, d)
-
- def _build_revision(self, url, ud, d):
- return ud.revision
diff --git a/bitbake/lib/bb/fetch/cvs.py b/bitbake/lib/bb/fetch/cvs.py
deleted file mode 100644
index 0edb794b04..0000000000
--- a/bitbake/lib/bb/fetch/cvs.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
-#
-
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch, FetchError, MissingParameterError, logger
-
-class Cvs(Fetch):
- """
- Class to fetch a module or modules from cvs repositories
- """
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with cvs.
- """
- return ud.type in ['cvs']
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("cvs method needs a 'module' parameter")
- ud.module = ud.parm["module"]
-
- ud.tag = ud.parm.get('tag', "")
-
- # Override the default date in certain cases
- if 'date' in ud.parm:
- ud.date = ud.parm['date']
- elif ud.tag:
- ud.date = ""
-
- norecurse = ''
- if 'norecurse' in ud.parm:
- norecurse = '_norecurse'
-
- fullpath = ''
- if 'fullpath' in ud.parm:
- fullpath = '_fullpath'
-
- ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def forcefetch(self, url, ud, d):
- if (ud.date == "now"):
- return True
- return False
-
- def go(self, loc, ud, d):
-
- method = ud.parm.get('method', 'pserver')
- localdir = ud.parm.get('localdir', ud.module)
- cvs_port = ud.parm.get('port', '')
-
- cvs_rsh = None
- if method == "ext":
- if "rsh" in ud.parm:
- cvs_rsh = ud.parm["rsh"]
-
- if method == "dir":
- cvsroot = ud.path
- else:
- cvsroot = ":" + method
- cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
- if cvsproxyhost:
- cvsroot += ";proxy=" + cvsproxyhost
- cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
- if cvsproxyport:
- cvsroot += ";proxyport=" + cvsproxyport
- cvsroot += ":" + ud.user
- if ud.pswd:
- cvsroot += ":" + ud.pswd
- cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
-
- options = []
- if 'norecurse' in ud.parm:
- options.append("-l")
- if ud.date:
- # treat YYYYMMDDHHMM specially for CVS
- if len(ud.date) == 12:
- options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
- else:
- options.append("-D \"%s UTC\"" % ud.date)
- if ud.tag:
- options.append("-r %s" % ud.tag)
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- data.setVar('CVSROOT', cvsroot, localdata)
- data.setVar('CVSCOOPTS', " ".join(options), localdata)
- data.setVar('CVSMODULE', ud.module, localdata)
- cvscmd = data.getVar('FETCHCOMMAND', localdata, 1)
- cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1)
-
- if cvs_rsh:
- cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
- cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
-
- # create module directory
- logger.debug(2, "Fetch: checking for module directory")
- pkg = data.expand('${PN}', d)
- pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
- moddir = os.path.join(pkgdir, localdir)
- if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
- logger.info("Update " + loc)
- # update sources there
- os.chdir(moddir)
- myret = os.system(cvsupdatecmd)
- else:
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(pkgdir)
- os.chdir(pkgdir)
- logger.debug(1, "Running %s", cvscmd)
- myret = os.system(cvscmd)
-
- if myret != 0 or not os.access(moddir, os.R_OK):
- try:
- os.rmdir(moddir)
- except OSError:
- pass
- raise FetchError(ud.module)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude 'CVS'"
-
- # tar them up to a defined filename
- if 'fullpath' in ud.parm:
- os.chdir(pkgdir)
- myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir))
- else:
- os.chdir(moddir)
- os.chdir('..')
- myret = os.system("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir)))
-
- if myret != 0:
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise FetchError(ud.module)
diff --git a/bitbake/lib/bb/fetch/git.py b/bitbake/lib/bb/fetch/git.py
deleted file mode 100644
index b37a09743e..0000000000
--- a/bitbake/lib/bb/fetch/git.py
+++ /dev/null
@@ -1,339 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' git implementation
-
-"""
-
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-import bb.persist_data
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import runfetchcmd
-from bb.fetch import logger
-
-class Git(Fetch):
- """Class to fetch a module or modules from git repositories"""
- def init(self, d):
- #
- # Only enable _sortable revision if the key is set
- #
- if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
-            self._sortable_buildindex = self._sortable_buildindex_disabled
-
-    def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with git.
- """
- return ud.type in ['git']
-
- def localpath(self, url, ud, d):
-
- if 'protocol' in ud.parm:
- ud.proto = ud.parm['protocol']
- elif not ud.host:
- ud.proto = 'file'
- else:
- ud.proto = "rsync"
-
- ud.branch = ud.parm.get("branch", "master")
-
- gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
- ud.mirrortarball = 'git_%s.tar.gz' % (gitsrcname)
- ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
-
- tag = Fetch.srcrev_internal_helper(ud, d)
- if tag is True:
- ud.tag = self.latest_revision(url, ud, d)
- elif tag:
- ud.tag = tag
-
- if not ud.tag or ud.tag == "master":
- ud.tag = self.latest_revision(url, ud, d)
-
- subdir = ud.parm.get("subpath", "")
- if subdir != "":
- if subdir.endswith("/"):
- subdir = subdir[:-1]
-            subdirpath = os.path.join(ud.path, subdir)
-        else:
-            subdirpath = ud.path
-
- if 'fullclone' in ud.parm:
- ud.localfile = ud.mirrortarball
- else:
- ud.localfile = data.expand('git_%s%s_%s.tar.gz' % (ud.host, subdirpath.replace('/', '.'), ud.tag), d)
-
- ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
-
- if 'noclone' in ud.parm:
- ud.localfile = None
- return None
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def forcefetch(self, url, ud, d):
- if 'fullclone' in ud.parm:
- return True
- if 'noclone' in ud.parm:
- return False
- if os.path.exists(ud.localpath):
- return False
- if not self._contains_ref(ud.tag, d):
- return True
- return False
-
- def try_premirror(self, u, ud, d):
- if 'noclone' in ud.parm:
- return False
- if os.path.exists(ud.clonedir):
- return False
- if os.path.exists(ud.localpath):
- return False
-
- return True
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- if ud.user:
- username = ud.user + '@'
- else:
- username = ""
-
- repofile = os.path.join(data.getVar("DL_DIR", d, 1), ud.mirrortarball)
-
- coname = '%s' % (ud.tag)
- codir = os.path.join(ud.clonedir, coname)
-
- # If we have no existing clone and no mirror tarball, try and obtain one
- if not os.path.exists(ud.clonedir) and not os.path.exists(repofile):
- try:
- Fetch.try_mirrors(ud.mirrortarball)
-            except Exception:
- pass
-
- # If the checkout doesn't exist and the mirror tarball does, extract it
- if not os.path.exists(ud.clonedir) and os.path.exists(repofile):
- bb.mkdirhier(ud.clonedir)
- os.chdir(ud.clonedir)
- runfetchcmd("tar -xzf %s" % (repofile), d)
-
-        # If the repo still doesn't exist, fall back to cloning it
- if not os.path.exists(ud.clonedir):
- runfetchcmd("%s clone -n %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
-
- os.chdir(ud.clonedir)
- # Update the checkout if needed
- if not self._contains_ref(ud.tag, d) or 'fullclone' in ud.parm:
- # Remove all but the .git directory
- runfetchcmd("rm * -Rf", d)
- if 'fullclone' in ud.parm:
- runfetchcmd("%s fetch --all" % (ud.basecmd), d)
- else:
- runfetchcmd("%s fetch %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.branch), d)
- runfetchcmd("%s fetch --tags %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
- runfetchcmd("%s prune-packed" % ud.basecmd, d)
- runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
-
- # Generate a mirror tarball if needed
- os.chdir(ud.clonedir)
- mirror_tarballs = data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True)
- if mirror_tarballs != "0" or 'fullclone' in ud.parm:
- logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s %s" % (repofile, os.path.join(".", ".git", "*") ), d)
-
- if 'fullclone' in ud.parm:
- return
-
- if os.path.exists(codir):
- bb.utils.prunedir(codir)
-
- subdir = ud.parm.get("subpath", "")
- if subdir != "":
- if subdir.endswith("/"):
- subdirbase = os.path.basename(subdir[:-1])
- else:
- subdirbase = os.path.basename(subdir)
- else:
- subdirbase = ""
-
- if subdir != "":
- readpathspec = ":%s" % (subdir)
- codir = os.path.join(codir, "git")
- coprefix = os.path.join(codir, subdirbase, "")
- else:
- readpathspec = ""
- coprefix = os.path.join(codir, "git", "")
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- runfetchcmd("%s clone -n %s %s" % (ud.basecmd, ud.clonedir, coprefix), d)
- os.chdir(coprefix)
- runfetchcmd("%s checkout -q -f %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
- else:
- bb.mkdirhier(codir)
- os.chdir(ud.clonedir)
- runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.tag, readpathspec), d)
- runfetchcmd("%s checkout-index -q -f --prefix=%s -a" % (ud.basecmd, coprefix), d)
-
- os.chdir(codir)
- logger.info("Creating tarball of git checkout")
- runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.join(".", "*") ), d)
-
- os.chdir(ud.clonedir)
- bb.utils.prunedir(codir)
-
- def supports_srcrev(self):
- return True
-
- def _contains_ref(self, tag, d):
- basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
- output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
- return output.split()[0] != "0"
-
- def _revision_key(self, url, ud, d, branch=False):
- """
- Return a unique key for the url
- """
- key = 'git:' + ud.host + ud.path.replace('/', '.')
- if branch:
- return key + ud.branch
- else:
- return key
-
- def generate_revision_key(self, url, ud, d, branch=False):
- key = self._revision_key(url, ud, d, branch)
- return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
-
- def _latest_revision(self, url, ud, d):
- """
- Compute the HEAD revision for the url
- """
- if ud.user:
- username = ud.user + '@'
- else:
- username = ""
-
- basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
- cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branch)
- output = runfetchcmd(cmd, d, True)
- if not output:
- raise bb.fetch.FetchError("Fetch command %s gave empty output\n" % (cmd))
- return output.split()[0]
-
- def latest_revision(self, url, ud, d):
- """
- Look in the cache for the latest revision, if not present ask the SCM.
- """
- persisted = bb.persist_data.persist(d)
- revs = persisted['BB_URI_HEADREVS']
-
- key = self.generate_revision_key(url, ud, d, branch=True)
- rev = revs[key]
- if rev is None:
- # Compatibility with old key format, no branch included
- oldkey = self.generate_revision_key(url, ud, d, branch=False)
- rev = revs[oldkey]
- if rev is not None:
- del revs[oldkey]
- else:
- rev = self._latest_revision(url, ud, d)
- revs[key] = rev
-
- return str(rev)
-
- def sortable_revision(self, url, ud, d):
- """
-
- """
- pd = bb.persist_data.persist(d)
- localcounts = pd['BB_URI_LOCALCOUNT']
- key = self.generate_revision_key(url, ud, d, branch=True)
- oldkey = self.generate_revision_key(url, ud, d, branch=False)
-
- latest_rev = self._build_revision(url, ud, d)
- last_rev = localcounts[key + '_rev']
- if last_rev is None:
- last_rev = localcounts[oldkey + '_rev']
- if last_rev is not None:
- del localcounts[oldkey + '_rev']
- localcounts[key + '_rev'] = last_rev
-
- uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
- count = None
- if uselocalcount:
- count = Fetch.localcount_internal_helper(ud, d)
- if count is None:
- count = localcounts[key + '_count']
- if count is None:
- count = localcounts[oldkey + '_count']
- if count is not None:
- del localcounts[oldkey + '_count']
- localcounts[key + '_count'] = count
-
- if last_rev == latest_rev:
- return str(count + "+" + latest_rev)
-
- buildindex_provided = hasattr(self, "_sortable_buildindex")
- if buildindex_provided:
- count = self._sortable_buildindex(url, ud, d, latest_rev)
- if count is None:
- count = "0"
- elif uselocalcount or buildindex_provided:
- count = str(count)
- else:
- count = str(int(count) + 1)
-
- localcounts[key + '_rev'] = latest_rev
- localcounts[key + '_count'] = count
-
- return str(count + "+" + latest_rev)
-
- def _build_revision(self, url, ud, d):
- return ud.tag
-
- def _sortable_buildindex_disabled(self, url, ud, d, rev):
- """
-        Return a suitable buildindex for the revision specified. This is done by counting
-        revisions using "git rev-list", which is only reliable when the full history is
-        available in the local clone.
- """
-
- cwd = os.getcwd()
-
- # Check if we have the rev already
-
- if not os.path.exists(ud.clonedir):
- print("no repo")
- self.go(None, ud, d)
- if not os.path.exists(ud.clonedir):
- logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
- return None
-
- os.chdir(ud.clonedir)
- if not self._contains_ref(rev, d):
- self.go(None, ud, d)
-
- output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
- os.chdir(cwd)
-
- buildindex = "%s" % output.split()[0]
- logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
- return buildindex
diff --git a/bitbake/lib/bb/fetch/hg.py b/bitbake/lib/bb/fetch/hg.py
deleted file mode 100644
index 3c649a6ad0..0000000000
--- a/bitbake/lib/bb/fetch/hg.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for the Mercurial distributed RCS (hg).
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-# Copyright (C) 2007 Robert Schuster
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import MissingParameterError
-from bb.fetch import runfetchcmd
-from bb.fetch import logger
-
-class Hg(Fetch):
- """Class to fetch from mercurial repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with mercurial.
- """
- return ud.type in ['hg']
-
- def forcefetch(self, url, ud, d):
- revTag = ud.parm.get('rev', 'tip')
- return revTag == "tip"
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("hg method needs a 'module' parameter")
-
- ud.module = ud.parm["module"]
-
- # Create paths to mercurial checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- else:
- tag = Fetch.srcrev_internal_helper(ud, d)
- if tag is True:
- ud.revision = self.latest_revision(url, ud, d)
- elif tag:
- ud.revision = tag
- else:
- ud.revision = self.latest_revision(url, ud, d)
-
- ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def _buildhgcommand(self, ud, d, command):
- """
- Build up an hg commandline based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_hg}', d)
-
- proto = ud.parm.get('proto', 'http')
-
- host = ud.host
- if proto == "file":
- host = "/"
- ud.host = "localhost"
-
- if not ud.user:
- hgroot = host + ud.path
- else:
- hgroot = ud.user + "@" + host + ud.path
-
- if command is "info":
- return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
-
-        options = []
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- if command is "fetch":
- cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
- elif command is "pull":
- # do not pass options list; limiting pull to rev causes the local
- # repo not to contain it and immediately following "update" command
- # will crash
- cmd = "%s pull" % (basecmd)
- elif command is "update":
- cmd = "%s update -C %s" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid hg command %s" % command)
-
- return cmd
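For illustration, with a hypothetical SRC_URI of "hg://hg.example.com/repo;module=mymod;rev=abc123" and FETCHCMD_hg set to "hg", the commands built here come out roughly as:

    info:   hg identify -i http://hg.example.com/repo/mymod
    fetch:  hg clone -r abc123 http://hg.example.com/repo/mymod mymod
    pull:   hg pull
    update: hg update -C -r abc123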
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
- updatecmd = self._buildhgcommand(ud, d, "pull")
- logger.info("Update " + loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- runfetchcmd(updatecmd, d)
-
- else:
- fetchcmd = self._buildhgcommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", fetchcmd)
- runfetchcmd(fetchcmd, d)
-
- # Even when we clone (fetch), we still need to update as hg's clone
-        # won't check out the specified revision if it's on a branch
- updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- runfetchcmd(updatecmd, d)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.hg' --exclude '.hgrags'"
-
- os.chdir(ud.pkgdir)
- try:
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
- except:
- t, v, tb = sys.exc_info()
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise t, v, tb
-
- def supports_srcrev(self):
- return True
-
- def _latest_revision(self, url, ud, d):
- """
- Compute tip revision for the url
- """
- output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
- return output.strip()
-
- def _build_revision(self, url, ud, d):
- return ud.revision
-
- def _revision_key(self, url, ud, d):
- """
- Return a unique key for the url
- """
- return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch/local.py b/bitbake/lib/bb/fetch/local.py
deleted file mode 100644
index 6aa9e45768..0000000000
--- a/bitbake/lib/bb/fetch/local.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import bb
-import bb.utils
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import logger
-
-class Local(Fetch):
- def supports(self, url, urldata, d):
- """
- Check to see if a given url represents a local fetch.
- """
- return urldata.type in ['file']
-
- def localpath(self, url, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- """
- path = url.split("://")[1]
- path = path.split(";")[0]
- newpath = path
- if path[0] != "/":
- filespath = data.getVar('FILESPATH', d, 1)
- if filespath:
- newpath = bb.utils.which(filespath, path)
- if not newpath:
- filesdir = data.getVar('FILESDIR', d, 1)
- if filesdir:
- newpath = os.path.join(filesdir, path)
- # We don't set localfile as for this fetcher the file is already local!
- return newpath
-
- def go(self, url, urldata, d):
- """Fetch urls (no-op for Local method)"""
- # no need to fetch local files, we'll deal with them in place.
- return 1
-
- def checkstatus(self, url, urldata, d):
- """
- Check the status of the url
- """
- if urldata.localpath.find("*") != -1:
- logger.info("URL %s looks like a glob and was therefore not checked.", url)
- return True
- if os.path.exists(urldata.localpath):
- return True
- return False
diff --git a/bitbake/lib/bb/fetch/osc.py b/bitbake/lib/bb/fetch/osc.py
deleted file mode 100644
index 8e0423d762..0000000000
--- a/bitbake/lib/bb/fetch/osc.py
+++ /dev/null
@@ -1,143 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-Bitbake "Fetch" implementation for osc (Opensuse build service client).
-Based on the svn "Fetch" implementation.
-
-"""
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb import utils
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import MissingParameterError
-from bb.fetch import runfetchcmd
-from bb.fetch import logger
-
-class Osc(Fetch):
- """Class to fetch a module or modules from Opensuse build server
- repositories."""
-
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with osc.
- """
- return ud.type in ['osc']
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("osc method needs a 'module' parameter.")
-
- ud.module = ud.parm["module"]
-
- # Create paths to osc checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
- ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- else:
- pv = data.getVar("PV", d, 0)
- rev = Fetch.srcrev_internal_helper(ud, d)
- if rev and rev != True:
- ud.revision = rev
- else:
- ud.revision = ""
-
- ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def _buildosccommand(self, ud, d, command):
- """
-        Build up an osc commandline based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_osc}', d)
-
-        proto = ud.parm.get('proto', 'osc')
-
- options = []
-
- config = "-c %s" % self.generate_config(ud, d)
-
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- coroot = self._strip_leading_slashes(ud.path)
-
- if command is "fetch":
- osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
- elif command is "update":
- osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
- else:
- raise FetchError("Invalid osc command %s" % command)
-
- return osccmd
-
- def go(self, loc, ud, d):
- """
- Fetch url
- """
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
- oscupdatecmd = self._buildosccommand(ud, d, "update")
- logger.info("Update "+ loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", oscupdatecmd)
- runfetchcmd(oscupdatecmd, d)
- else:
- oscfetchcmd = self._buildosccommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", oscfetchcmd)
- runfetchcmd(oscfetchcmd, d)
-
-        os.chdir(ud.pkgdir + ud.path)
- # tar them up to a defined filename
- try:
- runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d)
- except:
- t, v, tb = sys.exc_info()
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise t, v, tb
-
- def supports_srcrev(self):
- return False
-
- def generate_config(self, ud, d):
- """
- Generate a .oscrc to be used for this run.
- """
-
- config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
- bb.utils.remove(config_path)
-
- f = open(config_path, 'w')
- f.write("[general]\n")
- f.write("apisrv = %s\n" % ud.host)
- f.write("scheme = http\n")
- f.write("su-wrapper = su -c\n")
- f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
- f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
- f.write("extra-pkgs = gzip\n")
- f.write("\n")
- f.write("[%s]\n" % ud.host)
- f.write("user = %s\n" % ud.parm["user"])
- f.write("pass = %s\n" % ud.parm["pswd"])
- f.close()
-
- return config_path
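For a hypothetical url "osc://api.example.org/prj;module=pkg;user=alice;pswd=secret", the generated oscrc comes out as:

    [general]
    apisrv = api.example.org
    scheme = http
    su-wrapper = su -c
    build-root = <expanded ${WORKDIR}>
    urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm
    extra-pkgs = gzip

    [api.example.org]
    user = alice
    pass = secret

Note that the credentials from the url are written into this file in plain text.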
diff --git a/bitbake/lib/bb/fetch/perforce.py b/bitbake/lib/bb/fetch/perforce.py
deleted file mode 100644
index 222ed7eaaa..0000000000
--- a/bitbake/lib/bb/fetch/perforce.py
+++ /dev/null
@@ -1,206 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from future_builtins import zip
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import logger
-
-class Perforce(Fetch):
- def supports(self, url, ud, d):
- return ud.type in ['p4']
-
- def doparse(url, d):
- parm = {}
- path = url.split("://")[1]
- delim = path.find("@");
- if delim != -1:
- (user, pswd, host, port) = path.split('@')[0].split(":")
- path = path.split('@')[1]
- else:
- (host, port) = data.getVar('P4PORT', d).split(':')
- user = ""
- pswd = ""
-
- if path.find(";") != -1:
- keys=[]
- values=[]
- plist = path.split(';')
- for item in plist:
- if item.count('='):
- (key, value) = item.split('=')
- keys.append(key)
- values.append(value)
-
- parm = dict(zip(keys, values))
- path = "//" + path.split(';')[0]
- host += ":%s" % (port)
- parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
-
- return host, path, user, pswd, parm
- doparse = staticmethod(doparse)
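As a worked (hypothetical) example, the URL

    p4://me:secret:perforce.example.com:1666@depot/proj/src/...;label=REL_1

parses to host "perforce.example.com:1666", path "//depot/proj/src/...", user "me", pswd "secret" and parm {"label": "REL_1", ...}, after which getcset() below adds a "cset" entry by asking the server for the newest changeset matching the label.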
-
- def getcset(d, depot, host, user, pswd, parm):
- p4opt = ""
- if "cset" in parm:
- return parm["cset"];
- if user:
- p4opt += " -u %s" % (user)
- if pswd:
- p4opt += " -P %s" % (pswd)
- if host:
- p4opt += " -p %s" % (host)
-
- p4date = data.getVar("P4DATE", d, 1)
- if "revision" in parm:
- depot += "#%s" % (parm["revision"])
- elif "label" in parm:
- depot += "@%s" % (parm["label"])
- elif p4date:
- depot += "@%s" % (p4date)
-
- p4cmd = data.getVar('FETCHCOMMAND_p4', d, 1)
- logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
- p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
- cset = p4file.readline().strip()
- logger.debug(1, "READ %s", cset)
- if not cset:
- return -1
-
- return cset.split(' ')[1]
- getcset = staticmethod(getcset)
-
- def localpath(self, url, ud, d):
-
- (host, path, user, pswd, parm) = Perforce.doparse(url, d)
-
- # If a label is specified, we use that as our filename
-
- if "label" in parm:
- ud.localfile = "%s.tar.gz" % (parm["label"])
- return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
-
- base = path
- which = path.find('/...')
- if which != -1:
- base = path[:which]
-
- base = self._strip_leading_slashes(base)
-
- cset = Perforce.getcset(d, path, host, user, pswd, parm)
-
- ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
-
- return os.path.join(data.getVar("DL_DIR", d, 1), ud.localfile)
-
- def go(self, loc, ud, d):
- """
- Fetch urls
- """
-
- (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
-
- if depot.find('/...') != -1:
- path = depot[:depot.find('/...')]
- else:
- path = depot
-
- module = parm.get('module', os.path.basename(path))
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- # Get the p4 command
- p4opt = ""
- if user:
- p4opt += " -u %s" % (user)
-
- if pswd:
- p4opt += " -P %s" % (pswd)
-
- if host:
- p4opt += " -p %s" % (host)
-
- p4cmd = data.getVar('FETCHCOMMAND', localdata, 1)
-
- # create temp directory
- logger.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
- raise FetchError(module)
-
- if "label" in parm:
- depot = "%s@%s" % (depot, parm["label"])
- else:
- cset = Perforce.getcset(d, depot, host, user, pswd, parm)
- depot = "%s@%s" % (depot, cset)
-
- os.chdir(tmpfile)
- logger.info("Fetch " + loc)
- logger.info("%s%s files %s", p4cmd, p4opt, depot)
- p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
-
- if not p4file:
- logger.error("Fetch: unable to get the P4 files from %s", depot)
- raise FetchError(module)
-
- count = 0
-
-        for line in p4file:
-            parts = line.split()
-
-            if parts[2] == "delete":
-                continue
-
-            dest = parts[0][len(path)+1:]
-            where = dest.find("#")
-
-            os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], parts[0]))
-            count += 1
-
- if count == 0:
- logger.error("Fetch: No files gathered from the P4 fetch")
- raise FetchError(module)
-
- myret = os.system("tar -czf %s %s" % (ud.localpath, module))
- if myret != 0:
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise FetchError(module)
- # cleanup
- bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch/repo.py b/bitbake/lib/bb/fetch/repo.py
deleted file mode 100644
index 03642e7a0d..0000000000
--- a/bitbake/lib/bb/fetch/repo.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake "Fetch" repo (git) implementation
-
-"""
-
-# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
-#
-# Based on git.py which is:
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import runfetchcmd
-from bb.fetch import logger
-
-class Repo(Fetch):
- """Class to fetch a module or modules from repo (git) repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with repo.
- """
- return ud.type in ["repo"]
-
- def localpath(self, url, ud, d):
- """
- We don"t care about the git rev of the manifests repository, but
- we do care about the manifest to use. The default is "default".
- We also care about the branch or tag to be used. The default is
- "master".
- """
-
- ud.proto = ud.parm.get('protocol', 'git')
- ud.branch = ud.parm.get('branch', 'master')
- ud.manifest = ud.parm.get('manifest', 'default.xml')
- if not ud.manifest.endswith('.xml'):
- ud.manifest += '.xml'
-
- ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
- logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
- return
-
- gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
- repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
- codir = os.path.join(repodir, gitsrcname, ud.manifest)
-
- if ud.user:
- username = ud.user + "@"
- else:
- username = ""
-
- bb.mkdirhier(os.path.join(codir, "repo"))
- os.chdir(os.path.join(codir, "repo"))
- if not os.path.exists(os.path.join(codir, "repo", ".repo")):
- runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
-
- runfetchcmd("repo sync", d)
- os.chdir(codir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.repo' --exclude '.git'"
-
- # Create a cache
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
-
- def supports_srcrev(self):
- return False
-
- def _build_revision(self, url, ud, d):
- return ud.manifest
-
- def _want_sortable_revision(self, url, ud, d):
- return False
diff --git a/bitbake/lib/bb/fetch/ssh.py b/bitbake/lib/bb/fetch/ssh.py
deleted file mode 100644
index 86c76f4e44..0000000000
--- a/bitbake/lib/bb/fetch/ssh.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-'''
-BitBake 'Fetch' implementations
-
-This implementation is for Secure Shell (SSH), and attempts to comply with the
-IETF secsh internet draft:
- http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
-
- Currently does not support the sftp parameters, as this uses scp
- Also does not support the 'fingerprint' connection parameter.
-
-'''
-
-# Copyright (C) 2006 OpenedHand Ltd.
-#
-#
-# Based in part on svk.py:
-# Copyright (C) 2006 Holger Hans Peter Freyther
-# Based on svn.py:
-# Copyright (C) 2003, 2004 Chris Larson
-# Based on functions from the base bb module:
-# Copyright 2003 Holger Schurig
-#
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, os
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-
-
-__pattern__ = re.compile(r'''
- \s* # Skip leading whitespace
- ssh:// # scheme
- ( # Optional username/password block
- (?P<user>\S+) # username
- (:(?P<pass>\S+))? # colon followed by the password (optional)
- )?
- (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
- @
- (?P<host>\S+?) # non-greedy match of the host
- (:(?P<port>[0-9]+))? # colon followed by the port (optional)
- /
- (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
- # and may include the use of '~' to reference the remote home
- # directory
- (?P<sparam>(;[^;]+)*)? # parameters block (optional)
- $
-''', re.VERBOSE)
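For example, the (hypothetical) URL

    ssh://alice@build.example.com:2222/~/tarballs/foo.tar.gz

matches with user='alice', host='build.example.com', port='2222' and path='~/tarballs/foo.tar.gz'; go() below then assembles roughly

    scp -B -r -P 2222 'alice@build.example.com:~/tarballs/foo.tar.gz' '<DL_DIR>/build.example.com'/

Note that the '@' separator itself is mandatory in this pattern, so URLs without a user part will not match.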
-
-class SSH(Fetch):
- '''Class to fetch a module or modules via Secure Shell'''
-
- def supports(self, url, urldata, d):
-        return __pattern__.match(url) is not None
-
- def localpath(self, url, urldata, d):
- m = __pattern__.match(url)
- path = m.group('path')
- host = m.group('host')
- lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
- return lpath
-
- def go(self, url, urldata, d):
- dldir = data.getVar('DL_DIR', d, 1)
-
- m = __pattern__.match(url)
- path = m.group('path')
- host = m.group('host')
- port = m.group('port')
- user = m.group('user')
- password = m.group('pass')
-
- ldir = os.path.join(dldir, host)
- lpath = os.path.join(ldir, os.path.basename(path))
-
- if not os.path.exists(ldir):
- os.makedirs(ldir)
-
- if port:
- port = '-P %s' % port
- else:
- port = ''
-
- if user:
- fr = user
- if password:
- fr += ':%s' % password
- fr += '@%s' % host
- else:
- fr = host
- fr += ':%s' % path
-
- import commands
- cmd = 'scp -B -r %s %s %s/' % (
- port,
- commands.mkarg(fr),
- commands.mkarg(ldir)
- )
-
- (exitstatus, output) = commands.getstatusoutput(cmd)
- if exitstatus != 0:
- print(output)
- raise FetchError('Unable to fetch %s' % url)
diff --git a/bitbake/lib/bb/fetch/svk.py b/bitbake/lib/bb/fetch/svk.py
deleted file mode 100644
index 595a9da255..0000000000
--- a/bitbake/lib/bb/fetch/svk.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-This implementation is for svk. It is based on the svn implementation
-
-"""
-
-# Copyright (C) 2006 Holger Hans Peter Freyther
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import MissingParameterError
-from bb.fetch import logger
-
-class Svk(Fetch):
- """Class to fetch a module or modules from svk repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with svk.
- """
- return ud.type in ['svk']
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("svk method needs a 'module' parameter")
- else:
- ud.module = ud.parm["module"]
-
- ud.revision = ud.parm.get('rev', "")
-
- ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def forcefetch(self, url, ud, d):
- return ud.date == "now"
-
- def go(self, loc, ud, d):
- """Fetch urls"""
-
- svkroot = ud.host + ud.path
-
- svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
-
- if ud.revision:
- svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)
-
- # create temp directory
- localdata = data.createCopy(d)
- data.update_data(localdata)
- logger.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- logger.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.")
- raise FetchError(ud.module)
-
- # check out sources there
- os.chdir(tmpfile)
- logger.info("Fetch " + loc)
- logger.debug(1, "Running %s", svkcmd)
- myret = os.system(svkcmd)
- if myret != 0:
- try:
- os.rmdir(tmpfile)
- except OSError:
- pass
- raise FetchError(ud.module)
-
- os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
- # tar them up to a defined filename
- myret = os.system("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)))
- if myret != 0:
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise FetchError(ud.module)
- # cleanup
- bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch/svn.py b/bitbake/lib/bb/fetch/svn.py
deleted file mode 100644
index 8f053abf74..0000000000
--- a/bitbake/lib/bb/fetch/svn.py
+++ /dev/null
@@ -1,204 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for svn.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch import Fetch
-from bb.fetch import FetchError
-from bb.fetch import MissingParameterError
-from bb.fetch import runfetchcmd
-from bb.fetch import logger
-
-class Svn(Fetch):
- """Class to fetch a module or modules from svn repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with svn.
- """
- return ud.type in ['svn']
-
- def localpath(self, url, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("svn method needs a 'module' parameter")
-
- ud.module = ud.parm["module"]
-
- # Create paths to svn checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
-
- if 'rev' in ud.parm:
- ud.date = ""
- ud.revision = ud.parm['rev']
-        elif 'date' in ud.parm:
- ud.date = ud.parm['date']
- ud.revision = ""
- else:
- #
- # ***Nasty hack***
- # If DATE in unexpanded PV, use ud.date (which is set from SRCDATE)
- # Should warn people to switch to SRCREV here
- #
- pv = data.getVar("PV", d, 0)
- if "DATE" in pv:
- ud.revision = ""
- else:
- rev = Fetch.srcrev_internal_helper(ud, d)
- if rev is True:
- ud.revision = self.latest_revision(url, ud, d)
- ud.date = ""
- elif rev:
- ud.revision = rev
- ud.date = ""
- else:
- ud.revision = ""
-
- ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def _buildsvncommand(self, ud, d, command):
- """
- Build up an svn commandline based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_svn}', d)
-
- proto = ud.parm.get('proto', 'svn')
-
- svn_rsh = None
- if proto == "svn+ssh" and "rsh" in ud.parm:
- svn_rsh = ud.parm["rsh"]
-
- svnroot = ud.host + ud.path
-
- # either use the revision, or SRCDATE in braces,
- options = []
-
- if ud.user:
- options.append("--username %s" % ud.user)
-
- if ud.pswd:
- options.append("--password %s" % ud.pswd)
-
- if command is "info":
- svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
- else:
- suffix = ""
- if ud.revision:
- options.append("-r %s" % ud.revision)
- suffix = "@%s" % (ud.revision)
- elif ud.date:
- options.append("-r {%s}" % ud.date)
-
- if command is "fetch":
- svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
- elif command is "update":
- svncmd = "%s update %s" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid svn command %s" % command)
-
- if svn_rsh:
- svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
-
- return svncmd
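For a hypothetical SRC_URI of "svn://svn.example.com/trunk;module=widget;rev=1234" with FETCHCMD_svn set to "svn", the commands built here come out roughly as:

    info:   svn info svn://svn.example.com/trunk/widget/
    fetch:  svn co -r 1234 svn://svn.example.com/trunk/widget@1234 widget
    update: svn update -r 1234

The -r option and the @peg-revision suffix are only added outside the "info" case.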
-
- def go(self, loc, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
- svnupdatecmd = self._buildsvncommand(ud, d, "update")
- logger.info("Update " + loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", svnupdatecmd)
- runfetchcmd(svnupdatecmd, d)
- else:
- svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", svnfetchcmd)
- runfetchcmd(svnfetchcmd, d)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.svn'"
-
- os.chdir(ud.pkgdir)
- # tar them up to a defined filename
- try:
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d)
- except:
- t, v, tb = sys.exc_info()
- try:
- os.unlink(ud.localpath)
- except OSError:
- pass
- raise t, v, tb
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, url, ud, d):
- """
- Return a unique key for the url
- """
- return "svn:" + ud.moddir
-
- def _latest_revision(self, url, ud, d):
- """
- Return the latest upstream revision number
- """
- logger.debug(2, "SVN fetcher hitting network for %s", url)
-
- output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
-
- revision = None
- for line in output.splitlines():
- if "Last Changed Rev" in line:
- revision = line.split(":")[1].strip()
-
- return revision
-
- def _sortable_revision(self, url, ud, d):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return self._build_revision(url, ud, d)
-
- def _build_revision(self, url, ud, d):
- return ud.revision
diff --git a/bitbake/lib/bb/fetch/wget.py b/bitbake/lib/bb/fetch/wget.py
deleted file mode 100644
index 4d4bdfd493..0000000000
--- a/bitbake/lib/bb/fetch/wget.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import logging
-import bb
-import urllib
-from bb import data
-from bb.fetch import Fetch, FetchError, encodeurl, decodeurl, logger, runfetchcmd
-
-class Wget(Fetch):
- """Class to fetch urls via 'wget'"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with wget.
- """
- return ud.type in ['http', 'https', 'ftp']
-
- def localpath(self, url, ud, d):
-
- url = encodeurl([ud.type, ud.host, ud.path, ud.user, ud.pswd, {}])
- ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
-
- return os.path.join(data.getVar("DL_DIR", d, True), ud.localfile)
-
- def go(self, uri, ud, d, checkonly = False):
- """Fetch urls"""
-
- def fetch_uri(uri, ud, d):
- if checkonly:
- fetchcmd = data.getVar("CHECKCOMMAND", d, 1)
- elif os.path.exists(ud.localpath):
-                # file exists, but we didn't complete it... trying again.
- fetchcmd = data.getVar("RESUMECOMMAND", d, 1)
- else:
- fetchcmd = data.getVar("FETCHCOMMAND", d, 1)
-
- uri = uri.split(";")[0]
- uri_decoded = list(decodeurl(uri))
- uri_type = uri_decoded[0]
- uri_host = uri_decoded[1]
-
- fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
- fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
- logger.info("fetch " + uri)
- logger.debug(2, "executing " + fetchcmd)
- runfetchcmd(fetchcmd, d)
-
-        # Sanity check since wget can pretend it succeeded when it didn't
- # Also, this used to happen if sourceforge sent us to the mirror page
- if not os.path.exists(ud.localpath) and not checkonly:
- logger.debug(2, "The fetch command for %s returned success but %s doesn't exist?...", uri, ud.localpath)
- return False
-
- return True
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- if fetch_uri(uri, ud, localdata):
- return True
-
- raise FetchError(uri)
-
-
- def checkstatus(self, uri, ud, d):
- return self.go(uri, ud, d, True)
diff --git a/bitbake/lib/bb/fetch2/__init__.py b/bitbake/lib/bb/fetch2/__init__.py
deleted file mode 100644
index 4e03fc9884..0000000000
--- a/bitbake/lib/bb/fetch2/__init__.py
+++ /dev/null
@@ -1,1074 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from __future__ import absolute_import
-from __future__ import print_function
-import os, re
-import logging
-import bb
-from bb import data
-from bb import persist_data
-from bb import utils
-
-__version__ = "2"
-
-logger = logging.getLogger("BitBake.Fetcher")
-
-class BBFetchException(Exception):
- """Class all fetch exceptions inherit from"""
- def __init__(self, message):
- self.msg = message
- Exception.__init__(self, message)
-
- def __str__(self):
- return self.msg
-
-class MalformedUrl(BBFetchException):
- """Exception raised when encountering an invalid url"""
- def __init__(self, url):
- msg = "The URL: '%s' is invalid and cannot be interpreted" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = url
-
-class FetchError(BBFetchException):
- """General fetcher exception when something happens incorrectly"""
- def __init__(self, message, url = None):
- msg = "Fetcher failure for URL: '%s'. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
-
-class UnpackError(BBFetchException):
- """General fetcher exception when something happens incorrectly when unpacking"""
- def __init__(self, message, url):
- msg = "Unpack failure for URL: '%s'. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
-
-class NoMethodError(BBFetchException):
- """Exception raised when there is no method to obtain a supplied url or set of urls"""
- def __init__(self, url):
- msg = "Could not find a fetcher which supports the URL: '%s'" % url
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = url
-
-class MissingParameterError(BBFetchException):
- """Exception raised when a fetch method is missing a critical parameter in the url"""
- def __init__(self, missing, url):
- msg = "URL: '%s' is missing the required parameter '%s'" % (url, missing)
- self.url = url
- self.missing = missing
- BBFetchException.__init__(self, msg)
- self.args = (missing, url)
-
-class ParameterError(BBFetchException):
- """Exception raised when a url cannot be proccessed due to invalid parameters."""
- def __init__(self, message, url):
- msg = "URL: '%s' has invalid parameters. %s" % (url, message)
- self.url = url
- BBFetchException.__init__(self, msg)
- self.args = (message, url)
-
-class MD5SumError(BBFetchException):
- """Exception raised when a MD5 checksum of a file does not match for a downloaded file"""
- def __init__(self, path, wanted, got, url):
- msg = "File: '%s' has md5 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
- self.url = url
- self.path = path
- self.wanted = wanted
- self.got = got
- BBFetchException.__init__(self, msg)
- self.args = (path, wanted, got, url)
-
-class SHA256SumError(MD5SumError):
- """Exception raised when a SHA256 checksum of a file does not match for a downloaded file"""
- def __init__(self, path, wanted, got, url):
- msg = "File: '%s' has sha256 checksum %s when %s was expected (from URL: '%s')" % (path, got, wanted, url)
- self.url = url
- self.path = path
- self.wanted = wanted
- self.got = got
- BBFetchException.__init__(self, msg)
- self.args = (path, wanted, got, url)
-
-class NetworkAccess(BBFetchException):
- """Exception raised when network access is disabled but it is required."""
- def __init__(self, url, cmd):
- msg = "Network access disabled through BB_NO_NETWORK but access rquested with command %s (for url %s)" % (cmd, url)
- self.url = url
- self.cmd = cmd
- BBFetchException.__init__(self, msg)
- self.args = (url, cmd)
-
-
-def decodeurl(url):
- """Decodes an URL into the tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url)
- if not m:
- raise MalformedUrl(url)
-
- type = m.group('type')
- location = m.group('location')
- if not location:
- raise MalformedUrl(url)
- user = m.group('user')
- parm = m.group('parm')
-
- locidx = location.find('/')
- if locidx != -1 and type.lower() != 'file':
- host = location[:locidx]
- path = location[locidx:]
- else:
- host = ""
- path = location
- if user:
- m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user)
- if m:
- user = m.group('user')
- pswd = m.group('pswd')
- else:
- user = ''
- pswd = ''
-
- p = {}
- if parm:
- for s in parm.split(';'):
- s1, s2 = s.split('=')
- p[s1] = s2
-
- return (type, host, path, user, pswd, p)
-
-def encodeurl(decoded):
- """Encodes a URL from tokens (scheme, network location, path,
- user, password, parameters).
- """
-
- (type, host, path, user, pswd, p) = decoded
-
- if not path:
- raise MissingParameterError('path', "encoded from the data %s" % str(decoded))
- if not type:
- raise MissingParameterError('type', "encoded from the data %s" % str(decoded))
- url = '%s://' % type
- if user and type != "file":
- url += "%s" % user
- if pswd:
- url += ":%s" % pswd
- url += "@"
- if host and type != "file":
- url += "%s" % host
- url += "%s" % path
- if p:
- for parm in p:
- url += ";%s=%s" % (parm, p[parm])
-
- return url
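A hypothetical round trip through the two helpers:

    decodeurl("git://git.example.com/repo.git;protocol=http;branch=dev")
    # -> ('git', 'git.example.com', '/repo.git', '', '',
    #     {'protocol': 'http', 'branch': 'dev'})

    encodeurl(('git', 'git.example.com', '/repo.git', '', '',
               {'protocol': 'http', 'branch': 'dev'}))
    # -> "git://git.example.com/repo.git;protocol=http;branch=dev"

Parameter order in the re-encoded URL depends on dict iteration order.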
-
-def uri_replace(ud, uri_find, uri_replace, d):
- if not ud.url or not uri_find or not uri_replace:
- logger.debug(1, "uri_replace: passed an undefined value, not replacing")
- uri_decoded = list(decodeurl(ud.url))
- uri_find_decoded = list(decodeurl(uri_find))
- uri_replace_decoded = list(decodeurl(uri_replace))
- result_decoded = ['', '', '', '', '', {}]
- for i in uri_find_decoded:
- loc = uri_find_decoded.index(i)
- result_decoded[loc] = uri_decoded[loc]
- if isinstance(i, basestring):
- if (re.match(i, uri_decoded[loc])):
- result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc])
- if uri_find_decoded.index(i) == 2:
- if ud.mirrortarball:
- result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.mirrortarball))
- elif ud.localpath:
- result_decoded[loc] = os.path.join(os.path.dirname(result_decoded[loc]), os.path.basename(ud.localpath))
- else:
- return ud.url
- return encodeurl(result_decoded)
-
-methods = []
-urldata_cache = {}
-saved_headrevs = {}
-
-def fetcher_init(d):
- """
- Called to initialize the fetchers once the configuration data is known.
- Calls before this must not hit the cache.
- """
- pd = persist_data.persist(d)
- # When to drop SCM head revisions controlled by user policy
- srcrev_policy = bb.data.getVar('BB_SRCREV_POLICY', d, True) or "clear"
- if srcrev_policy == "cache":
- logger.debug(1, "Keeping SRCREV cache due to cache policy of: %s", srcrev_policy)
- elif srcrev_policy == "clear":
- logger.debug(1, "Clearing SRCREV cache due to cache policy of: %s", srcrev_policy)
- try:
- bb.fetch2.saved_headrevs = pd['BB_URI_HEADREVS'].items()
-        except Exception:
- pass
- del pd['BB_URI_HEADREVS']
- else:
- raise FetchError("Invalid SRCREV cache policy of: %s" % srcrev_policy)
-
- for m in methods:
- if hasattr(m, "init"):
- m.init(d)
-
-def fetcher_compare_revisions(d):
- """
-    Compare the revisions in the persistent cache with current values and
- return true/false on whether they've changed.
- """
-
- pd = persist_data.persist(d)
-    data = dict(pd['BB_URI_HEADREVS'].items())
-    data2 = dict(bb.fetch2.saved_headrevs)
-
-    for key in data:
-        if key not in data2 or data2[key] != data[key]:
-            logger.debug(1, "%s changed", key)
-            return True
-        else:
-            logger.debug(2, "%s did not change", key)
-    return False
-
-def mirror_from_string(data):
- return [ i.split() for i in (data or "").replace('\\n','\n').split('\n') if i ]
-
-def verify_checksum(u, ud, d):
- """
-    Verify the MD5 and SHA256 checksums for a downloaded file.
-
-    Raises MD5SumError or SHA256SumError when a checksum does not match.
-    If a checksum is missing from the recipe, BB_STRICT_CHECKSUM decides
-    the outcome: "1" raises a FetchError, otherwise a warning is logged
-    and the file is accepted.
- """
-
- if not ud.type in ["http", "https", "ftp", "ftps"]:
- return
-
- md5data = bb.utils.md5_file(ud.localpath)
- sha256data = bb.utils.sha256_file(ud.localpath)
-
-    if ud.md5_expected is None or ud.sha256_expected is None:
- logger.warn('Missing SRC_URI checksum for %s, consider adding to the recipe:\n'
- 'SRC_URI[%s] = "%s"\nSRC_URI[%s] = "%s"',
- ud.localpath, ud.md5_name, md5data,
- ud.sha256_name, sha256data)
- if bb.data.getVar("BB_STRICT_CHECKSUM", d, True) == "1":
- raise FetchError("No checksum specified for %s." % u, u)
- return
-
- if ud.md5_expected != md5data:
- raise MD5SumError(ud.localpath, ud.md5_expected, md5data, u)
-
- if ud.sha256_expected != sha256data:
- raise SHA256SumError(ud.localpath, ud.sha256_expected, sha256data, u)
-
-def subprocess_setup():
- import signal
- # Python installs a SIGPIPE handler by default. This is usually not what
- # non-Python subprocesses expect.
- # SIGPIPE errors are known issues with gzip/bash
- signal.signal(signal.SIGPIPE, signal.SIG_DFL)
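The intended use is as a preexec_fn, so that a shell pipeline run in a child process sees the default SIGPIPE disposition; a sketch (the command is illustrative):

    import subprocess
    subprocess.call("gzip -dc archive.tar.gz | tar -xf -", shell=True,
                    preexec_fn=subprocess_setup)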
-
-def get_autorev(d):
-    # do not cache the srcrev in the autorev case
- if bb.data.getVar('BB_SRCREV_POLICY', d, True) != "cache":
- bb.data.setVar('__BB_DONT_CACHE', '1', d)
- return "AUTOINC"
-
-def get_srcrev(d):
- """
- Return the version string for the current package
- (usually to be used as PV)
- Most packages usually only have one SCM so we just pass on the call.
- In the multi SCM case, we build a value based on SRCREV_FORMAT which must
- have been set.
- """
-
- scms = []
- fetcher = Fetch(bb.data.getVar('SRC_URI', d, True).split(), d)
- urldata = fetcher.ud
- for u in urldata:
- if urldata[u].method.supports_srcrev():
- scms.append(u)
-
- if len(scms) == 0:
- raise FetchError("SRCREV was used yet no valid SCM was found in SRC_URI")
-
- if len(scms) == 1 and len(urldata[scms[0]].names) == 1:
- return urldata[scms[0]].method.sortable_revision(scms[0], urldata[scms[0]], d, urldata[scms[0]].names[0])
-
- #
-    # Multiple SCMs are in SRC_URI so we resort to SRCREV_FORMAT
- #
- format = bb.data.getVar('SRCREV_FORMAT', d, True)
- if not format:
- raise FetchError("The SRCREV_FORMAT variable must be set when multiple SCMs are used.")
-
- for scm in scms:
- ud = urldata[scm]
- for name in ud.names:
- rev = ud.method.sortable_revision(scm, ud, d, name)
- format = format.replace(name, rev)
-
- return format
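As a worked (hypothetical) example with two named SCMs:

    SRC_URI = "git://host/a.git;name=machine git://host/b.git;name=meta"
    SRCREV_FORMAT = "machine_meta"

the loop substitutes each name with its sortable revision, yielding something like "3+f1d2d2f9..._7+e242ed3b...". The substitution is plain string replacement, so names that are substrings of one another can clash.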
-
-def localpath(url, d):
- fetcher = bb.fetch2.Fetch([url], d)
- return fetcher.localpath(url)
-
-def runfetchcmd(cmd, d, quiet = False, cleanup = None):
-    """
-    Run cmd returning the command output
-    Raise an error if interrupted or cmd fails
-    Optionally echo command output to stdout
-    Optionally remove the files/directories listed in cleanup upon failure
-    """
-    # Avoid the mutable default argument pitfall
-    cleanup = cleanup or []
-
- # Need to export PATH as binary could be in metadata paths
- # rather than host provided
- # Also include some other variables.
-    # FIXME: Should really include all exported variables?
- exportvars = ['PATH', 'GIT_PROXY_COMMAND', 'GIT_PROXY_HOST',
- 'GIT_PROXY_PORT', 'GIT_CONFIG', 'http_proxy', 'ftp_proxy',
- 'https_proxy', 'no_proxy', 'ALL_PROXY', 'all_proxy',
- 'SSH_AUTH_SOCK', 'SSH_AGENT_PID', 'HOME']
-
- for var in exportvars:
- val = data.getVar(var, d, True)
- if val:
- cmd = 'export ' + var + '=\"%s\"; %s' % (val, cmd)
-
- logger.debug(1, "Running %s", cmd)
-
- # redirect stderr to stdout
- stdout_handle = os.popen(cmd + " 2>&1", "r")
- output = ""
-
- while True:
- line = stdout_handle.readline()
- if not line:
- break
- if not quiet:
- print(line, end=' ')
- output += line
-
- status = stdout_handle.close() or 0
- # os.popen().close() returns a wait()-style status: the signal number is
- # in the low byte and the exit code in the high byte.
- signal = status & 0xff
- exitstatus = status >> 8
-
- if signal or exitstatus:
- for f in cleanup:
- try:
- bb.utils.remove(f, True)
- except OSError:
- pass
-
- if signal:
- raise FetchError("Fetch command %s failed with signal %s, output:\n%s" % (cmd, signal, output))
- elif exitstatus != 0:
- raise FetchError("Fetch command %s failed with exit code %s, output:\n%s" % (cmd, exitstatus, output))
-
- return output
-
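-# Minimal usage sketch (hypothetical command): runfetchcmd() exports the
-# PATH/proxy variables listed above, runs the command through a shell and
-# returns its output, raising FetchError on failure, e.g.
-#
-#   output = runfetchcmd("git ls-remote git://a.example/repo.git", d, quiet=True)
-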
-def check_network_access(d, info = "", url = None):
- """
- Log remote network access, and raise NetworkAccess if BB_NO_NETWORK is set
- """
- if bb.data.getVar("BB_NO_NETWORK", d, True) == "1":
- raise NetworkAccess(url, info)
- else:
- logger.debug(1, "Fetcher accessed the network with the command %s" % info)
-
-def try_mirrors(d, origud, mirrors, check = False):
- """
- Try to use a mirrored version of the sources.
- This method will be automatically called before the fetchers go.
-
- d is a bb.data instance
- origud is the FetchData for the original uri we're trying to download
- mirrors is the list of (find, replace) mirror pairs we're going to try
- """
- ld = d.createCopy()
- for line in mirrors:
- try:
- (find, replace) = line
- except ValueError:
- continue
- newuri = uri_replace(origud, find, replace, ld)
- if newuri == origud.url:
- continue
- try:
- ud = FetchData(newuri, ld)
- ud.setup_localpath(ld)
-
- if check:
- found = ud.method.checkstatus(newuri, ud, ld)
- if found:
- return found
- continue
-
- if ud.method.need_update(newuri, ud, ld):
- ud.method.download(newuri, ud, ld)
- if hasattr(ud.method,"build_mirror_data"):
- ud.method.build_mirror_data(newuri, ud, ld)
-
- if not ud.localpath or not os.path.exists(ud.localpath):
- continue
-
- if ud.localpath == origud.localpath:
- return ud.localpath
-
- # We may be obtaining a mirror tarball which needs further processing by the real fetcher
- # If that tarball is a local file:// we need to provide a symlink to it
- dldir = ld.getVar("DL_DIR", True)
- if os.path.basename(ud.localpath) != os.path.basename(origud.localpath):
- dest = os.path.join(dldir, os.path.basename(ud.localpath))
- if not os.path.exists(dest):
- os.symlink(ud.localpath, dest)
- return None
- # Otherwise the result is a local file:// and we symlink to it
- if not os.path.exists(origud.localpath):
- os.symlink(ud.localpath, origud.localpath)
- return ud.localpath
-
- except bb.fetch2.NetworkAccess:
- raise
-
- except bb.fetch2.BBFetchException as e:
- logger.debug(1, "Mirror fetch failure for url %s (original url: %s)" % (newuri, origud.url))
- logger.debug(1, str(e))
- try:
- if os.path.isfile(ud.localpath):
- bb.utils.remove(ud.localpath)
- except UnboundLocalError:
- pass
- continue
- return None
-
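-# For illustration (hypothetical variable values): callers pass the already
-# split mirror list, as the download/checkstatus code further below does:
-#
-#   mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', d, True))
-#   path = try_mirrors(d, origud, mirrors)   # None if no mirror worked
-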
-def srcrev_internal_helper(ud, d, name):
- """
- Return:
- a) a source revision if specified
- b) latest revision if SRCREV="AUTOINC"
- c) None if not specified
- """
-
- if 'rev' in ud.parm:
- return ud.parm['rev']
-
- if 'tag' in ud.parm:
- return ud.parm['tag']
-
- rev = None
- pn = data.getVar("PN", d, True)
- if name != '':
- rev = data.getVar("SRCREV_%s_pn-%s" % (name, pn), d, True)
- if not rev:
- rev = data.getVar("SRCREV_%s" % name, d, True)
- if not rev:
- rev = data.getVar("SRCREV_pn-%s" % pn, d, True)
- if not rev:
- rev = data.getVar("SRCREV", d, True)
- if rev == "INVALID":
- raise FetchError("Please set SRCREV to a valid value", ud.url)
- if rev == "AUTOINC":
- rev = ud.method.latest_revision(ud.url, ud, d, name)
-
- return rev
-
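-# Illustrative precedence (hypothetical recipe variables): for name "meta" and
-# PN "foo", srcrev_internal_helper() consults, in order:
-#
-#   SRCREV_meta_pn-foo, SRCREV_meta, SRCREV_pn-foo, SRCREV
-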
-class FetchData(object):
- """
- A class which represents the fetcher state for a given URI.
- """
- def __init__(self, url, d):
- # localpath is the location of a downloaded result. If not set, the file is local.
- self.donestamp = None
- self.localfile = ""
- self.localpath = None
- self.lockfile = None
- self.mirrortarball = None
- (self.type, self.host, self.path, self.user, self.pswd, self.parm) = decodeurl(data.expand(url, d))
- self.date = self.getSRCDate(d)
- self.url = url
- if not self.user and "user" in self.parm:
- self.user = self.parm["user"]
- if not self.pswd and "pswd" in self.parm:
- self.pswd = self.parm["pswd"]
- self.setup = False
-
- if "name" in self.parm:
- self.md5_name = "%s.md5sum" % self.parm["name"]
- self.sha256_name = "%s.sha256sum" % self.parm["name"]
- else:
- self.md5_name = "md5sum"
- self.sha256_name = "sha256sum"
- self.md5_expected = bb.data.getVarFlag("SRC_URI", self.md5_name, d)
- self.sha256_expected = bb.data.getVarFlag("SRC_URI", self.sha256_name, d)
-
- self.names = self.parm.get("name",'default').split(',')
-
- self.method = None
- for m in methods:
- if m.supports(url, self, d):
- self.method = m
- break
-
- if not self.method:
- raise NoMethodError(url)
-
- if self.method.supports_srcrev():
- self.revisions = {}
- for name in self.names:
- self.revisions[name] = srcrev_internal_helper(self, d, name)
-
- # Compatibility code for the case where no name was specified
- if len(self.names) == 1:
- self.revision = self.revisions[self.names[0]]
-
- if hasattr(self.method, "urldata_init"):
- self.method.urldata_init(self, d)
-
- if "localpath" in self.parm:
- # if user sets localpath for file, use it instead.
- self.localpath = self.parm["localpath"]
- self.basename = os.path.basename(self.localpath)
- elif self.localfile:
- self.localpath = self.method.localpath(self.url, self, d)
-
- if self.localfile and self.localpath:
- # Note: These files should always be in DL_DIR whereas localpath may not be.
- basepath = bb.data.expand("${DL_DIR}/%s" % os.path.basename(self.localpath), d)
- self.donestamp = basepath + '.done'
- self.lockfile = basepath + '.lock'
-
- def setup_localpath(self, d):
- if not self.localpath:
- self.localpath = self.method.localpath(self.url, self, d)
-
- def getSRCDate(self, d):
- """
- Return the SRC Date for the component
-
- d the bb.data instance
- """
- if "srcdate" in self.parm:
- return self.parm['srcdate']
-
- pn = data.getVar("PN", d, True)
-
- if pn:
- return data.getVar("SRCDATE_%s" % pn, d, True) or data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True)
-
- return data.getVar("SRCDATE", d, True) or data.getVar("DATE", d, True)
-
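-# Minimal sketch (hypothetical URL): constructing a FetchData decodes the URL,
-# picks the first registered FetchMethod whose supports() accepts it, and
-# resolves any SRCREV-style revisions.
-#
-#   ud = FetchData("http://a.example/src.tar.gz;name=default", d)
-#   ud.setup_localpath(d)    # fills in ud.localpath via the chosen method
-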
-class FetchMethod(object):
- """Base class for 'fetch'ing data"""
-
- def __init__(self, urls=None):
- # Avoid a mutable default argument; keep any urls we were given.
- self.urls = urls or []
-
- def supports(self, url, urldata, d):
- """
- Check to see if this fetch class supports a given url.
- """
- return False
-
- def localpath(self, url, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- Can also setup variables in urldata for use in go (saving code duplication
- and duplicate code execution)
- """
- return os.path.join(data.getVar("DL_DIR", d, True), urldata.localfile)
-
- def _strip_leading_slashes(self, relpath):
- """
- Remove leading slash as os.path.join can't cope
- """
- while os.path.isabs(relpath):
- relpath = relpath[1:]
- return relpath
-
- def setUrls(self, urls):
- self.__urls = urls
-
- def getUrls(self):
- return self.__urls
-
- urls = property(getUrls, setUrls, None, "Urls property")
-
- def need_update(self, url, ud, d):
- """
- Force a fetch, even if localpath exists?
- """
- if os.path.exists(ud.localpath):
- return False
- return True
-
- def supports_srcrev(self):
- """
- The fetcher supports auto source revisions (SRCREV)
- """
- return False
-
- def download(self, url, urldata, d):
- """
- Fetch urls
- Assumes localpath was called first
- """
- raise NoMethodError(url)
-
- def unpack(self, urldata, rootdir, data):
- import subprocess
- iterate = False
- file = urldata.localpath
-
- try:
- unpack = bb.utils.to_boolean(urldata.parm.get('unpack'), True)
- except ValueError:
- bb.fatal("Invalid value for 'unpack' parameter for %s: %s" %
- (file, urldata.parm.get('unpack')))
-
- dots = file.split(".")
- if dots[-1] in ['gz', 'bz2', 'Z']:
- efile = os.path.join(bb.data.getVar('WORKDIR', data, True), os.path.basename('.'.join(dots[0:-1])))
- else:
- efile = file
- cmd = None
-
- if unpack:
- if file.endswith('.tar'):
- cmd = 'tar x --no-same-owner -f %s' % file
- elif file.endswith('.tgz') or file.endswith('.tar.gz') or file.endswith('.tar.Z'):
- cmd = 'tar xz --no-same-owner -f %s' % file
- elif file.endswith('.tbz') or file.endswith('.tbz2') or file.endswith('.tar.bz2'):
- cmd = 'bzip2 -dc %s | tar x --no-same-owner -f -' % file
- elif file.endswith('.gz') or file.endswith('.Z') or file.endswith('.z'):
- cmd = 'gzip -dc %s > %s' % (file, efile)
- elif file.endswith('.bz2'):
- cmd = 'bzip2 -dc %s > %s' % (file, efile)
- elif file.endswith('.tar.xz'):
- cmd = 'xz -dc %s | tar x --no-same-owner -f -' % file
- elif file.endswith('.xz'):
- cmd = 'xz -dc %s > %s' % (file, efile)
- elif file.endswith('.zip') or file.endswith('.jar'):
- try:
- dos = bb.utils.to_boolean(urldata.parm.get('dos'), False)
- except ValueError:
- bb.fatal("Invalid value for 'dos' parameter for %s: %s" %
- (file, urldata.parm.get('dos')))
- cmd = 'unzip -q -o'
- if dos:
- cmd = '%s -a' % cmd
- cmd = "%s '%s'" % (cmd, file)
- elif file.endswith('.src.rpm') or file.endswith('.srpm'):
- if 'extract' in urldata.parm:
- unpack_file = urldata.parm.get('extract')
- cmd = 'rpm2cpio.sh %s | cpio -i %s' % (file, unpack_file)
- iterate = True
- iterate_file = unpack_file
- else:
- cmd = 'rpm2cpio.sh %s | cpio -i' % (file)
-
- if not unpack or not cmd:
- # If file == dest, then avoid any copies, as we already put the file into dest!
- dest = os.path.join(rootdir, os.path.basename(file))
- if (file != dest) and not (os.path.exists(dest) and os.path.samefile(file, dest)):
- if os.path.isdir(file):
- filesdir = os.path.realpath(bb.data.getVar("FILESDIR", data, True))
- destdir = "."
- if file[0:len(filesdir)] == filesdir:
- destdir = file[len(filesdir):file.rfind('/')]
- destdir = destdir.strip('/')
- if len(destdir) < 1:
- destdir = "."
- elif not os.access("%s/%s" % (rootdir, destdir), os.F_OK):
- os.makedirs("%s/%s" % (rootdir, destdir))
- cmd = 'cp -pPR %s %s/%s/' % (file, rootdir, destdir)
- else:
- # The "destdir" handling was specifically done for FILESPATH
- # items. So, only do so for file:// entries.
- if urldata.type == "file" and urldata.path.find("/") != -1:
- destdir = urldata.path.rsplit("/", 1)[0]
- else:
- destdir = "."
- bb.mkdirhier("%s/%s" % (rootdir, destdir))
- cmd = 'cp %s %s/%s/' % (file, rootdir, destdir)
-
- if not cmd:
- return
-
- # Change to subdir before executing command
- save_cwd = os.getcwd()
- os.chdir(rootdir)
- if 'subdir' in urldata.parm:
- newdir = ("%s/%s" % (rootdir, urldata.parm.get('subdir')))
- bb.mkdirhier(newdir)
- os.chdir(newdir)
-
- cmd = "PATH=\"%s\" %s" % (bb.data.getVar('PATH', data, True), cmd)
- bb.note("Unpacking %s to %s/" % (file, os.getcwd()))
- ret = subprocess.call(cmd, preexec_fn=subprocess_setup, shell=True)
-
- os.chdir(save_cwd)
-
- if ret != 0:
- raise UnpackError("Unpack command %s failed with return value %s" % (cmd, ret), urldata.url)
-
- if iterate is True:
- iterate_urldata = urldata
- iterate_urldata.localpath = "%s/%s" % (rootdir, iterate_file)
- self.unpack(urldata, rootdir, data)
-
- return
-
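- # For illustration (hypothetical SRC_URI entries): unpack() honours URL
- # parameters such as
- #   http://a.example/file.zip;unpack=0     (copy only, don't extract)
- #   http://a.example/file.zip;dos=1        (unzip with -a text conversion)
- #   http://a.example/x.tar.gz;subdir=third (extract into <rootdir>/third)
-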
- def clean(self, urldata, d):
- """
- Clean any existing full or partial download
- """
- bb.utils.remove(urldata.localpath)
-
- def try_premirror(self, url, urldata, d):
- """
- Should premirrors be used?
- """
- return True
-
- def checkstatus(self, url, urldata, d):
- """
- Check the status of a URL
- Assumes localpath was called first
- """
- logger.info("URL %s could not be checked for status since no method exists.", url)
- return True
-
- def localcount_internal_helper(ud, d, name):
- """
- Return:
- a) a locked localcount if specified
- b) None otherwise
- """
-
- localcount = None
- if name != '':
- pn = data.getVar("PN", d, True)
- localcount = data.getVar("LOCALCOUNT_" + name, d, True)
- if not localcount:
- localcount = data.getVar("LOCALCOUNT", d, True)
- return localcount
-
- localcount_internal_helper = staticmethod(localcount_internal_helper)
-
- def latest_revision(self, url, ud, d, name):
- """
- Look in the cache for the latest revision, if not present ask the SCM.
- """
- if not hasattr(self, "_latest_revision"):
- raise ParameterError("The fetcher for this URL does not support _latest_revision", url)
-
- pd = persist_data.persist(d)
- revs = pd['BB_URI_HEADREVS']
- key = self.generate_revision_key(url, ud, d, name)
- rev = revs[key]
- if rev != None:
- return str(rev)
-
- revs[key] = rev = self._latest_revision(url, ud, d, name)
- return rev
-
- def sortable_revision(self, url, ud, d, name):
- """
-
- """
- if hasattr(self, "_sortable_revision"):
- return self._sortable_revision(url, ud, d)
-
- pd = persist_data.persist(d)
- localcounts = pd['BB_URI_LOCALCOUNT']
- key = self.generate_revision_key(url, ud, d, name)
-
- latest_rev = self._build_revision(url, ud, d, name)
- last_rev = localcounts[key + '_rev']
- uselocalcount = bb.data.getVar("BB_LOCALCOUNT_OVERRIDE", d, True) or False
- count = None
- if uselocalcount:
- count = FetchMethod.localcount_internal_helper(ud, d, name)
- if count is None:
- count = localcounts[key + '_count'] or "0"
-
- if last_rev == latest_rev:
- return str(count + "+" + latest_rev)
-
- buildindex_provided = hasattr(self, "_sortable_buildindex")
- if buildindex_provided:
- count = self._sortable_buildindex(url, ud, d, latest_rev)
-
- if count is None:
- count = "0"
- elif uselocalcount or buildindex_provided:
- count = str(count)
- else:
- count = str(int(count) + 1)
-
- localcounts[key + '_rev'] = latest_rev
- localcounts[key + '_count'] = count
-
- return str(count + "+" + latest_rev)
-
- def generate_revision_key(self, url, ud, d, name):
- key = self._revision_key(url, ud, d, name)
- return "%s-%s" % (key, bb.data.getVar("PN", d, True) or "")
-
-class Fetch(object):
- def __init__(self, urls, d, cache = True):
- if len(urls) == 0:
- urls = d.getVar("SRC_URI", True).split()
- self.urls = urls
- self.d = d
- self.ud = {}
-
- fn = bb.data.getVar('FILE', d, True)
- if cache and fn in urldata_cache:
- self.ud = urldata_cache[fn]
-
- for url in urls:
- if url not in self.ud:
- self.ud[url] = FetchData(url, d)
-
- if cache:
- urldata_cache[fn] = self.ud
-
- def localpath(self, url):
- if url not in self.urls:
- self.ud[url] = FetchData(url, self.d)
-
- self.ud[url].setup_localpath(self.d)
- return bb.data.expand(self.ud[url].localpath, self.d)
-
- def localpaths(self):
- """
- Return a list of the local filenames, assuming successful fetch
- """
- local = []
-
- for u in self.urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- local.append(ud.localpath)
-
- return local
-
- def download(self, urls = []):
- """
- Fetch all urls
- """
- if len(urls) == 0:
- urls = self.urls
-
- network = bb.data.getVar("BB_NO_NETWORK", self.d, True)
- premirroronly = (bb.data.getVar("BB_FETCH_PREMIRRORONLY", self.d, True) == "1")
-
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- m = ud.method
- localpath = ""
-
- if not ud.localfile:
- continue
-
- lf = bb.utils.lockfile(ud.lockfile)
-
- try:
- bb.data.setVar("BB_NO_NETWORK", network, self.d)
-
- if not m.need_update(u, ud, self.d):
- localpath = ud.localpath
- elif m.try_premirror(u, ud, self.d):
- logger.debug(1, "Trying PREMIRRORS")
- mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
- localpath = try_mirrors(self.d, ud, mirrors, False)
-
- if premirroronly:
- bb.data.setVar("BB_NO_NETWORK", "1", self.d)
-
- if not localpath and m.need_update(u, ud, self.d):
- try:
- logger.debug(1, "Trying Upstream")
- m.download(u, ud, self.d)
- if hasattr(m, "build_mirror_data"):
- m.build_mirror_data(u, ud, self.d)
- localpath = ud.localpath
-
- except bb.fetch2.NetworkAccess:
- raise
-
- except BBFetchException as e:
- logger.debug(1, str(e))
- # Remove any incomplete fetch
- if os.path.isfile(ud.localpath):
- bb.utils.remove(ud.localpath)
- logger.debug(1, "Trying MIRRORS")
- mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
- localpath = try_mirrors(self.d, ud, mirrors)
-
- if not localpath or not os.path.exists(localpath):
- raise FetchError("Unable to fetch URL %s from any source." % u, u)
-
- if os.path.exists(ud.donestamp):
- # Touch the done stamp file to show active use of the download
- try:
- os.utime(ud.donestamp, None)
- except OSError:
- # Errors aren't fatal here
- pass
- else:
- # Only check the checksums if we've not seen this item before, then create the stamp
- verify_checksum(u, ud, self.d)
- open(ud.donestamp, 'w').close()
-
- finally:
- bb.utils.unlockfile(lf)
-
- def checkstatus(self, urls = []):
- """
- Check all urls exist upstream
- """
-
- if len(urls) == 0:
- urls = self.urls
-
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
- m = ud.method
- logger.debug(1, "Testing URL %s", u)
- # First try checking uri, u, from PREMIRRORS
- mirrors = mirror_from_string(bb.data.getVar('PREMIRRORS', self.d, True))
- ret = try_mirrors(self.d, ud, mirrors, True)
- if not ret:
- # Next try checking from the original uri, u
- try:
- ret = m.checkstatus(u, ud, self.d)
- except:
- # Finally, try checking uri, u, from MIRRORS
- mirrors = mirror_from_string(bb.data.getVar('MIRRORS', self.d, True))
- ret = try_mirrors(self.d, ud, mirrors, True)
-
- if not ret:
- raise FetchError("URL %s doesn't work" % u, u)
-
- def unpack(self, root, urls = []):
- """
- Unpack all urls into the given root directory
- """
-
- if len(urls) == 0:
- urls = self.urls
-
- for u in urls:
- ud = self.ud[u]
- ud.setup_localpath(self.d)
-
- if bb.data.expand(ud.localpath, self.d) is None:
- continue
-
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
-
- ud.method.unpack(ud, root, self.d)
-
- if ud.lockfile:
- bb.utils.unlockfile(lf)
-
- def clean(self, urls = []):
- """
- Clean files that the fetcher gets or places
- """
-
- if len(urls) == 0:
- urls = self.urls
-
- for url in urls:
- if url not in self.ud:
- self.ud[url] = FetchData(url, self.d)
- ud = self.ud[url]
- ud.setup_localpath(self.d)
-
- if not ud.localfile or ud.localpath is None:
- continue
-
- if ud.lockfile:
- lf = bb.utils.lockfile(ud.lockfile)
-
- ud.method.clean(ud, self.d)
- if ud.donestamp:
- bb.utils.remove(ud.donestamp)
-
- if ud.lockfile:
- bb.utils.unlockfile(lf)
-
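-# Typical usage sketch (hypothetical URL): the Fetch front end drives the
-# per-scheme methods registered below.
-#
-#   fetcher = Fetch(["http://a.example/src.tar.gz"], d)
-#   fetcher.download()
-#   fetcher.unpack(workdir)
-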
-from . import cvs
-from . import git
-from . import local
-from . import svn
-from . import wget
-from . import svk
-from . import ssh
-from . import perforce
-from . import bzr
-from . import hg
-from . import osc
-from . import repo
-
-methods.append(local.Local())
-methods.append(wget.Wget())
-methods.append(svn.Svn())
-methods.append(git.Git())
-methods.append(cvs.Cvs())
-methods.append(svk.Svk())
-methods.append(ssh.SSH())
-methods.append(perforce.Perforce())
-methods.append(bzr.Bzr())
-methods.append(hg.Hg())
-methods.append(osc.Osc())
-methods.append(repo.Repo())
diff --git a/bitbake/lib/bb/fetch2/bzr.py b/bitbake/lib/bb/fetch2/bzr.py
deleted file mode 100644
index 454961eff0..0000000000
--- a/bitbake/lib/bb/fetch2/bzr.py
+++ /dev/null
@@ -1,141 +0,0 @@
-"""
-BitBake 'Fetch' implementation for bzr.
-
-"""
-
-# Copyright (C) 2007 Ross Burton
-# Copyright (C) 2007 Richard Purdie
-#
-# Classes for obtaining upstream sources for the
-# BitBake build tools.
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Bzr(FetchMethod):
- def supports(self, url, ud, d):
- return ud.type in ['bzr']
-
- def urldata_init(self, ud, d):
- """
- init bzr specific variable within url data
- """
- # Create paths to bzr checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${BZRDIR}', d), ud.host, relpath)
-
- if not ud.revision:
- ud.revision = self.latest_revision(ud.url, ud, d)
-
- ud.localfile = data.expand('bzr_%s_%s_%s.tar.gz' % (ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- def _buildbzrcommand(self, ud, d, command):
- """
- Build up a bzr command line based on ud
- command is "fetch", "update", "revno"
- """
-
- basecmd = data.expand('${FETCHCMD_bzr}', d)
-
- proto = ud.parm.get('proto', 'http')
-
- bzrroot = ud.host + ud.path
-
- options = []
-
- if command is "revno":
- bzrcmd = "%s revno %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
- else:
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- if command is "fetch":
- bzrcmd = "%s co %s %s://%s" % (basecmd, " ".join(options), proto, bzrroot)
- elif command is "update":
- bzrcmd = "%s pull %s --overwrite" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid bzr command %s" % command, ud.url)
-
- return bzrcmd
-
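- # For illustration (hypothetical values): with revision 1234 and the default
- # "http" proto, _buildbzrcommand(ud, d, "fetch") yields something like
- #   ${FETCHCMD_bzr} co -r 1234 http://bzr.example/trunk
-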
- def download(self, loc, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir), '.bzr'), os.R_OK):
- bzrcmd = self._buildbzrcommand(ud, d, "update")
- logger.debug(1, "BZR Update %s", loc)
- bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- os.chdir(os.path.join(ud.pkgdir, os.path.basename(ud.path)))
- runfetchcmd(bzrcmd, d)
- else:
- bb.utils.remove(os.path.join(ud.pkgdir, os.path.basename(ud.pkgdir)), True)
- bzrcmd = self._buildbzrcommand(ud, d, "fetch")
- bb.fetch2.check_network_access(d, bzrcmd, ud.url)
- logger.debug(1, "BZR Checkout %s", loc)
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", bzrcmd)
- runfetchcmd(bzrcmd, d)
-
- os.chdir(ud.pkgdir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.bzr' --exclude '.bzrtags'"
-
- # tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(ud.pkgdir)), d, cleanup = [ud.localpath])
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, url, ud, d, name):
- """
- Return a unique key for the url
- """
- return "bzr:" + ud.pkgdir
-
- def _latest_revision(self, url, ud, d, name):
- """
- Return the latest upstream revision number
- """
- logger.debug(2, "BZR fetcher hitting network for %s", url)
-
- bb.fetch2.check_network_access(d, self._buildbzrcommand(ud, d, "revno"), ud.url)
-
- output = runfetchcmd(self._buildbzrcommand(ud, d, "revno"), d, True)
-
- return output.strip()
-
- def _sortable_revision(self, url, ud, d):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return self._build_revision(url, ud, d)
-
- def _build_revision(self, url, ud, d):
- return ud.revision
diff --git a/bitbake/lib/bb/fetch2/cvs.py b/bitbake/lib/bb/fetch2/cvs.py
deleted file mode 100644
index 12d11e0d5b..0000000000
--- a/bitbake/lib/bb/fetch2/cvs.py
+++ /dev/null
@@ -1,181 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-#Based on functions from the base bb module, Copyright 2003 Holger Schurig
-#
-
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod, FetchError, MissingParameterError, logger
-from bb.fetch2 import runfetchcmd
-
-class Cvs(FetchMethod):
- """
- Class to fetch a module or modules from cvs repositories
- """
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with cvs.
- """
- return ud.type in ['cvs']
-
- def urldata_init(self, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError("module", ud.url)
- ud.module = ud.parm["module"]
-
- ud.tag = ud.parm.get('tag', "")
-
- # Override the default date in certain cases
- if 'date' in ud.parm:
- ud.date = ud.parm['date']
- elif ud.tag:
- ud.date = ""
-
- norecurse = ''
- if 'norecurse' in ud.parm:
- norecurse = '_norecurse'
-
- fullpath = ''
- if 'fullpath' in ud.parm:
- fullpath = '_fullpath'
-
- ud.localfile = data.expand('%s_%s_%s_%s%s%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.tag, ud.date, norecurse, fullpath), d)
-
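- # Example SRC_URI for this fetcher (hypothetical host and module):
- #   cvs://anonymous@cvs.example/cvsroot;module=proj;tag=RELEASE_1
-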
- def need_update(self, url, ud, d):
- if (ud.date == "now"):
- return True
- if not os.path.exists(ud.localpath):
- return True
- return False
-
- def download(self, loc, ud, d):
-
- method = ud.parm.get('method', 'pserver')
- localdir = ud.parm.get('localdir', ud.module)
- cvs_port = ud.parm.get('port', '')
-
- cvs_rsh = None
- if method == "ext":
- if "rsh" in ud.parm:
- cvs_rsh = ud.parm["rsh"]
-
- if method == "dir":
- cvsroot = ud.path
- else:
- cvsroot = ":" + method
- cvsproxyhost = data.getVar('CVS_PROXY_HOST', d, True)
- if cvsproxyhost:
- cvsroot += ";proxy=" + cvsproxyhost
- cvsproxyport = data.getVar('CVS_PROXY_PORT', d, True)
- if cvsproxyport:
- cvsroot += ";proxyport=" + cvsproxyport
- cvsroot += ":" + ud.user
- if ud.pswd:
- cvsroot += ":" + ud.pswd
- cvsroot += "@" + ud.host + ":" + cvs_port + ud.path
-
- options = []
- if 'norecurse' in ud.parm:
- options.append("-l")
- if ud.date:
- # treat YYYYMMDDHHMM specially for CVS
- if len(ud.date) == 12:
- options.append("-D \"%s %s:%s UTC\"" % (ud.date[0:8], ud.date[8:10], ud.date[10:12]))
- else:
- options.append("-D \"%s UTC\"" % ud.date)
- if ud.tag:
- options.append("-r %s" % ud.tag)
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- data.setVar('CVSROOT', cvsroot, localdata)
- data.setVar('CVSCOOPTS', " ".join(options), localdata)
- data.setVar('CVSMODULE', ud.module, localdata)
- cvscmd = data.getVar('FETCHCOMMAND', localdata, True)
- cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, True)
-
- if cvs_rsh:
- cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd)
- cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd)
-
- # create module directory
- logger.debug(2, "Fetch: checking for module directory")
- pkg = data.expand('${PN}', d)
- pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
- moddir = os.path.join(pkgdir, localdir)
- if os.access(os.path.join(moddir, 'CVS'), os.R_OK):
- logger.info("Update " + loc)
- bb.fetch2.check_network_access(d, cvsupdatecmd, ud.url)
- # update sources there
- os.chdir(moddir)
- cmd = cvsupdatecmd
- else:
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(pkgdir)
- os.chdir(pkgdir)
- logger.debug(1, "Running %s", cvscmd)
- bb.fetch2.check_network_access(d, cvscmd, ud.url)
- cmd = cvscmd
-
- runfetchcmd(cmd, d, cleanup = [moddir])
-
- if not os.access(moddir, os.R_OK):
- raise FetchError("Directory %s was not readable despite sucessful fetch?!" % moddir, ud.url)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude 'CVS'"
-
- # tar them up to a defined filename
- if 'fullpath' in ud.parm:
- os.chdir(pkgdir)
- cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, localdir)
- else:
- os.chdir(moddir)
- os.chdir('..')
- cmd = "tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.basename(moddir))
-
- runfetchcmd(cmd, d, cleanup = [ud.localpath])
-
- def clean(self, ud, d):
- """ Clean CVS Files and tarballs """
-
- pkg = data.expand('${PN}', d)
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
- pkgdir = os.path.join(data.expand('${CVSDIR}', localdata), pkg)
-
- bb.utils.remove(pkgdir, True)
- bb.utils.remove(ud.localpath)
-
diff --git a/bitbake/lib/bb/fetch2/git.py b/bitbake/lib/bb/fetch2/git.py
deleted file mode 100644
index f2c27e42a7..0000000000
--- a/bitbake/lib/bb/fetch2/git.py
+++ /dev/null
@@ -1,242 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' git implementation
-
-"""
-
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Git(FetchMethod):
- """Class to fetch a module or modules from git repositories"""
- def init(self, d):
- #
- # Only enable the rev-list based sortable buildindex when BB_GIT_CLONE_FOR_SRCREV is set
- #
- if bb.data.getVar("BB_GIT_CLONE_FOR_SRCREV", d, True):
- self._sortable_buildindex = self._sortable_buildindex_disabled
-
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with git.
- """
- return ud.type in ['git']
-
- def urldata_init(self, ud, d):
- """
- init git specific variable within url data
- so that the git method like latest_revision() can work
- """
- if 'protocol' in ud.parm:
- ud.proto = ud.parm['protocol']
- elif not ud.host:
- ud.proto = 'file'
- else:
- ud.proto = "rsync"
-
- ud.nocheckout = False
- if 'nocheckout' in ud.parm:
- ud.nocheckout = True
-
- branches = ud.parm.get("branch", "master").split(',')
- if len(branches) != len(ud.names):
- raise bb.fetch2.ParameterError("The number of name and branch parameters is not balanced", ud.url)
- ud.branches = {}
- for name in ud.names:
- branch = branches[ud.names.index(name)]
- ud.branches[name] = branch
-
- gitsrcname = '%s%s' % (ud.host, ud.path.replace('/', '.'))
- ud.mirrortarball = 'git2_%s.tar.gz' % (gitsrcname)
- ud.fullmirror = os.path.join(data.getVar("DL_DIR", d, True), ud.mirrortarball)
- ud.clonedir = os.path.join(data.expand('${GITDIR}', d), gitsrcname)
-
- ud.basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
-
- for name in ud.names:
- # Ensure anything that doesn't look like a SHA-1 revision (40 hex characters) is translated into one
- if not ud.revisions[name] or len(ud.revisions[name]) != 40 or (False in [c in "abcdef0123456789" for c in ud.revisions[name]]):
- ud.revisions[name] = self.latest_revision(ud.url, ud, d, name)
-
- ud.write_tarballs = (data.getVar("BB_GENERATE_MIRROR_TARBALLS", d, True) or "0") != "0"
-
- ud.localfile = ud.clonedir
-
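- # Example SRC_URI for this fetcher (hypothetical repository):
- #   git://git.example/project.git;protocol=git;branch=stable;name=project
-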
- def localpath(self, url, ud, d):
- return ud.clonedir
-
- def need_update(self, u, ud, d):
- if not os.path.exists(ud.clonedir):
- return True
- os.chdir(ud.clonedir)
- for name in ud.names:
- if not self._contains_ref(ud.revisions[name], d):
- return True
- if ud.write_tarballs and not os.path.exists(ud.fullmirror):
- return True
- return False
-
- def try_premirror(self, u, ud, d):
- # If we don't do this, updating an existing checkout with only premirrors
- # is not possible
- if bb.data.getVar("BB_FETCH_PREMIRRORONLY", d, True) is not None:
- return True
- if os.path.exists(ud.clonedir):
- return False
- return True
-
- def download(self, loc, ud, d):
- """Fetch url"""
-
- if ud.user:
- username = ud.user + '@'
- else:
- username = ""
-
- ud.repochanged = not os.path.exists(ud.fullmirror)
-
- # If the checkout doesn't exist and the mirror tarball does, extract it
- if not os.path.exists(ud.clonedir) and os.path.exists(ud.fullmirror):
- bb.mkdirhier(ud.clonedir)
- os.chdir(ud.clonedir)
- runfetchcmd("tar -xzf %s" % (ud.fullmirror), d)
-
- # If the repo still doesn't exist, fallback to cloning it
- if not os.path.exists(ud.clonedir):
- bb.fetch2.check_network_access(d, "git clone --bare %s%s" % (ud.host, ud.path))
- runfetchcmd("%s clone --bare %s://%s%s%s %s" % (ud.basecmd, ud.proto, username, ud.host, ud.path, ud.clonedir), d)
-
- os.chdir(ud.clonedir)
- # Update the checkout if needed
- needupdate = False
- for name in ud.names:
- if not self._contains_ref(ud.revisions[name], d):
- needupdate = True
- if needupdate:
- bb.fetch2.check_network_access(d, "git fetch %s%s" % (ud.host, ud.path), ud.url)
- try:
- runfetchcmd("%s remote prune origin" % ud.basecmd, d)
- runfetchcmd("%s remote rm origin" % ud.basecmd, d)
- except bb.fetch2.FetchError:
- logger.debug(1, "No Origin")
-
- runfetchcmd("%s remote add origin %s://%s%s%s" % (ud.basecmd, ud.proto, username, ud.host, ud.path), d)
- runfetchcmd("%s fetch --all -t" % ud.basecmd, d)
- runfetchcmd("%s prune-packed" % ud.basecmd, d)
- runfetchcmd("%s pack-redundant --all | xargs -r rm" % ud.basecmd, d)
- ud.repochanged = True
-
- def build_mirror_data(self, url, ud, d):
- # Generate a mirror tarball if needed
- if ud.write_tarballs and (ud.repochanged or not os.path.exists(ud.fullmirror)):
- os.chdir(ud.clonedir)
- logger.info("Creating tarball of git repository")
- runfetchcmd("tar -czf %s %s" % (ud.fullmirror, os.path.join(".") ), d)
-
- def unpack(self, ud, destdir, d):
- """ unpack the downloaded src to destdir"""
-
- subdir = ud.parm.get("subpath", "")
- if subdir != "":
- readpathspec = ":%s" % (subdir)
- else:
- readpathspec = ""
-
- destdir = os.path.join(destdir, "git/")
- if os.path.exists(destdir):
- bb.utils.prunedir(destdir)
-
- runfetchcmd("git clone -s -n %s %s" % (ud.clonedir, destdir), d)
- if not ud.nocheckout:
- os.chdir(destdir)
- runfetchcmd("%s read-tree %s%s" % (ud.basecmd, ud.revisions[ud.names[0]], readpathspec), d)
- runfetchcmd("%s checkout-index -q -f -a" % ud.basecmd, d)
- return True
-
- def clean(self, ud, d):
- """ clean the git directory """
-
- bb.utils.remove(ud.localpath, True)
- bb.utils.remove(ud.fullmirror)
-
- def supports_srcrev(self):
- return True
-
- def _contains_ref(self, tag, d):
- basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
- output = runfetchcmd("%s log --pretty=oneline -n 1 %s -- 2> /dev/null | wc -l" % (basecmd, tag), d, quiet=True)
- return output.split()[0] != "0"
-
- def _revision_key(self, url, ud, d, name):
- """
- Return a unique key for the url
- """
- return "git:" + ud.host + ud.path.replace('/', '.') + ud.branches[name]
-
- def _latest_revision(self, url, ud, d, name):
- """
- Compute the HEAD revision for the url
- """
- if ud.user:
- username = ud.user + '@'
- else:
- username = ""
-
- bb.fetch2.check_network_access(d, "git ls-remote %s%s %s" % (ud.host, ud.path, ud.branches[name]))
- basecmd = data.getVar("FETCHCMD_git", d, True) or "git"
- cmd = "%s ls-remote %s://%s%s%s %s" % (basecmd, ud.proto, username, ud.host, ud.path, ud.branches[name])
- output = runfetchcmd(cmd, d, True)
- if not output:
- raise bb.fetch2.FetchError("The command %s gave empty output unexpectedly" % cmd, url)
- return output.split()[0]
-
- def _build_revision(self, url, ud, d, name):
- return ud.revisions[name]
-
- def _sortable_buildindex_disabled(self, url, ud, d, rev):
- """
- Return a suitable buildindex for the revision specified. This is done by counting revisions
- using "git rev-list" which may or may not work in different circumstances.
- """
-
- cwd = os.getcwd()
-
- # Check if we have the rev already
-
- if not os.path.exists(ud.clonedir):
- print("no repo")
- self.download(None, ud, d)
- if not os.path.exists(ud.clonedir):
- logger.error("GIT repository for %s doesn't exist in %s, cannot get sortable buildnumber, using old value", url, ud.clonedir)
- return None
-
- os.chdir(ud.clonedir)
- if not self._contains_ref(rev, d):
- self.download(None, ud, d)
-
- output = runfetchcmd("%s rev-list %s -- 2> /dev/null | wc -l" % (ud.basecmd, rev), d, quiet=True)
- os.chdir(cwd)
-
- buildindex = "%s" % output.split()[0]
- logger.debug(1, "GIT repository for %s in %s is returning %s revisions in rev-list before %s", url, ud.clonedir, buildindex, rev)
- return buildindex
diff --git a/bitbake/lib/bb/fetch2/hg.py b/bitbake/lib/bb/fetch2/hg.py
deleted file mode 100644
index 6a56f8d0cd..0000000000
--- a/bitbake/lib/bb/fetch2/hg.py
+++ /dev/null
@@ -1,174 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for mercurial DRCS (hg).
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-# Copyright (C) 2007 Robert Schuster
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Hg(FetchMethod):
- """Class to fetch from mercurial repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with mercurial.
- """
- return ud.type in ['hg']
-
- def urldata_init(self, ud, d):
- """
- init hg specific variable within url data
- """
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
-
- ud.module = ud.parm["module"]
-
- # Create paths to mercurial checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${HGDIR}', d), ud.host, relpath)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- elif not ud.revision:
- ud.revision = self.latest_revision(ud.url, ud, d)
-
- ud.localfile = data.expand('%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
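- # Example SRC_URI for this fetcher (hypothetical repository):
- #   hg://hg.example/repos;module=project;rev=tip
-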
- def need_update(self, url, ud, d):
- revTag = ud.parm.get('rev', 'tip')
- if revTag == "tip":
- return True
- if not os.path.exists(ud.localpath):
- return True
- return False
-
- def _buildhgcommand(self, ud, d, command):
- """
- Build up an hg commandline based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_hg}', d)
-
- proto = ud.parm.get('proto', 'http')
-
- host = ud.host
- if proto == "file":
- host = "/"
- ud.host = "localhost"
-
- if not ud.user:
- hgroot = host + ud.path
- else:
- hgroot = ud.user + "@" + host + ud.path
-
- if command is "info":
- return "%s identify -i %s://%s/%s" % (basecmd, proto, hgroot, ud.module)
-
- options = []
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- if command is "fetch":
- cmd = "%s clone %s %s://%s/%s %s" % (basecmd, " ".join(options), proto, hgroot, ud.module, ud.module)
- elif command is "pull":
- # do not pass options list; limiting pull to rev causes the local
- # repo not to contain it and immediately following "update" command
- # will crash
- cmd = "%s pull" % (basecmd)
- elif command is "update":
- cmd = "%s update -C %s" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid hg command %s" % command, ud.url)
-
- return cmd
-
- def download(self, loc, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(ud.moddir, '.hg'), os.R_OK):
- updatecmd = self._buildhgcommand(ud, d, "pull")
- logger.info("Update " + loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- bb.fetch2.check_network_access(d, updatecmd, ud.url)
- runfetchcmd(updatecmd, d)
-
- else:
- fetchcmd = self._buildhgcommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", fetchcmd)
- bb.fetch2.check_network_access(d, fetchcmd, ud.url)
- runfetchcmd(fetchcmd, d)
-
- # Even when we clone (fetch), we still need to update as hg's clone
- # won't checkout the specified revision if its on a branch
- updatecmd = self._buildhgcommand(ud, d, "update")
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", updatecmd)
- runfetchcmd(updatecmd, d)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.hg' --exclude '.hgrags'"
-
- os.chdir(ud.pkgdir)
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
-
- def supports_srcrev(self):
- return True
-
- def _latest_revision(self, url, ud, d, name):
- """
- Compute tip revision for the url
- """
- bb.fetch2.check_network_access(d, self._buildhgcommand(ud, d, "info"))
- output = runfetchcmd(self._buildhgcommand(ud, d, "info"), d)
- return output.strip()
-
- def _build_revision(self, url, ud, d):
- return ud.revision
-
- def _revision_key(self, url, ud, d, name):
- """
- Return a unique key for the url
- """
- return "hg:" + ud.moddir
diff --git a/bitbake/lib/bb/fetch2/local.py b/bitbake/lib/bb/fetch2/local.py
deleted file mode 100644
index 77a296ec67..0000000000
--- a/bitbake/lib/bb/fetch2/local.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import bb
-import bb.utils
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import logger
-
-class Local(FetchMethod):
- def supports(self, url, urldata, d):
- """
- Check to see if a given url represents a local fetch.
- """
- return urldata.type in ['file']
-
- def urldata_init(self, ud, d):
- # We don't set localfile as for this fetcher the file is already local!
- return
-
- def localpath(self, url, urldata, d):
- """
- Return the local filename of a given url assuming a successful fetch.
- """
- path = url.split("://")[1]
- path = path.split(";")[0]
- newpath = path
- if path[0] != "/":
- filespath = data.getVar('FILESPATH', d, True)
- if filespath:
- newpath = bb.utils.which(filespath, path)
- if not newpath:
- filesdir = data.getVar('FILESDIR', d, True)
- if filesdir:
- newpath = os.path.join(filesdir, path)
- return newpath
-
- def download(self, url, urldata, d):
- """Fetch urls (no-op for Local method)"""
- # no need to fetch local files, we'll deal with them in place.
- return 1
-
- def checkstatus(self, url, urldata, d):
- """
- Check the status of the url
- """
- if urldata.localpath.find("*") != -1:
- logger.info("URL %s looks like a glob and was therefore not checked.", url)
- return True
- if os.path.exists(urldata.localpath):
- return True
- return False
-
- def clean(self, urldata, d):
- return
-
diff --git a/bitbake/lib/bb/fetch2/osc.py b/bitbake/lib/bb/fetch2/osc.py
deleted file mode 100644
index 4bf411c24f..0000000000
--- a/bitbake/lib/bb/fetch2/osc.py
+++ /dev/null
@@ -1,135 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-Bitbake "Fetch" implementation for osc (Opensuse build service client).
-Based on the svn "Fetch" implementation.
-
-"""
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-
-class Osc(FetchMethod):
- """Class to fetch a module or modules from Opensuse build server
- repositories."""
-
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with osc.
- """
- return ud.type in ['osc']
-
- def urldata_init(self, ud, d):
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
-
- ud.module = ud.parm["module"]
-
- # Create paths to osc checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${OSCDIR}', d), ud.host)
- ud.moddir = os.path.join(ud.pkgdir, relpath, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
- else:
- pv = data.getVar("PV", d, 0)
- rev = bb.fetch2.srcrev_internal_helper(ud, d)
- if rev and rev != True:
- ud.revision = rev
- else:
- ud.revision = ""
-
- ud.localfile = data.expand('%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.path.replace('/', '.'), ud.revision), d)
-
- def _buildosccommand(self, ud, d, command):
- """
- Build up an osc command line based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_osc}', d)
-
- proto = ud.parm.get('proto', 'osc')
-
- options = []
-
- config = "-c %s" % self.generate_config(ud, d)
-
- if ud.revision:
- options.append("-r %s" % ud.revision)
-
- coroot = self._strip_leading_slashes(ud.path)
-
- if command is "fetch":
- osccmd = "%s %s co %s/%s %s" % (basecmd, config, coroot, ud.module, " ".join(options))
- elif command is "update":
- osccmd = "%s %s up %s" % (basecmd, config, " ".join(options))
- else:
- raise FetchError("Invalid osc command %s" % command, ud.url)
-
- return osccmd
-
- def download(self, loc, ud, d):
- """
- Fetch url
- """
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(data.expand('${OSCDIR}', d), ud.path, ud.module), os.R_OK):
- oscupdatecmd = self._buildosccommand(ud, d, "update")
- logger.info("Update "+ loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", oscupdatecmd)
- bb.fetch2.check_network_access(d, oscupdatecmd, ud.url)
- runfetchcmd(oscupdatecmd, d)
- else:
- oscfetchcmd = self._buildosccommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", oscfetchcmd)
- bb.fetch2.check_network_access(d, oscfetchcmd, ud.url)
- runfetchcmd(oscfetchcmd, d)
-
- os.chdir(os.path.join(ud.pkgdir + ud.path))
- # tar them up to a defined filename
- runfetchcmd("tar -czf %s %s" % (ud.localpath, ud.module), d, cleanup = [ud.localpath])
-
- def supports_srcrev(self):
- return False
-
- def generate_config(self, ud, d):
- """
- Generate a .oscrc to be used for this run.
- """
-
- config_path = os.path.join(data.expand('${OSCDIR}', d), "oscrc")
- if os.path.exists(config_path):
- os.remove(config_path)
-
- f = open(config_path, 'w')
- f.write("[general]\n")
- f.write("apisrv = %s\n" % ud.host)
- f.write("scheme = http\n")
- f.write("su-wrapper = su -c\n")
- f.write("build-root = %s\n" % data.expand('${WORKDIR}', d))
- f.write("urllist = http://moblin-obs.jf.intel.com:8888/build/%(project)s/%(repository)s/%(buildarch)s/:full/%(name)s.rpm\n")
- f.write("extra-pkgs = gzip\n")
- f.write("\n")
- f.write("[%s]\n" % ud.host)
- f.write("user = %s\n" % ud.parm["user"])
- f.write("pass = %s\n" % ud.parm["pswd"])
- f.close()
-
- return config_path
diff --git a/bitbake/lib/bb/fetch2/perforce.py b/bitbake/lib/bb/fetch2/perforce.py
deleted file mode 100644
index 6347834c76..0000000000
--- a/bitbake/lib/bb/fetch2/perforce.py
+++ /dev/null
@@ -1,196 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from future_builtins import zip
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-class Perforce(FetchMethod):
- def supports(self, url, ud, d):
- return ud.type in ['p4']
-
- def doparse(url, d):
- parm = {}
- path = url.split("://")[1]
- delim = path.find("@");
- if delim != -1:
- (user, pswd, host, port) = path.split('@')[0].split(":")
- path = path.split('@')[1]
- else:
- (host, port) = data.getVar('P4PORT', d).split(':')
- user = ""
- pswd = ""
-
- if path.find(";") != -1:
- keys=[]
- values=[]
- plist = path.split(';')
- for item in plist:
- if item.count('='):
- (key, value) = item.split('=')
- keys.append(key)
- values.append(value)
-
- parm = dict(zip(keys, values))
- path = "//" + path.split(';')[0]
- host += ":%s" % (port)
- parm["cset"] = Perforce.getcset(d, path, host, user, pswd, parm)
-
- return host, path, user, pswd, parm
- doparse = staticmethod(doparse)
-
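- # For illustration (hypothetical URL): doparse() splits a p4 URL of the form
- #   p4://user:pswd:host:port@depot/path/...;label=L1
- # into its (host, path, user, pswd, parm) components.
-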
- def getcset(d, depot, host, user, pswd, parm):
- p4opt = ""
- if "cset" in parm:
- return parm["cset"];
- if user:
- p4opt += " -u %s" % (user)
- if pswd:
- p4opt += " -P %s" % (pswd)
- if host:
- p4opt += " -p %s" % (host)
-
- p4date = data.getVar("P4DATE", d, True)
- if "revision" in parm:
- depot += "#%s" % (parm["revision"])
- elif "label" in parm:
- depot += "@%s" % (parm["label"])
- elif p4date:
- depot += "@%s" % (p4date)
-
- p4cmd = data.getVar('FETCHCOMMAND_p4', d, True)
- logger.debug(1, "Running %s%s changes -m 1 %s", p4cmd, p4opt, depot)
- p4file = os.popen("%s%s changes -m 1 %s" % (p4cmd, p4opt, depot))
- cset = p4file.readline().strip()
- logger.debug(1, "READ %s", cset)
- if not cset:
- return -1
-
- return cset.split(' ')[1]
- getcset = staticmethod(getcset)
-
- def urldata_init(self, ud, d):
- (host, path, user, pswd, parm) = Perforce.doparse(ud.url, d)
-
- # If a label is specified, we use that as our filename
-
- if "label" in parm:
- ud.localfile = "%s.tar.gz" % (parm["label"])
- return
-
- base = path
- which = path.find('/...')
- if which != -1:
- base = path[:which]
-
- base = self._strip_leading_slashes(base)
-
- cset = Perforce.getcset(d, path, host, user, pswd, parm)
-
- ud.localfile = data.expand('%s+%s+%s.tar.gz' % (host, base.replace('/', '.'), cset), d)
-
- def download(self, loc, ud, d):
- """
- Fetch urls
- """
-
- (host, depot, user, pswd, parm) = Perforce.doparse(loc, d)
-
- if depot.find('/...') != -1:
- path = depot[:depot.find('/...')]
- else:
- path = depot
-
- module = parm.get('module', os.path.basename(path))
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "p4:%s" % data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- # Get the p4 command
- p4opt = ""
- if user:
- p4opt += " -u %s" % (user)
-
- if pswd:
- p4opt += " -P %s" % (pswd)
-
- if host:
- p4opt += " -p %s" % (host)
-
- p4cmd = data.getVar('FETCHCOMMAND', localdata, True)
-
- # create temp directory
- logger.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oep4.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- raise FetchError("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.", loc)
-
- if "label" in parm:
- depot = "%s@%s" % (depot, parm["label"])
- else:
- cset = Perforce.getcset(d, depot, host, user, pswd, parm)
- depot = "%s@%s" % (depot, cset)
-
- os.chdir(tmpfile)
- logger.info("Fetch " + loc)
- logger.info("%s%s files %s", p4cmd, p4opt, depot)
- p4file = os.popen("%s%s files %s" % (p4cmd, p4opt, depot))
-
- if not p4file:
- raise FetchError("Fetch: unable to get the P4 files from %s" % depot, loc)
-
- count = 0
-
- for file in p4file:
- list = file.split()
-
- if list[2] == "delete":
- continue
-
- dest = list[0][len(path)+1:]
- where = dest.find("#")
-
- os.system("%s%s print -o %s/%s %s" % (p4cmd, p4opt, module, dest[:where], list[0]))
- count = count + 1
-
- if count == 0:
- logger.error("Fetch: No files gathered from the P4 fetch")
- raise FetchError("Fetch: No files gathered from the P4 fetch", loc)
-
- runfetchcmd("tar -czf %s %s" % (ud.localpath, module), d, cleanup = [ud.localpath])
- # cleanup
- bb.utils.prunedir(tmpfile)
diff --git a/bitbake/lib/bb/fetch2/repo.py b/bitbake/lib/bb/fetch2/repo.py
deleted file mode 100644
index 54130a8c3b..0000000000
--- a/bitbake/lib/bb/fetch2/repo.py
+++ /dev/null
@@ -1,98 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake "Fetch" repo (git) implementation
-
-"""
-
-# Copyright (C) 2009 Tom Rini <trini@embeddedalley.com>
-#
-# Based on git.py which is:
-#Copyright (C) 2005 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import os
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Repo(FetchMethod):
- """Class to fetch a module or modules from repo (git) repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with repo.
- """
- return ud.type in ["repo"]
-
- def urldata_init(self, ud, d):
- """
- We don"t care about the git rev of the manifests repository, but
- we do care about the manifest to use. The default is "default".
- We also care about the branch or tag to be used. The default is
- "master".
- """
-
- ud.proto = ud.parm.get('protocol', 'git')
- ud.branch = ud.parm.get('branch', 'master')
- ud.manifest = ud.parm.get('manifest', 'default.xml')
- if not ud.manifest.endswith('.xml'):
- ud.manifest += '.xml'
-
- ud.localfile = data.expand("repo_%s%s_%s_%s.tar.gz" % (ud.host, ud.path.replace("/", "."), ud.manifest, ud.branch), d)
-
- def download(self, loc, ud, d):
- """Fetch url"""
-
- if os.access(os.path.join(data.getVar("DL_DIR", d, True), ud.localfile), os.R_OK):
- logger.debug(1, "%s already exists (or was stashed). Skipping repo init / sync.", ud.localpath)
- return
-
- gitsrcname = "%s%s" % (ud.host, ud.path.replace("/", "."))
- repodir = data.getVar("REPODIR", d, True) or os.path.join(data.getVar("DL_DIR", d, True), "repo")
- codir = os.path.join(repodir, gitsrcname, ud.manifest)
-
- if ud.user:
- username = ud.user + "@"
- else:
- username = ""
-
- bb.mkdirhier(os.path.join(codir, "repo"))
- os.chdir(os.path.join(codir, "repo"))
- if not os.path.exists(os.path.join(codir, "repo", ".repo")):
- bb.fetch2.check_network_access(d, "repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), ud.url)
- runfetchcmd("repo init -m %s -b %s -u %s://%s%s%s" % (ud.manifest, ud.branch, ud.proto, username, ud.host, ud.path), d)
-
- bb.fetch2.check_network_access(d, "repo sync %s" % ud.url, ud.url)
- runfetchcmd("repo sync", d)
- os.chdir(codir)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.repo' --exclude '.git'"
-
- # Create a cache
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, os.path.join(".", "*") ), d)
-
- def supports_srcrev(self):
- return False
-
- def _build_revision(self, url, ud, d):
- return ud.manifest
-
- def _want_sortable_revision(self, url, ud, d):
- return False
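
The fetcher above delegates the real work to the `repo` tool: one `repo init -m <manifest> -b <branch> -u <url>` per checkout directory, followed by `repo sync`. A rough standalone sketch of how those command lines are assembled from the URL parameters (the helper name and its defaults are assumptions for illustration, mirroring urldata_init above):

    def build_repo_commands(host, path, user=None, proto="git",
                            branch="master", manifest="default.xml"):
        """Return the (init, sync) command strings used to fetch via repo."""
        if not manifest.endswith(".xml"):
            manifest += ".xml"
        username = user + "@" if user else ""
        init_cmd = "repo init -m %s -b %s -u %s://%s%s%s" % (
            manifest, branch, proto, username, host, path)
        return init_cmd, "repo sync"

    init_cmd, sync_cmd = build_repo_commands("android.googlesource.com",
                                             "/platform/manifest")
    print(init_cmd)
    # repo init -m default.xml -b master -u git://android.googlesource.com/platform/manifest
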
diff --git a/bitbake/lib/bb/fetch2/ssh.py b/bitbake/lib/bb/fetch2/ssh.py
deleted file mode 100644
index 91ac15faae..0000000000
--- a/bitbake/lib/bb/fetch2/ssh.py
+++ /dev/null
@@ -1,120 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-'''
-BitBake 'Fetch' implementations
-
-This implementation is for Secure Shell (SSH), and attempts to comply with the
-IETF secsh internet draft:
- http://tools.ietf.org/wg/secsh/draft-ietf-secsh-scp-sftp-ssh-uri/
-
- Currently does not support the sftp parameters, as this uses scp
- Also does not support the 'fingerprint' connection parameter.
-
-'''
-
-# Copyright (C) 2006 OpenedHand Ltd.
-#
-#
-# Based in part on svk.py:
-# Copyright (C) 2006 Holger Hans Peter Freyther
-# Based on svn.py:
-# Copyright (C) 2003, 2004 Chris Larson
-# Based on functions from the base bb module:
-# Copyright 2003 Holger Schurig
-#
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, os
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-
-__pattern__ = re.compile(r'''
- \s* # Skip leading whitespace
- ssh:// # scheme
- ( # Optional username/password block
- (?P<user>\S+) # username
- (:(?P<pass>\S+))? # colon followed by the password (optional)
- )?
- (?P<cparam>(;[^;]+)*)? # connection parameters block (optional)
- @
- (?P<host>\S+?) # non-greedy match of the host
- (:(?P<port>[0-9]+))? # colon followed by the port (optional)
- /
- (?P<path>[^;]+) # path on the remote system, may be absolute or relative,
- # and may include the use of '~' to reference the remote home
- # directory
- (?P<sparam>(;[^;]+)*)? # parameters block (optional)
- $
-''', re.VERBOSE)
-
-class SSH(FetchMethod):
- '''Class to fetch a module or modules via Secure Shell'''
-
- def supports(self, url, urldata, d):
- return __pattern__.match(url) != None
-
- def localpath(self, url, urldata, d):
- m = __pattern__.match(urldata.url)
- path = m.group('path')
- host = m.group('host')
- lpath = os.path.join(data.getVar('DL_DIR', d, True), host, os.path.basename(path))
- return lpath
-
- def download(self, url, urldata, d):
- dldir = data.getVar('DL_DIR', d, True)
-
- m = __pattern__.match(url)
- path = m.group('path')
- host = m.group('host')
- port = m.group('port')
- user = m.group('user')
- password = m.group('pass')
-
- ldir = os.path.join(dldir, host)
- lpath = os.path.join(ldir, os.path.basename(path))
-
- if not os.path.exists(ldir):
- os.makedirs(ldir)
-
- if port:
- port = '-P %s' % port
- else:
- port = ''
-
- if user:
- fr = user
- if password:
- fr += ':%s' % password
- fr += '@%s' % host
- else:
- fr = host
- fr += ':%s' % path
-
-
- import commands
- cmd = 'scp -B -r %s %s %s/' % (
- port,
- commands.mkarg(fr),
- commands.mkarg(ldir)
- )
-
- bb.fetch2.check_network_access(d, cmd, urldata.url)
-
- runfetchcmd(cmd, d)
-
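
All of the URL parsing for this fetcher happens in the verbose regex above. A small demonstration of the named groups it yields, using a lightly condensed adaptation of the pattern (the user group is made non-greedy here so the password splits out cleanly; the sample URL is invented):

    import re

    ssh_pattern = re.compile(r'''
        \s* ssh://                            # scheme
        ((?P<user>\S+?)(:(?P<pass>\S+?))?)?   # optional user[:password]
        (?P<cparam>(;[^;]+)*)?                # optional connection parameters
        @
        (?P<host>\S+?)                        # non-greedy host
        (:(?P<port>[0-9]+))?                  # optional port
        /
        (?P<path>[^;]+)                       # remote path
        (?P<sparam>(;[^;]+)*)?                # optional parameters
        $''', re.VERBOSE)

    m = ssh_pattern.match("ssh://builder:secret@example.com:2222/srv/tarballs/foo.tar.gz")
    print(m.group("user"), m.group("host"), m.group("port"), m.group("path"))
    # builder example.com 2222 srv/tarballs/foo.tar.gz
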
diff --git a/bitbake/lib/bb/fetch2/svk.py b/bitbake/lib/bb/fetch2/svk.py
deleted file mode 100644
index 6211cac8d3..0000000000
--- a/bitbake/lib/bb/fetch2/svk.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-This implementation is for svk. It is based on the svn implementation
-
-"""
-
-# Copyright (C) 2006 Holger Hans Peter Freyther
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-class Svk(FetchMethod):
- """Class to fetch a module or modules from svk repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with svk.
- """
- return ud.type in ['svk']
-
- def urldata_init(self, ud, d):
-
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
- else:
- ud.module = ud.parm["module"]
-
- ud.revision = ud.parm.get('rev', "")
-
- ud.localfile = data.expand('%s_%s_%s_%s_%s.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision, ud.date), d)
-
- def need_update(self, url, ud, d):
- if ud.date == "now":
- return True
- if not os.path.exists(ud.localpath):
- return True
- return False
-
- def download(self, loc, ud, d):
- """Fetch urls"""
-
- svkroot = ud.host + ud.path
-
- svkcmd = "svk co -r {%s} %s/%s" % (ud.date, svkroot, ud.module)
-
- if ud.revision:
- svkcmd = "svk co -r %s %s/%s" % (ud.revision, svkroot, ud.module)
-
- # create temp directory
- localdata = data.createCopy(d)
- data.update_data(localdata)
- logger.debug(2, "Fetch: creating temporary directory")
- bb.mkdirhier(data.expand('${WORKDIR}', localdata))
- data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvk.XXXXXX', localdata), localdata)
- tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, True) or "false")
- tmpfile = tmppipe.readline().strip()
- if not tmpfile:
- logger.error("Fetch: unable to create temporary directory")
- raise FetchError("Fetch: unable to create temporary directory; make sure 'mktemp' is in the PATH.", loc)
-
- # check out sources there
- os.chdir(tmpfile)
- logger.info("Fetch " + loc)
- logger.debug(1, "Running %s", svkcmd)
- runfetchcmd(svkcmd, d, cleanup = [tmpfile])
-
- os.chdir(os.path.join(tmpfile, os.path.dirname(ud.module)))
- # tar them up to a defined filename
- runfetchcmd("tar -czf %s %s" % (ud.localpath, os.path.basename(ud.module)), d, cleanup = [ud.localpath])
-
- # cleanup
- bb.utils.prunedir(tmpfile)
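
Like the perforce fetcher earlier, this one creates its scratch directory by piping the MKTEMPDIRCMD shell command through os.popen and reading the resulting path back, raising FetchError if the line comes back empty. The standard-library equivalent, which avoids the shell round trip, would be tempfile.mkdtemp; a minimal sketch of the same checkout-then-tar flow under that assumption (helper name invented):

    import os
    import shutil
    import subprocess
    import tempfile

    def checkout_and_pack(svkcmd, module, localpath, workdir):
        """Run a checkout command in a scratch dir, tar the result, clean up."""
        tmpdir = tempfile.mkdtemp(prefix="oesvk.", dir=workdir)
        try:
            subprocess.check_call(svkcmd, shell=True, cwd=tmpdir)
            moddir = os.path.join(tmpdir, os.path.dirname(module))
            subprocess.check_call(
                "tar -czf %s %s" % (localpath, os.path.basename(module)),
                shell=True, cwd=moddir)
        finally:
            shutil.rmtree(tmpdir)   # the prunedir() equivalent

    # e.g. checkout_and_pack("svk co -r 42 //mirror/proj/trunk", "trunk",
    #                        "/tmp/dl/proj.tar.gz", "/tmp/work")
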
diff --git a/bitbake/lib/bb/fetch2/svn.py b/bitbake/lib/bb/fetch2/svn.py
deleted file mode 100644
index ac4fd27e14..0000000000
--- a/bitbake/lib/bb/fetch2/svn.py
+++ /dev/null
@@ -1,180 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementation for svn.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2004 Marcin Juszkiewicz
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import sys
-import logging
-import bb
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import MissingParameterError
-from bb.fetch2 import runfetchcmd
-from bb.fetch2 import logger
-
-class Svn(FetchMethod):
- """Class to fetch a module or modules from svn repositories"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with svn.
- """
- return ud.type in ['svn']
-
- def urldata_init(self, ud, d):
- """
- init svn specific variable within url data
- """
- if not "module" in ud.parm:
- raise MissingParameterError('module', ud.url)
-
- ud.module = ud.parm["module"]
-
- # Create paths to svn checkouts
- relpath = self._strip_leading_slashes(ud.path)
- ud.pkgdir = os.path.join(data.expand('${SVNDIR}', d), ud.host, relpath)
- ud.moddir = os.path.join(ud.pkgdir, ud.module)
-
- if 'rev' in ud.parm:
- ud.revision = ud.parm['rev']
-
- ud.localfile = data.expand('%s_%s_%s_%s_.tar.gz' % (ud.module.replace('/', '.'), ud.host, ud.path.replace('/', '.'), ud.revision), d)
-
- def _buildsvncommand(self, ud, d, command):
- """
- Build up an svn commandline based on ud
- command is "fetch", "update", "info"
- """
-
- basecmd = data.expand('${FETCHCMD_svn}', d)
-
- proto = ud.parm.get('proto', 'svn')
-
- svn_rsh = None
- if proto == "svn+ssh" and "rsh" in ud.parm:
- svn_rsh = ud.parm["rsh"]
-
- svnroot = ud.host + ud.path
-
- options = []
-
- if ud.user:
- options.append("--username %s" % ud.user)
-
- if ud.pswd:
- options.append("--password %s" % ud.pswd)
-
- if command is "info":
- svncmd = "%s info %s %s://%s/%s/" % (basecmd, " ".join(options), proto, svnroot, ud.module)
- else:
- suffix = ""
- if ud.revision:
- options.append("-r %s" % ud.revision)
- suffix = "@%s" % (ud.revision)
-
- if command is "fetch":
- svncmd = "%s co %s %s://%s/%s%s %s" % (basecmd, " ".join(options), proto, svnroot, ud.module, suffix, ud.module)
- elif command is "update":
- svncmd = "%s update %s" % (basecmd, " ".join(options))
- else:
- raise FetchError("Invalid svn command %s" % command, ud.url)
-
- if svn_rsh:
- svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd)
-
- return svncmd
-
- def download(self, loc, ud, d):
- """Fetch url"""
-
- logger.debug(2, "Fetch: checking for module directory '" + ud.moddir + "'")
-
- if os.access(os.path.join(ud.moddir, '.svn'), os.R_OK):
- svnupdatecmd = self._buildsvncommand(ud, d, "update")
- logger.info("Update " + loc)
- # update sources there
- os.chdir(ud.moddir)
- logger.debug(1, "Running %s", svnupdatecmd)
- bb.fetch2.check_network_access(d, svnupdatecmd, ud.url)
- runfetchcmd(svnupdatecmd, d)
- else:
- svnfetchcmd = self._buildsvncommand(ud, d, "fetch")
- logger.info("Fetch " + loc)
- # check out sources there
- bb.mkdirhier(ud.pkgdir)
- os.chdir(ud.pkgdir)
- logger.debug(1, "Running %s", svnfetchcmd)
- bb.fetch2.check_network_access(d, svnfetchcmd, ud.url)
- runfetchcmd(svnfetchcmd, d)
-
- scmdata = ud.parm.get("scmdata", "")
- if scmdata == "keep":
- tar_flags = ""
- else:
- tar_flags = "--exclude '.svn'"
-
- os.chdir(ud.pkgdir)
- # tar them up to a defined filename
- runfetchcmd("tar %s -czf %s %s" % (tar_flags, ud.localpath, ud.module), d, cleanup = [ud.localpath])
-
- def clean(self, ud, d):
- """ Clean SVN specific files and dirs """
-
- bb.utils.remove(ud.localpath)
- bb.utils.remove(ud.moddir, True)
-
-
- def supports_srcrev(self):
- return True
-
- def _revision_key(self, url, ud, d, name):
- """
- Return a unique key for the url
- """
- return "svn:" + ud.moddir
-
- def _latest_revision(self, url, ud, d, name):
- """
- Return the latest upstream revision number
- """
- bb.fetch2.check_network_access(d, self._buildsvncommand(ud, d, "info"))
-
- output = runfetchcmd("LANG=C LC_ALL=C " + self._buildsvncommand(ud, d, "info"), d, True)
-
- revision = None
- for line in output.splitlines():
- if "Last Changed Rev" in line:
- revision = line.split(":")[1].strip()
-
- return revision
-
- def _sortable_revision(self, url, ud, d):
- """
- Return a sortable revision number which in our case is the revision number
- """
-
- return self._build_revision(url, ud, d)
-
- def _build_revision(self, url, ud, d):
- return ud.revision
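
_latest_revision above scrapes the revision number out of `svn info` output by looking for the "Last Changed Rev" line. A standalone sketch of that scrape, with a slightly tightened match (the sample output is abbreviated):

    def parse_last_changed_rev(output):
        """Extract the revision number from `svn info` output."""
        revision = None
        for line in output.splitlines():
            if line.startswith("Last Changed Rev:"):
                revision = line.split(":", 1)[1].strip()
        return revision

    sample = "Path: trunk\nURL: svn://example.org/repo/trunk\nLast Changed Rev: 4242\n"
    print(parse_last_changed_rev(sample))   # 4242
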
diff --git a/bitbake/lib/bb/fetch2/wget.py b/bitbake/lib/bb/fetch2/wget.py
deleted file mode 100644
index 7bd027adc5..0000000000
--- a/bitbake/lib/bb/fetch2/wget.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'Fetch' implementations
-
-Classes for obtaining upstream sources for the
-BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-import os
-import logging
-import bb
-import urllib
-from bb import data
-from bb.fetch2 import FetchMethod
-from bb.fetch2 import FetchError
-from bb.fetch2 import encodeurl
-from bb.fetch2 import decodeurl
-from bb.fetch2 import logger
-from bb.fetch2 import runfetchcmd
-
-class Wget(FetchMethod):
- """Class to fetch urls via 'wget'"""
- def supports(self, url, ud, d):
- """
- Check to see if a given url can be fetched with wget.
- """
- return ud.type in ['http', 'https', 'ftp']
-
- def urldata_init(self, ud, d):
-
- ud.basename = os.path.basename(ud.path)
- ud.localfile = data.expand(urllib.unquote(ud.basename), d)
-
- def download(self, uri, ud, d, checkonly = False):
- """Fetch urls"""
-
- def fetch_uri(uri, ud, d):
- if checkonly:
- fetchcmd = data.getVar("CHECKCOMMAND", d, True)
- elif os.path.exists(ud.localpath):
- # file exists, but we didn't complete it; trying again
- fetchcmd = data.getVar("RESUMECOMMAND", d, True)
- else:
- fetchcmd = data.getVar("FETCHCOMMAND", d, True)
-
- uri = uri.split(";")[0]
- uri_decoded = list(decodeurl(uri))
- uri_type = uri_decoded[0]
- uri_host = uri_decoded[1]
-
- fetchcmd = fetchcmd.replace("${URI}", uri.split(";")[0])
- fetchcmd = fetchcmd.replace("${FILE}", ud.basename)
- logger.info("fetch " + uri)
- logger.debug(2, "executing " + fetchcmd)
- bb.fetch2.check_network_access(d, fetchcmd)
- runfetchcmd(fetchcmd, d)
-
- # Sanity check since wget can pretend it succeeded when it didn't
- # Also, this used to happen if sourceforge sent us to the mirror page
- if not os.path.exists(ud.localpath) and not checkonly:
- raise FetchError("The fetch command returned success for url %s but %s doesn't exist?!" % (uri, ud.localpath), uri)
-
- localdata = data.createCopy(d)
- data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata)
- data.update_data(localdata)
-
- fetch_uri(uri, ud, localdata)
-
- return True
-
- def checkstatus(self, uri, ud, d):
- return self.download(uri, ud, d, True)
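
The wget fetcher fills in a command template taken from the datastore (FETCHCOMMAND, RESUMECOMMAND or CHECKCOMMAND) by plain string replacement of the ${URI} and ${FILE} placeholders. A minimal sketch of that substitution; the template text here is an invented example, not quoted from any real configuration:

    def render_fetch_command(template, uri, basename):
        """Substitute the ${URI}/${FILE} placeholders in a fetch command."""
        uri = uri.split(";")[0]          # strip url parameters
        cmd = template.replace("${URI}", uri)
        return cmd.replace("${FILE}", basename)

    template = "wget -t 2 -T 30 -O ${FILE} ${URI}"
    print(render_fetch_command(template,
                               "http://example.org/src/foo-1.0.tar.gz;name=foo",
                               "foo-1.0.tar.gz"))
    # wget -t 2 -T 30 -O foo-1.0.tar.gz http://example.org/src/foo-1.0.tar.gz
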
diff --git a/bitbake/lib/bb/methodpool.py b/bitbake/lib/bb/methodpool.py
deleted file mode 100644
index 1485b1357d..0000000000
--- a/bitbake/lib/bb/methodpool.py
+++ /dev/null
@@ -1,84 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-#
-# Copyright (C) 2006 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-
-"""
- What is a method pool?
-
- BitBake has a global method scope where .bb, .inc and .bbclass
- files can install methods. These methods are parsed from strings.
- To avoid recompiling and executing these strings we introduce
- a method pool to do this task.
-
- This pool will be used to compile and execute the functions. It
- will be smart enough to compile each module only once.
-"""
-
-from bb.utils import better_compile, better_exec
-from bb import error
-
-# A dict of modules we have handled
-# it is the number of .bbclasses + x in size
-_parsed_methods = { }
-_parsed_fns = { }
-
-def insert_method(modulename, code, fn):
- """
- Add the code of a module. The methods are
- added as-is; no checking is done.
- """
- comp = better_compile(code, modulename, fn )
- better_exec(comp, None, code, fn)
-
- # now some instrumentation
- code = comp.co_names
- for name in code:
- if name in ['None', 'False']:
- continue
- elif name in _parsed_fns and not _parsed_fns[name] == modulename:
- error( "Error Method already seen: %s in' %s' now in '%s'" % (name, _parsed_fns[name], modulename))
- else:
- _parsed_fns[name] = modulename
-
-def check_insert_method(modulename, code, fn):
- """
- Add the code if it wasn't added before. The module
- name will be used for that
-
- Variables:
- @modulename a short name e.g. base.bbclass
- @code The actual python code
- @fn The filename from the outer file
- """
- if modulename not in _parsed_methods:
- _parsed_methods[modulename] = 1
- return insert_method(modulename, code, fn)
-
-def parsed_module(modulename):
- """
- Inform me file xyz was parsed
- """
- return modulename in _parsed_methods
-
-
-def get_parsed_dict():
- """
- shortcut
- """
- return _parsed_methods
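
The pool boils down to compiling a string of Python once, executing it into a shared namespace, and remembering which module each defined name came from. A stripped-down sketch using the plain compile/exec builtins in place of better_compile/better_exec (cache and helper names are illustrative):

    _seen_modules = {}
    _fn_owner = {}
    _namespace = {}

    def insert_method(modulename, code, filename):
        """Compile and execute a code string once, tracking defined names."""
        comp = compile(code, filename, "exec")
        exec(comp, _namespace)
        for name in comp.co_names:
            owner = _fn_owner.get(name)
            if owner is not None and owner != modulename:
                raise RuntimeError("method %r defined in both %s and %s"
                                   % (name, owner, modulename))
            _fn_owner[name] = modulename

    def check_insert_method(modulename, code, filename):
        """Insert the module's code only the first time it is seen."""
        if modulename not in _seen_modules:
            _seen_modules[modulename] = 1
            insert_method(modulename, code, filename)

    check_insert_method("base.bbclass", "def do_build(d):\n    return 'ok'", "base.bbclass")
    print(_namespace["do_build"](None))   # ok
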
diff --git a/bitbake/lib/bb/msg.py b/bitbake/lib/bb/msg.py
deleted file mode 100644
index 1f9ff904af..0000000000
--- a/bitbake/lib/bb/msg.py
+++ /dev/null
@@ -1,200 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'msg' implementation
-
-Message handling infrastructure for bitbake
-
-"""
-
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import sys
-import logging
-import collections
-from itertools import groupby
-import warnings
-import bb
-import bb.event
-
-class BBLogFormatter(logging.Formatter):
- """Formatter which ensures that our 'plain' messages (logging.INFO + 1) are used as is"""
-
- DEBUG3 = logging.DEBUG - 2
- DEBUG2 = logging.DEBUG - 1
- DEBUG = logging.DEBUG
- VERBOSE = logging.INFO - 1
- NOTE = logging.INFO
- PLAIN = logging.INFO + 1
- ERROR = logging.ERROR
- WARNING = logging.WARNING
- CRITICAL = logging.CRITICAL
-
- levelnames = {
- DEBUG3 : 'DEBUG',
- DEBUG2 : 'DEBUG',
- DEBUG : 'DEBUG',
- VERBOSE: 'NOTE',
- NOTE : 'NOTE',
- PLAIN : '',
- WARNING : 'WARNING',
- ERROR : 'ERROR',
- CRITICAL: 'ERROR',
- }
-
- def getLevelName(self, levelno):
- try:
- return self.levelnames[levelno]
- except KeyError:
- self.levelnames[levelno] = value = 'Level %d' % levelno
- return value
-
- def format(self, record):
- record.levelname = self.getLevelName(record.levelno)
- if record.levelno == self.PLAIN:
- return record.getMessage()
- else:
- return logging.Formatter.format(self, record)
-
-class Loggers(dict):
- def __getitem__(self, key):
- if key in self:
- return dict.__getitem__(self, key)
- else:
- log = logging.getLogger("BitBake.%s" % domain._fields[key])
- dict.__setitem__(self, key, log)
- return log
-
-class DebugLevel(dict):
- def __getitem__(self, key):
- if key == "default":
- key = domain.Default
- return get_debug_level(key)
-
-def _NamedTuple(name, fields):
- Tuple = collections.namedtuple(name, " ".join(fields))
- return Tuple(*range(len(fields)))
-
-domain = _NamedTuple("Domain", (
- "Default",
- "Build",
- "Cache",
- "Collection",
- "Data",
- "Depends",
- "Fetcher",
- "Parsing",
- "PersistData",
- "Provider",
- "RunQueue",
- "TaskData",
- "Util"))
-logger = logging.getLogger("BitBake")
-loggers = Loggers()
-debug_level = DebugLevel()
-
-# Message control functions
-#
-
-def set_debug_level(level):
- for log in loggers.itervalues():
- log.setLevel(logging.NOTSET)
-
- if level:
- logger.setLevel(logging.DEBUG - level + 1)
- else:
- logger.setLevel(logging.INFO)
-
-def get_debug_level(msgdomain = domain.Default):
- if not msgdomain:
- level = logger.getEffectiveLevel()
- else:
- level = loggers[msgdomain].getEffectiveLevel()
- return max(0, logging.DEBUG - level + 1)
-
-def set_verbose(level):
- if level:
- logger.setLevel(BBLogFormatter.VERBOSE)
- else:
- logger.setLevel(BBLogFormatter.INFO)
-
-def set_debug_domains(domainargs):
- for (domainarg, iterator) in groupby(domainargs):
- for index, msgdomain in enumerate(domain._fields):
- if msgdomain == domainarg:
- level = len(tuple(iterator))
- if level:
- loggers[index].setLevel(logging.DEBUG - level + 1)
- break
- else:
- warn(None, "Logging domain %s is not valid, ignoring" % domainarg)
-
-#
-# Message handling functions
-#
-
-def debug(level, msgdomain, msg):
- warnings.warn("bb.msg.debug will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- level = logging.DEBUG - (level - 1)
- if not msgdomain:
- logger.debug(level, msg)
- else:
- loggers[msgdomain].debug(level, msg)
-
-def plain(msg):
- warnings.warn("bb.msg.plain will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- logger.plain(msg)
-
-def note(level, msgdomain, msg):
- warnings.warn("bb.msg.note will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- if level > 1:
- if not msgdomain:
- logger.verbose(msg)
- else:
- loggers[msgdomain].verbose(msg)
- else:
- if not msgdomain:
- logger.info(msg)
- else:
- loggers[msgdomain].info(msg)
-
-def warn(msgdomain, msg):
- warnings.warn("bb.msg.warn will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- if not msgdomain:
- logger.warn(msg)
- else:
- loggers[msgdomain].warn(msg)
-
-def error(msgdomain, msg):
- warnings.warn("bb.msg.error will soon be deprecated in favor of the python 'logging' module",
- PendingDeprecationWarning, stacklevel=2)
- if not msgdomain:
- logger.error(msg)
- else:
- loggers[msgdomain].error(msg)
-
-def fatal(msgdomain, msg):
- warnings.warn("bb.msg.fatal will soon be deprecated in favor of raising appropriate exceptions",
- PendingDeprecationWarning, stacklevel=2)
- if not msgdomain:
- logger.critical(msg)
- else:
- loggers[msgdomain].critical(msg)
- sys.exit(1)
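
The formatter above maps BitBake's extra levels (two debug levels below logging.DEBUG, plus VERBOSE and PLAIN around logging.INFO) onto conventional names, and passes PLAIN records through without decoration. A compressed standalone sketch of that pattern on top of the stock logging module:

    import logging

    PLAIN = logging.INFO + 1
    VERBOSE = logging.INFO - 1

    class PlainAwareFormatter(logging.Formatter):
        """Format everything normally except PLAIN records, which pass through."""
        def format(self, record):
            if record.levelno == PLAIN:
                return record.getMessage()
            return logging.Formatter.format(self, record)

    handler = logging.StreamHandler()
    handler.setFormatter(PlainAwareFormatter("%(levelname)s: %(message)s"))
    log = logging.getLogger("BitBake.Demo")
    log.addHandler(handler)
    log.setLevel(VERBOSE)

    log.info("a normal note")          # -> INFO: a normal note
    log.log(PLAIN, "raw output line")  # -> raw output line
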
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py
deleted file mode 100644
index eee8d9cddb..0000000000
--- a/bitbake/lib/bb/parse/__init__.py
+++ /dev/null
@@ -1,123 +0,0 @@
-"""
-BitBake Parsers
-
-File parsers for the BitBake build tools.
-
-"""
-
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-handlers = []
-
-import os
-import stat
-import logging
-import bb
-import bb.utils
-import bb.siggen
-
-logger = logging.getLogger("BitBake.Parsing")
-
-class ParseError(Exception):
- """Exception raised when parsing fails"""
-
-class SkipPackage(Exception):
- """Exception raised to skip this package"""
-
-__mtime_cache = {}
-def cached_mtime(f):
- if f not in __mtime_cache:
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- return __mtime_cache[f]
-
-def cached_mtime_noerror(f):
- if f not in __mtime_cache:
- try:
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- except OSError:
- return 0
- return __mtime_cache[f]
-
-def update_mtime(f):
- __mtime_cache[f] = os.stat(f)[stat.ST_MTIME]
- return __mtime_cache[f]
-
-def mark_dependency(d, f):
- if f.startswith('./'):
- f = "%s/%s" % (os.getcwd(), f[2:])
- deps = bb.data.getVar('__depends', d) or set()
- deps.update([(f, cached_mtime(f))])
- bb.data.setVar('__depends', deps, d)
-
-def supports(fn, data):
- """Returns true if we have a handler for this file, false otherwise"""
- for h in handlers:
- if h['supports'](fn, data):
- return 1
- return 0
-
-def handle(fn, data, include = 0):
- """Call the handler that is appropriate for this file"""
- for h in handlers:
- if h['supports'](fn, data):
- return h['handle'](fn, data, include)
- raise ParseError("%s is not a BitBake file" % fn)
-
-def init(fn, data):
- for h in handlers:
- if h['supports'](fn):
- return h['init'](data)
-
-def init_parser(d):
- bb.parse.siggen = bb.siggen.init(d)
-
-def resolve_file(fn, d):
- if not os.path.isabs(fn):
- bbpath = bb.data.getVar("BBPATH", d, True)
- newfn = bb.utils.which(bbpath, fn)
- if not newfn:
- raise IOError("file %s not found in %s" % (fn, bbpath))
- fn = newfn
-
- logger.debug(2, "LOAD %s", fn)
- return fn
-
-# Used by OpenEmbedded metadata
-__pkgsplit_cache__={}
-def vars_from_file(mypkg, d):
- if not mypkg:
- return (None, None, None)
- if mypkg in __pkgsplit_cache__:
- return __pkgsplit_cache__[mypkg]
-
- myfile = os.path.splitext(os.path.basename(mypkg))
- parts = myfile[0].split('_')
- __pkgsplit_cache__[mypkg] = parts
- if len(parts) > 3:
- raise ParseError("Unable to generate default variables from the filename: %s (too many underscores)" % mypkg)
- exp = 3 - len(parts)
- tmplist = []
- while exp != 0:
- exp -= 1
- tmplist.append(None)
- parts.extend(tmplist)
- return parts
-
-from bb.parse.parse_py import __version__, ConfHandler, BBHandler
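
vars_from_file above derives the default (PN, PV, PR) triple by splitting the recipe's base filename on underscores and padding with None up to three fields. A quick self-contained demonstration of that convention (file names are illustrative):

    import os

    def vars_from_filename(path):
        """Split foo_1.2_r0.bb into up to three fields, padded with None."""
        base = os.path.splitext(os.path.basename(path))[0]
        parts = base.split("_")
        if len(parts) > 3:
            raise ValueError("too many underscores in %s" % path)
        return parts + [None] * (3 - len(parts))

    print(vars_from_filename("recipes/zlib_1.2.5.bb"))   # ['zlib', '1.2.5', None]
    print(vars_from_filename("recipes/busybox.bb"))      # ['busybox', None, None]
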
diff --git a/bitbake/lib/bb/parse/ast.py b/bitbake/lib/bb/parse/ast.py
deleted file mode 100644
index b968db40b3..0000000000
--- a/bitbake/lib/bb/parse/ast.py
+++ /dev/null
@@ -1,446 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
- AbstractSyntaxTree classes for the Bitbake language
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2009 Holger Hans Peter Freyther
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import absolute_import
-from future_builtins import filter
-import re
-import string
-import logging
-import bb
-import itertools
-from bb import methodpool
-from bb.parse import logger
-
-__parsed_methods__ = bb.methodpool.get_parsed_dict()
-_bbversions_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")
-
-class StatementGroup(list):
- def eval(self, data):
- for statement in self:
- statement.eval(data)
-
-class AstNode(object):
- def __init__(self, filename, lineno):
- self.filename = filename
- self.lineno = lineno
-
-class IncludeNode(AstNode):
- def __init__(self, filename, lineno, what_file, force):
- AstNode.__init__(self, filename, lineno)
- self.what_file = what_file
- self.force = force
-
- def eval(self, data):
- """
- Include the file and evaluate the statements
- """
- s = bb.data.expand(self.what_file, data)
- logger.debug(2, "CONF %s:%s: including %s", self.filename, self.lineno, s)
-
- # TODO: Cache those includes... maybe not here though
- if self.force:
- bb.parse.ConfHandler.include(self.filename, s, data, "include required")
- else:
- bb.parse.ConfHandler.include(self.filename, s, data, False)
-
-class ExportNode(AstNode):
- def __init__(self, filename, lineno, var):
- AstNode.__init__(self, filename, lineno)
- self.var = var
-
- def eval(self, data):
- bb.data.setVarFlag(self.var, "export", 1, data)
-
-class DataNode(AstNode):
- """
- Various data related updates. For the sake of sanity
- we have one class doing all this. This means that all
- this needs to be re-evaluated... we might be able to do
- that faster with multiple classes.
- """
- def __init__(self, filename, lineno, groupd):
- AstNode.__init__(self, filename, lineno)
- self.groupd = groupd
-
- def getFunc(self, key, data):
- if 'flag' in self.groupd and self.groupd['flag'] != None:
- return bb.data.getVarFlag(key, self.groupd['flag'], data)
- else:
- return bb.data.getVar(key, data)
-
- def eval(self, data):
- groupd = self.groupd
- key = groupd["var"]
- if "exp" in groupd and groupd["exp"] != None:
- bb.data.setVarFlag(key, "export", 1, data)
- if "ques" in groupd and groupd["ques"] != None:
- val = self.getFunc(key, data)
- if val == None:
- val = groupd["value"]
- elif "colon" in groupd and groupd["colon"] != None:
- e = data.createCopy()
- bb.data.update_data(e)
- val = bb.data.expand(groupd["value"], e)
- elif "append" in groupd and groupd["append"] != None:
- val = "%s %s" % ((self.getFunc(key, data) or ""), groupd["value"])
- elif "prepend" in groupd and groupd["prepend"] != None:
- val = "%s %s" % (groupd["value"], (self.getFunc(key, data) or ""))
- elif "postdot" in groupd and groupd["postdot"] != None:
- val = "%s%s" % ((self.getFunc(key, data) or ""), groupd["value"])
- elif "predot" in groupd and groupd["predot"] != None:
- val = "%s%s" % (groupd["value"], (self.getFunc(key, data) or ""))
- else:
- val = groupd["value"]
-
- if 'flag' in groupd and groupd['flag'] != None:
- bb.data.setVarFlag(key, groupd['flag'], val, data)
- elif groupd["lazyques"]:
- bb.data.setVarFlag(key, "defaultval", val, data)
- else:
- bb.data.setVar(key, val, data)
-
-class MethodNode(AstNode):
- def __init__(self, filename, lineno, func_name, body):
- AstNode.__init__(self, filename, lineno)
- self.func_name = func_name
- self.body = body
-
- def eval(self, data):
- if self.func_name == "__anonymous":
- funcname = ("__anon_%s_%s" % (self.lineno, self.filename.translate(string.maketrans('/.+-', '____'))))
- if not funcname in bb.methodpool._parsed_fns:
- text = "def %s(d):\n" % (funcname) + '\n'.join(self.body)
- bb.methodpool.insert_method(funcname, text, self.filename)
- anonfuncs = bb.data.getVar('__BBANONFUNCS', data) or []
- anonfuncs.append(funcname)
- bb.data.setVar('__BBANONFUNCS', anonfuncs, data)
- else:
- bb.data.setVarFlag(self.func_name, "func", 1, data)
- bb.data.setVar(self.func_name, '\n'.join(self.body), data)
-
-class PythonMethodNode(AstNode):
- def __init__(self, filename, lineno, function, define, body):
- AstNode.__init__(self, filename, lineno)
- self.function = function
- self.define = define
- self.body = body
-
- def eval(self, data):
- # Note we will add root to parsedmethods after having parsed
- # 'this' file. This means we will not parse methods from
- # bb classes twice.
- text = '\n'.join(self.body)
- if not bb.methodpool.parsed_module(self.define):
- bb.methodpool.insert_method(self.define, text, self.filename)
- bb.data.setVarFlag(self.function, "func", 1, data)
- bb.data.setVarFlag(self.function, "python", 1, data)
- bb.data.setVar(self.function, text, data)
-
-class MethodFlagsNode(AstNode):
- def __init__(self, filename, lineno, key, m):
- AstNode.__init__(self, filename, lineno)
- self.key = key
- self.m = m
-
- def eval(self, data):
- if bb.data.getVar(self.key, data):
- # clean up old version of this piece of metadata, as its
- # flags could cause problems
- bb.data.setVarFlag(self.key, 'python', None, data)
- bb.data.setVarFlag(self.key, 'fakeroot', None, data)
- if self.m.group("py") is not None:
- bb.data.setVarFlag(self.key, "python", "1", data)
- else:
- bb.data.delVarFlag(self.key, "python", data)
- if self.m.group("fr") is not None:
- bb.data.setVarFlag(self.key, "fakeroot", "1", data)
- else:
- bb.data.delVarFlag(self.key, "fakeroot", data)
-
-class ExportFuncsNode(AstNode):
- def __init__(self, filename, lineno, fns, classes):
- AstNode.__init__(self, filename, lineno)
- self.n = fns.split()
- self.classes = classes
-
- def eval(self, data):
- for f in self.n:
- allvars = []
- allvars.append(f)
- allvars.append(self.classes[-1] + "_" + f)
-
- vars = [[ allvars[0], allvars[1] ]]
- if len(self.classes) > 1 and self.classes[-2] is not None:
- allvars.append(self.classes[-2] + "_" + f)
- vars = []
- vars.append([allvars[2], allvars[1]])
- vars.append([allvars[0], allvars[2]])
-
- for (var, calledvar) in vars:
- if bb.data.getVar(var, data) and not bb.data.getVarFlag(var, 'export_func', data):
- continue
-
- if bb.data.getVar(var, data):
- bb.data.setVarFlag(var, 'python', None, data)
- bb.data.setVarFlag(var, 'func', None, data)
-
- for flag in [ "func", "python" ]:
- if bb.data.getVarFlag(calledvar, flag, data):
- bb.data.setVarFlag(var, flag, bb.data.getVarFlag(calledvar, flag, data), data)
- for flag in [ "dirs" ]:
- if bb.data.getVarFlag(var, flag, data):
- bb.data.setVarFlag(calledvar, flag, bb.data.getVarFlag(var, flag, data), data)
-
- if bb.data.getVarFlag(calledvar, "python", data):
- bb.data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", data)
- else:
- bb.data.setVar(var, "\t" + calledvar + "\n", data)
- bb.data.setVarFlag(var, 'export_func', '1', data)
-
-class AddTaskNode(AstNode):
- def __init__(self, filename, lineno, func, before, after):
- AstNode.__init__(self, filename, lineno)
- self.func = func
- self.before = before
- self.after = after
-
- def eval(self, data):
- var = self.func
- if self.func[:3] != "do_":
- var = "do_" + self.func
-
- bb.data.setVarFlag(var, "task", 1, data)
- bbtasks = bb.data.getVar('__BBTASKS', data) or []
- if not var in bbtasks:
- bbtasks.append(var)
- bb.data.setVar('__BBTASKS', bbtasks, data)
-
- existing = bb.data.getVarFlag(var, "deps", data) or []
- if self.after is not None:
- # set up deps for function
- for entry in self.after.split():
- if entry not in existing:
- existing.append(entry)
- bb.data.setVarFlag(var, "deps", existing, data)
- if self.before is not None:
- # set up things that depend on this func
- for entry in self.before.split():
- existing = bb.data.getVarFlag(entry, "deps", data) or []
- if var not in existing:
- bb.data.setVarFlag(entry, "deps", [var] + existing, data)
-
-class BBHandlerNode(AstNode):
- def __init__(self, filename, lineno, fns):
- AstNode.__init__(self, filename, lineno)
- self.hs = fns.split()
-
- def eval(self, data):
- bbhands = bb.data.getVar('__BBHANDLERS', data) or []
- for h in self.hs:
- bbhands.append(h)
- bb.data.setVarFlag(h, "handler", 1, data)
- bb.data.setVar('__BBHANDLERS', bbhands, data)
-
-class InheritNode(AstNode):
- def __init__(self, filename, lineno, classes):
- AstNode.__init__(self, filename, lineno)
- self.classes = classes
-
- def eval(self, data):
- bb.parse.BBHandler.inherit(self.classes, data)
-
-def handleInclude(statements, filename, lineno, m, force):
- statements.append(IncludeNode(filename, lineno, m.group(1), force))
-
-def handleExport(statements, filename, lineno, m):
- statements.append(ExportNode(filename, lineno, m.group(1)))
-
-def handleData(statements, filename, lineno, groupd):
- statements.append(DataNode(filename, lineno, groupd))
-
-def handleMethod(statements, filename, lineno, func_name, body):
- statements.append(MethodNode(filename, lineno, func_name, body))
-
-def handlePythonMethod(statements, filename, lineno, funcname, root, body):
- statements.append(PythonMethodNode(filename, lineno, funcname, root, body))
-
-def handleMethodFlags(statements, filename, lineno, key, m):
- statements.append(MethodFlagsNode(filename, lineno, key, m))
-
-def handleExportFuncs(statements, filename, lineno, m, classes):
- statements.append(ExportFuncsNode(filename, lineno, m.group(1), classes))
-
-def handleAddTask(statements, filename, lineno, m):
- func = m.group("func")
- before = m.group("before")
- after = m.group("after")
- if func is None:
- return
-
- statements.append(AddTaskNode(filename, lineno, func, before, after))
-
-def handleBBHandlers(statements, filename, lineno, m):
- statements.append(BBHandlerNode(filename, lineno, m.group(1)))
-
-def handleInherit(statements, filename, lineno, m):
- classes = m.group(1)
- statements.append(InheritNode(filename, lineno, classes.split()))
-
-def finalize(fn, d, variant = None):
- bb.data.expandKeys(d)
- bb.data.update_data(d)
- code = []
- for funcname in bb.data.getVar("__BBANONFUNCS", d) or []:
- code.append("%s(d)" % funcname)
- bb.utils.simple_exec("\n".join(code), {"d": d})
- bb.data.update_data(d)
-
- all_handlers = {}
- for var in bb.data.getVar('__BBHANDLERS', d) or []:
- # try to add the handler
- handler = bb.data.getVar(var, d)
- bb.event.register(var, handler)
-
- tasklist = bb.data.getVar('__BBTASKS', d) or []
- bb.build.add_tasks(tasklist, d)
-
- bb.parse.siggen.finalise(fn, d, variant)
-
- bb.event.fire(bb.event.RecipeParsed(fn), d)
-
-def _create_variants(datastores, names, function):
- def create_variant(name, orig_d, arg = None):
- new_d = bb.data.createCopy(orig_d)
- function(arg or name, new_d)
- datastores[name] = new_d
-
- for variant, variant_d in datastores.items():
- for name in names:
- if not variant:
- # Based on main recipe
- create_variant(name, variant_d)
- else:
- create_variant("%s-%s" % (variant, name), variant_d, name)
-
-def _expand_versions(versions):
- def expand_one(version, start, end):
- for i in xrange(start, end + 1):
- ver = _bbversions_re.sub(str(i), version, 1)
- yield ver
-
- versions = iter(versions)
- while True:
- try:
- version = next(versions)
- except StopIteration:
- break
-
- range_ver = _bbversions_re.search(version)
- if not range_ver:
- yield version
- else:
- newversions = expand_one(version, int(range_ver.group("from")),
- int(range_ver.group("to")))
- versions = itertools.chain(newversions, versions)
-
-def multi_finalize(fn, d):
- appends = (d.getVar("__BBAPPEND", True) or "").split()
- for append in appends:
- logger.debug(2, "Appending .bbappend file %s to %s", append, fn)
- bb.parse.BBHandler.handle(append, d, True)
-
- safe_d = d
- d = bb.data.createCopy(safe_d)
- try:
- finalize(fn, d)
- except bb.parse.SkipPackage:
- bb.data.setVar("__SKIPPED", True, d)
- datastores = {"": safe_d}
-
- versions = (d.getVar("BBVERSIONS", True) or "").split()
- if versions:
- pv = orig_pv = d.getVar("PV", True)
- baseversions = {}
-
- def verfunc(ver, d, pv_d = None):
- if pv_d is None:
- pv_d = d
-
- overrides = d.getVar("OVERRIDES", True).split(":")
- pv_d.setVar("PV", ver)
- overrides.append(ver)
- bpv = baseversions.get(ver) or orig_pv
- pv_d.setVar("BPV", bpv)
- overrides.append(bpv)
- d.setVar("OVERRIDES", ":".join(overrides))
-
- versions = list(_expand_versions(versions))
- for pos, version in enumerate(list(versions)):
- try:
- pv, bpv = version.split(":", 2)
- except ValueError:
- pass
- else:
- versions[pos] = pv
- baseversions[pv] = bpv
-
- if pv in versions and not baseversions.get(pv):
- versions.remove(pv)
- else:
- pv = versions.pop()
-
- # This is necessary because our existing main datastore
- # has already been finalized with the old PV, we need one
- # that's been finalized with the new PV.
- d = bb.data.createCopy(safe_d)
- verfunc(pv, d, safe_d)
- try:
- finalize(fn, d)
- except bb.parse.SkipPackage:
- bb.data.setVar("__SKIPPED", True, d)
-
- _create_variants(datastores, versions, verfunc)
-
- extended = d.getVar("BBCLASSEXTEND", True) or ""
- if extended:
- pn = d.getVar("PN", True)
- def extendfunc(name, d):
- d.setVar("PN", "%s-%s" % (pn, name))
- bb.parse.BBHandler.inherit([name], d)
-
- safe_d.setVar("BBCLASSEXTEND", extended)
- _create_variants(datastores, extended.split(), extendfunc)
-
- for variant, variant_d in datastores.iteritems():
- if variant:
- try:
- finalize(fn, variant_d, variant)
- except bb.parse.SkipPackage:
- bb.data.setVar("__SKIPPED", True, variant_d)
-
- if len(datastores) > 1:
- variants = filter(None, datastores.iterkeys())
- safe_d.setVar("__VARIANTS", " ".join(variants))
-
- datastores[""] = d
- return datastores
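
_expand_versions above turns BBVERSIONS entries such as "1.0.[0-3]" into individual versions by substituting each integer of the bracketed range and re-queueing the results, so nested ranges expand on a later pass. A standalone sketch of the same generator, with py3 range in place of xrange:

    import itertools
    import re

    _range_re = re.compile(r"\[(?P<from>[0-9]+)-(?P<to>[0-9]+)\]")

    def expand_versions(versions):
        """Expand [a-b] ranges in version strings, one range per pass."""
        versions = iter(versions)
        while True:
            try:
                version = next(versions)
            except StopIteration:
                return
            m = _range_re.search(version)
            if not m:
                yield version
            else:
                expanded = [_range_re.sub(str(i), version, 1)
                            for i in range(int(m.group("from")),
                                           int(m.group("to")) + 1)]
                versions = itertools.chain(expanded, versions)

    print(list(expand_versions(["2.6", "1.0.[0-2]"])))
    # ['2.6', '1.0.0', '1.0.1', '1.0.2']
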
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py
deleted file mode 100644
index 402cd07e2a..0000000000
--- a/bitbake/lib/bb/parse/parse_py/BBHandler.py
+++ /dev/null
@@ -1,254 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
- class for handling .bb files
-
- Reads a .bb file and obtains its metadata
-
-"""
-
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import absolute_import
-import re, bb, os
-import logging
-import bb.build, bb.utils
-from bb import data
-
-from . import ConfHandler
-from .. import resolve_file, ast, logger
-from .ConfHandler import include, init
-
-# For compatibility
-bb.deprecate_import(__name__, "bb.parse", ["vars_from_file"])
-
-__func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" )
-__inherit_regexp__ = re.compile( r"inherit\s+(.+)" )
-__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" )
-__addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")
-__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" )
-__def_regexp__ = re.compile( r"def\s+(\w+).*:" )
-__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" )
-
-
-__infunc__ = ""
-__inpython__ = False
-__body__ = []
-__classname__ = ""
-classes = [ None, ]
-
-cached_statements = {}
-
-# We need to indicate EOF to the feeder. This code is so messy that
-# factoring it out to a close_parse_file method is out of question.
-# We will use the IN_PYTHON_EOF as an indicator to just close the method
-#
-# The two parts using it are tightly integrated anyway
-IN_PYTHON_EOF = -9999999999999
-
-
-
-def supports(fn, d):
- """Return True if fn has a supported extension"""
- return os.path.splitext(fn)[-1] in [".bb", ".bbclass", ".inc"]
-
-def inherit(files, d):
- __inherit_cache = data.getVar('__inherit_cache', d) or []
- fn = ""
- lineno = 0
- for file in files:
- file = data.expand(file, d)
- if not os.path.isabs(file) and not file.endswith(".bbclass"):
- file = os.path.join('classes', '%s.bbclass' % file)
-
- if not file in __inherit_cache:
- logger.log(logging.DEBUG - 1, "BB %s:%d: inheriting %s", fn, lineno, file)
- __inherit_cache.append( file )
- data.setVar('__inherit_cache', __inherit_cache, d)
- include(fn, file, d, "inherit")
- __inherit_cache = data.getVar('__inherit_cache', d) or []
-
-def get_statements(filename, absolute_filename, base_name):
- global cached_statements
-
- try:
- return cached_statements[absolute_filename]
- except KeyError:
- file = open(absolute_filename, 'r')
- statements = ast.StatementGroup()
-
- lineno = 0
- while True:
- lineno = lineno + 1
- s = file.readline()
- if not s: break
- s = s.rstrip()
- feeder(lineno, s, filename, base_name, statements)
- if __inpython__:
- # add a blank line to close out any python definition
- feeder(IN_PYTHON_EOF, "", filename, base_name, statements)
-
- if filename.endswith(".bbclass") or filename.endswith(".inc"):
- cached_statements[absolute_filename] = statements
- return statements
-
-def handle(fn, d, include):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __residue__
- __body__ = []
- __infunc__ = ""
- __classname__ = ""
- __residue__ = []
-
-
- if include == 0:
- logger.debug(2, "BB %s: handle(data)", fn)
- else:
- logger.debug(2, "BB %s: handle(data, include)", fn)
-
- base_name = os.path.basename(fn)
- (root, ext) = os.path.splitext(base_name)
- init(d)
-
- if ext == ".bbclass":
- __classname__ = root
- classes.append(__classname__)
- __inherit_cache = data.getVar('__inherit_cache', d) or []
- if not fn in __inherit_cache:
- __inherit_cache.append(fn)
- data.setVar('__inherit_cache', __inherit_cache, d)
-
- if include != 0:
- oldfile = data.getVar('FILE', d)
- else:
- oldfile = None
-
- abs_fn = resolve_file(fn, d)
-
- if include:
- bb.parse.mark_dependency(d, abs_fn)
-
- # actual loading
- statements = get_statements(fn, abs_fn, base_name)
-
- # DONE WITH PARSING... time to evaluate
- if ext != ".bbclass":
- data.setVar('FILE', fn, d)
-
- statements.eval(d)
-
- if ext == ".bbclass":
- classes.remove(__classname__)
- else:
- if include == 0:
- return ast.multi_finalize(fn, d)
-
- if oldfile:
- bb.data.setVar("FILE", oldfile, d)
-
- # we have parsed the bb class now
- if ext == ".bbclass" or ext == ".inc":
- bb.methodpool.get_parsed_dict()[base_name] = 1
-
- return d
-
-def feeder(lineno, s, fn, root, statements):
- global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__, __infunc__, __body__, classes, bb, __residue__
- if __infunc__:
- if s == '}':
- __body__.append('')
- ast.handleMethod(statements, fn, lineno, __infunc__, __body__)
- __infunc__ = ""
- __body__ = []
- else:
- __body__.append(s)
- return
-
- if __inpython__:
- m = __python_func_regexp__.match(s)
- if m and lineno != IN_PYTHON_EOF:
- __body__.append(s)
- return
- else:
- ast.handlePythonMethod(statements, fn, lineno, __inpython__,
- root, __body__)
- __body__ = []
- __inpython__ = False
-
- if lineno == IN_PYTHON_EOF:
- return
-
-
- # Skip empty lines
- if s == '':
- return
-
- if s[0] == '#':
- if len(__residue__) != 0 and __residue__[0][0] != "#":
- bb.error("There is a comment on line %s of file %s (%s) which is in the middle of a multiline expression.\nBitbake used to ignore these but no longer does so, please fix your metadata as errors are likely as a result of this change." % (lineno, fn, s))
-
- if s[-1] == '\\':
- __residue__.append(s[:-1])
- return
-
- s = "".join(__residue__) + s
- __residue__ = []
-
- # Skip comments
- if s[0] == '#':
- return
-
- m = __func_start_regexp__.match(s)
- if m:
- __infunc__ = m.group("func") or "__anonymous"
- ast.handleMethodFlags(statements, fn, lineno, __infunc__, m)
- return
-
- m = __def_regexp__.match(s)
- if m:
- __body__.append(s)
- __inpython__ = m.group(1)
-
- return
-
- m = __export_func_regexp__.match(s)
- if m:
- ast.handleExportFuncs(statements, fn, lineno, m, classes)
- return
-
- m = __addtask_regexp__.match(s)
- if m:
- ast.handleAddTask(statements, fn, lineno, m)
- return
-
- m = __addhandler_regexp__.match(s)
- if m:
- ast.handleBBHandlers(statements, fn, lineno, m)
- return
-
- m = __inherit_regexp__.match(s)
- if m:
- ast.handleInherit(statements, fn, lineno, m)
- return
-
- return ConfHandler.feeder(lineno, s, fn, statements)
-
-# Add us to the handlers list
-from .. import handlers
-handlers.append({'supports': supports, 'handle': handle, 'init': init})
-del handlers
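
The feeder dispatches each line through a series of anchored regexes; shell-style function bodies open on a match of __func_start_regexp__ and close on a bare '}', while pure-python definitions are collected from a `def` line onward. A small demonstration of the two function-opening patterns against typical recipe lines (the sample lines are invented):

    import re

    func_start = re.compile(
        r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*"
        r"(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$")
    def_line = re.compile(r"def\s+(\w+).*:")

    for line in ["do_install () {",
                 "python do_patch () {",
                 "fakeroot python () {",
                 "def get_depends(d):"]:
        m = func_start.match(line)
        if m:
            print("shell/python func:", m.group("func") or "__anonymous",
                  "python" if m.group("py") else "shell")
        elif def_line.match(line):
            print("pure python def:", def_line.match(line).group(1))
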
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py
deleted file mode 100644
index fc239a3540..0000000000
--- a/bitbake/lib/bb/parse/parse_py/ConfHandler.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
- class for handling configuration data files
-
- Reads a .conf file and obtains its metadata
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, bb.data, os
-import logging
-import bb.utils
-from bb.parse import ParseError, resolve_file, ast, logger
-
-#__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
-__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")
-__include_regexp__ = re.compile( r"include\s+(.+)" )
-__require_regexp__ = re.compile( r"require\s+(.+)" )
-__export_regexp__ = re.compile( r"export\s+(.+)" )
-
-def init(data):
- topdir = bb.data.getVar('TOPDIR', data)
- if not topdir:
- bb.data.setVar('TOPDIR', os.getcwd(), data)
-
-
-def supports(fn, d):
- return fn[-5:] == ".conf"
-
-def include(oldfn, fn, data, error_out):
- """
- error_out: if truthy, a ParseError is raised when the file to be
- included cannot be found; the value is used in the error message.
- """
- if oldfn == fn: # prevent infinite recursion
- return None
-
- import bb
- fn = bb.data.expand(fn, data)
- oldfn = bb.data.expand(oldfn, data)
-
- if not os.path.isabs(fn):
- dname = os.path.dirname(oldfn)
- bbpath = "%s:%s" % (dname, bb.data.getVar("BBPATH", data, 1))
- abs_fn = bb.utils.which(bbpath, fn)
- if abs_fn:
- fn = abs_fn
-
- from bb.parse import handle
- try:
- ret = handle(fn, data, True)
- except IOError:
- if error_out:
- raise ParseError("Could not %(error_out)s file %(fn)s" % vars() )
- logger.debug(2, "CONF file '%s' not found", fn)
-
-def handle(fn, data, include):
- init(data)
-
- if include == 0:
- oldfile = None
- else:
- oldfile = bb.data.getVar('FILE', data)
-
- abs_fn = resolve_file(fn, data)
- f = open(abs_fn, 'r')
-
- if include:
- bb.parse.mark_dependency(data, abs_fn)
-
- statements = ast.StatementGroup()
- lineno = 0
- while True:
- lineno = lineno + 1
- s = f.readline()
- if not s: break
- w = s.strip()
- if not w: continue # skip empty lines
- s = s.rstrip()
- if s[0] == '#': continue # skip comments
- while s[-1] == '\\':
- s2 = f.readline()[:-1].strip()
- lineno = lineno + 1
- s = s[:-1] + s2
- feeder(lineno, s, fn, statements)
-
- # DONE WITH PARSING... time to evaluate
- bb.data.setVar('FILE', fn, data)
- statements.eval(data)
- if oldfile:
- bb.data.setVar('FILE', oldfile, data)
-
- return data
-
-def feeder(lineno, s, fn, statements):
- m = __config_regexp__.match(s)
- if m:
- groupd = m.groupdict()
- ast.handleData(statements, fn, lineno, groupd)
- return
-
- m = __include_regexp__.match(s)
- if m:
- ast.handleInclude(statements, fn, lineno, m, False)
- return
-
- m = __require_regexp__.match(s)
- if m:
- ast.handleInclude(statements, fn, lineno, m, True)
- return
-
- m = __export_regexp__.match(s)
- if m:
- ast.handleExport(statements, fn, lineno, m)
- return
-
- raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s));
-
-# Add us to the handlers list
-from bb.parse import handlers
-handlers.append({'supports': supports, 'handle': handle, 'init': init})
-del handlers
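
Nearly the entire configuration syntax lives in the single __config_regexp__ above; the named operator groups (colon, lazyques, ques, append, prepend, predot, postdot) tell DataNode in ast.py which assignment semantics to apply. A short demonstration of the groups produced for typical lines (the pattern is copied from above; the sample assignments are invented):

    import re

    config_re = re.compile(
        r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)"
        r"(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*"
        r"((?P<colon>:=)|(?P<lazyques>\?\?=)|(?P<ques>\?=)|(?P<append>\+=)"
        r"|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*"
        r"(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$")

    for line in ['PN = "zlib"',
                 'PV ?= "1.2.5"',
                 'SRC_URI += "file://fix.patch"',
                 'do_fetch[depends] = "quilt:do_populate"']:
        m = config_re.match(line)
        ops = [op for op in ("colon", "lazyques", "ques", "append",
                             "prepend", "predot", "postdot") if m.group(op)]
        print(m.group("var"), m.group("flag"), ops or ["="], m.group("value"))
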
diff --git a/bitbake/lib/bb/parse/parse_py/__init__.py b/bitbake/lib/bb/parse/parse_py/__init__.py
deleted file mode 100644
index 3e658d0de9..0000000000
--- a/bitbake/lib/bb/parse/parse_py/__init__.py
+++ /dev/null
@@ -1,33 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake Parsers
-
-File parsers for the BitBake build tools.
-
-"""
-
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-# Based on functions from the base bb module, Copyright 2003 Holger Schurig
-
-from __future__ import absolute_import
-from . import ConfHandler
-from . import BBHandler
-
-__version__ = '1.0'
diff --git a/bitbake/lib/bb/persist_data.py b/bitbake/lib/bb/persist_data.py
deleted file mode 100644
index da05752311..0000000000
--- a/bitbake/lib/bb/persist_data.py
+++ /dev/null
@@ -1,194 +0,0 @@
-"""BitBake Persistent Data Store
-
-Used to store data in a central location such that other threads/tasks can
-access them at some future date. Acts as a convenience wrapper around sqlite,
-currently, providing a key/value store accessed by 'domain'.
-"""
-
-# Copyright (C) 2007 Richard Purdie
-# Copyright (C) 2010 Chris Larson <chris_larson@mentor.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import collections
-import logging
-import os.path
-import sys
-import warnings
-import bb.msg, bb.data, bb.utils
-
-try:
- import sqlite3
-except ImportError:
- from pysqlite2 import dbapi2 as sqlite3
-
-sqlversion = sqlite3.sqlite_version_info
-if sqlversion[0] < 3 or (sqlversion[0] == 3 and sqlversion[1] < 3):
- raise Exception("sqlite3 version 3.3.0 or later is required.")
-
-
-logger = logging.getLogger("BitBake.PersistData")
-
-
-class SQLTable(collections.MutableMapping):
- """Object representing a table/domain in the database"""
- def __init__(self, cursor, table):
- self.cursor = cursor
- self.table = table
-
- self._execute("CREATE TABLE IF NOT EXISTS %s(key TEXT, value TEXT);"
- % table)
-
- def _execute(self, *query):
- """Execute a query, waiting to acquire a lock if necessary"""
- count = 0
- while True:
- try:
- return self.cursor.execute(*query)
- except sqlite3.OperationalError as exc:
- if 'database is locked' in str(exc) and count < 500:
- count = count + 1
- continue
- raise
-
- def __getitem__(self, key):
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- for row in data:
- return row[1]
- raise KeyError(key)
-
- def __delitem__(self, key):
- self._execute("DELETE from %s where key=?;" % self.table, [key])
-
- def __setitem__(self, key, value):
- data = self._execute("SELECT * from %s where key=?;" %
- self.table, [key])
- exists = len(list(data))
- if exists:
- self._execute("UPDATE %s SET value=? WHERE key=?;" % self.table,
- [value, key])
- else:
- self._execute("INSERT into %s(key, value) values (?, ?);" %
- self.table, [key, value])
-
- def __contains__(self, key):
- return key in set(self)
-
- def __len__(self):
- data = self._execute("SELECT COUNT(key) FROM %s;" % self.table)
- for row in data:
- return row[0]
-
- def __iter__(self):
- data = self._execute("SELECT key FROM %s;" % self.table)
- for row in data:
- yield row[0]
-
- def iteritems(self):
- data = self._execute("SELECT * FROM %s;" % self.table)
- for row in data:
- yield row[0], row[1]
-
- def itervalues(self):
- data = self._execute("SELECT value FROM %s;" % self.table)
- for row in data:
- yield row[0]
-
-
-class SQLData(object):
- """Object representing the persistent data"""
- def __init__(self, filename):
- bb.utils.mkdirhier(os.path.dirname(filename))
-
- self.filename = filename
- self.connection = sqlite3.connect(filename, timeout=5,
- isolation_level=None)
- self.cursor = self.connection.cursor()
- self._tables = {}
-
- def __getitem__(self, table):
- if not isinstance(table, basestring):
- raise TypeError("table argument must be a string, not '%s'" %
- type(table))
-
- if table in self._tables:
- return self._tables[table]
- else:
- tableobj = self._tables[table] = SQLTable(self.cursor, table)
- return tableobj
-
- def __delitem__(self, table):
- if table in self._tables:
- del self._tables[table]
- self.cursor.execute("DROP TABLE IF EXISTS %s;" % table)
-
-
-class PersistData(object):
- """Deprecated representation of the bitbake persistent data store"""
- def __init__(self, d):
- warnings.warn("Use of PersistData will be deprecated in the future",
- category=PendingDeprecationWarning,
- stacklevel=2)
-
- self.data = persist(d)
- logger.debug(1, "Using '%s' as the persistent data cache",
- self.data.filename)
-
- def addDomain(self, domain):
- """
- Add a domain (pending deprecation)
- """
- return self.data[domain]
-
- def delDomain(self, domain):
- """
- Removes a domain and all the data it contains
- """
- del self.data[domain]
-
- def getKeyValues(self, domain):
- """
- Return a list of key + value pairs for a domain
- """
- return self.data[domain].items()
-
- def getValue(self, domain, key):
- """
- Return the value of a key for a domain
- """
- return self.data[domain][key]
-
- def setValue(self, domain, key, value):
- """
- Sets the value of a key for a domain
- """
- self.data[domain][key] = value
-
- def delValue(self, domain, key):
- """
- Deletes a key/value pair
- """
- del self.data[domain][key]
-
-
-def persist(d):
- """Convenience factory for construction of SQLData based upon metadata"""
- cachedir = (bb.data.getVar("PERSISTENT_DIR", d, True) or
- bb.data.getVar("CACHE", d, True))
- if not cachedir:
- logger.critical("Please set the 'PERSISTENT_DIR' or 'CACHE' variable")
- sys.exit(1)
-
- cachefile = os.path.join(cachedir, "bb_persist_data.sqlite3")
- return SQLData(cachefile)
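In practice the store behaves like a dict of dicts keyed by domain. A usage sketch (assumes a configured datastore d with CACHE or PERSISTENT_DIR set; the domain name is illustrative):

    pd = persist(d)                   # opens <cachedir>/bb_persist_data.sqlite3
    revs = pd['EXAMPLE_DOMAIN']       # CREATE TABLE IF NOT EXISTS on first access
    revs['some-key'] = 'some-value'   # INSERT or UPDATE depending on existence
    assert 'some-key' in revs
    del pd['EXAMPLE_DOMAIN']          # DROP TABLE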
diff --git a/bitbake/lib/bb/process.py b/bitbake/lib/bb/process.py
deleted file mode 100644
index 4150d80e06..0000000000
--- a/bitbake/lib/bb/process.py
+++ /dev/null
@@ -1,109 +0,0 @@
-import logging
-import signal
-import subprocess
-
-logger = logging.getLogger('BitBake.Process')
-
-def subprocess_setup():
- # Python installs a SIGPIPE handler by default. This is usually not what
- # non-Python subprocesses expect.
- signal.signal(signal.SIGPIPE, signal.SIG_DFL)
-
-class CmdError(RuntimeError):
- def __init__(self, command, msg=None):
- self.command = command
- self.msg = msg
-
- def __str__(self):
- if not isinstance(self.command, basestring):
- cmd = subprocess.list2cmdline(self.command)
- else:
- cmd = self.command
-
- msg = "Execution of '%s' failed" % cmd
- if self.msg:
- msg += ': %s' % self.msg
- return msg
-
-class NotFoundError(CmdError):
- def __str__(self):
- return CmdError.__str__(self) + ": command not found"
-
-class ExecutionError(CmdError):
- def __init__(self, command, exitcode, stdout = None, stderr = None):
- CmdError.__init__(self, command)
- self.exitcode = exitcode
- self.stdout = stdout
- self.stderr = stderr
-
- def __str__(self):
- message = ""
- if self.stderr:
- message += self.stderr
- if self.stdout:
- message += self.stdout
- if message:
- message = ":\n" + message
- return (CmdError.__str__(self) +
- " with exit code %s" % self.exitcode + message)
-
-class Popen(subprocess.Popen):
- defaults = {
- "close_fds": True,
- "preexec_fn": subprocess_setup,
- "stdout": subprocess.PIPE,
- "stderr": subprocess.STDOUT,
- "stdin": subprocess.PIPE,
- "shell": False,
- }
-
- def __init__(self, *args, **kwargs):
- options = dict(self.defaults)
- options.update(kwargs)
- subprocess.Popen.__init__(self, *args, **options)
-
-def _logged_communicate(pipe, log, input):
- if pipe.stdin:
- if input is not None:
- pipe.stdin.write(input)
- pipe.stdin.close()
-
- bufsize = 512
- outdata, errdata = [], []
- while pipe.poll() is None:
- if pipe.stdout is not None:
- data = pipe.stdout.read(bufsize)
- if data is not None:
- outdata.append(data)
- log.write(data)
-
- if pipe.stderr is not None:
- data = pipe.stderr.read(bufsize)
- if data is not None:
- errdata.append(data)
- log.write(data)
- return ''.join(outdata), ''.join(errdata)
-
-def run(cmd, input=None, log=None, **options):
- """Convenience function to run a command and return its output, raising an
- exception when the command fails"""
-
- if isinstance(cmd, basestring) and not "shell" in options:
- options["shell"] = True
-
- try:
- pipe = Popen(cmd, **options)
- except OSError, exc:
- if exc.errno == 2:
- raise NotFoundError(cmd)
- else:
- raise CmdError(cmd, exc)
-
- if log:
- stdout, stderr = _logged_communicate(pipe, log, input)
- else:
- stdout, stderr = pipe.communicate(input)
-
- if pipe.returncode != 0:
- raise ExecutionError(cmd, pipe.returncode, stdout, stderr)
- return stdout, stderr
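A short usage sketch of run() (a string command implies shell=True; with the default Popen settings stderr is folded into stdout):

    try:
        stdout, stderr = run('git rev-parse HEAD')
    except NotFoundError:
        print 'git not found in PATH'
    except ExecutionError, exc:
        print 'command failed with %d:\n%s' % (exc.exitcode, exc.stdout)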
diff --git a/bitbake/lib/bb/providers.py b/bitbake/lib/bb/providers.py
deleted file mode 100644
index dcba9ae255..0000000000
--- a/bitbake/lib/bb/providers.py
+++ /dev/null
@@ -1,330 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2003, 2004 Chris Larson
-# Copyright (C) 2003, 2004 Phil Blundell
-# Copyright (C) 2003 - 2005 Michael 'Mickey' Lauer
-# Copyright (C) 2005 Holger Hans Peter Freyther
-# Copyright (C) 2005 ROAD GmbH
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re
-import logging
-from bb import data, utils
-import bb
-
-logger = logging.getLogger("BitBake.Provider")
-
-class NoProvider(Exception):
- """Exception raised when no provider of a build dependency can be found"""
-
-class NoRProvider(Exception):
- """Exception raised when no provider of a runtime dependency can be found"""
-
-
-def sortPriorities(pn, dataCache, pkg_pn = None):
- """
- Reorder pkg_pn by file priority and default preference
- """
-
- if not pkg_pn:
- pkg_pn = dataCache.pkg_pn
-
- files = pkg_pn[pn]
- priorities = {}
- for f in files:
- priority = dataCache.bbfile_priority[f]
- preference = dataCache.pkg_dp[f]
- if priority not in priorities:
- priorities[priority] = {}
- if preference not in priorities[priority]:
- priorities[priority][preference] = []
- priorities[priority][preference].append(f)
- tmp_pn = []
- for pri in sorted(priorities, lambda a, b: a - b):
- tmp_pref = []
- for pref in sorted(priorities[pri], lambda a, b: b - a):
- tmp_pref.extend(priorities[pri][pref])
- tmp_pn = [tmp_pref] + tmp_pn
-
- return tmp_pn
-
-def preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
- """
- Check if the version pe,pv,pr is the preferred one.
- If a preferred version is defined and it ends with '%', then pv only has to start with that version with the '%' removed.
- """
- if pr == preferred_r or preferred_r is None:
- if pe == preferred_e or preferred_e is None:
- if preferred_v == pv:
- return True
- if preferred_v is not None and preferred_v.endswith('%') and pv.startswith(preferred_v[:-1]):
- return True
- return False
-
-def findPreferredProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
- """
- Find the first provider in pkg_pn with a PREFERRED_VERSION set.
- """
-
- preferred_file = None
- preferred_ver = None
-
- localdata = data.createCopy(cfgData)
- bb.data.setVar('OVERRIDES', "pn-%s:%s:%s" % (pn, pn, data.getVar('OVERRIDES', localdata)), localdata)
- bb.data.update_data(localdata)
-
- preferred_v = bb.data.getVar('PREFERRED_VERSION_%s' % pn, localdata, True)
- if preferred_v:
- m = re.match('(\d+:)*(.*)(_.*)*', preferred_v)
- if m:
- if m.group(1):
- preferred_e = int(m.group(1)[:-1])
- else:
- preferred_e = None
- preferred_v = m.group(2)
- if m.group(3):
- preferred_r = m.group(3)[1:]
- else:
- preferred_r = None
- else:
- preferred_e = None
- preferred_r = None
-
- for file_set in pkg_pn:
- for f in file_set:
- pe, pv, pr = dataCache.pkg_pepvpr[f]
- if preferredVersionMatch(pe, pv, pr, preferred_e, preferred_v, preferred_r):
- preferred_file = f
- preferred_ver = (pe, pv, pr)
- break
- if preferred_file:
- break
- if preferred_r:
- pv_str = '%s-%s' % (preferred_v, preferred_r)
- else:
- pv_str = preferred_v
- if preferred_e is not None:
- pv_str = '%s:%s' % (preferred_e, pv_str)
- itemstr = ""
- if item:
- itemstr = " (for item %s)" % item
- if preferred_file is None:
- logger.info("preferred version %s of %s not available%s", pv_str, pn, itemstr)
- else:
- logger.debug(1, "selecting %s as PREFERRED_VERSION %s of package %s%s", preferred_file, pv_str, pn, itemstr)
-
- return (preferred_ver, preferred_file)
-
-
-def findLatestProvider(pn, cfgData, dataCache, file_set):
- """
- Return the highest version of the providers in file_set.
- Take default preferences into account.
- """
- latest = None
- latest_p = 0
- latest_f = None
- for file_name in file_set:
- pe, pv, pr = dataCache.pkg_pepvpr[file_name]
- dp = dataCache.pkg_dp[file_name]
-
- if (latest is None) or ((latest_p == dp) and (utils.vercmp(latest, (pe, pv, pr)) < 0)) or (dp > latest_p):
- latest = (pe, pv, pr)
- latest_f = file_name
- latest_p = dp
-
- return (latest, latest_f)
-
-
-def findBestProvider(pn, cfgData, dataCache, pkg_pn = None, item = None):
- """
- If there is a PREFERRED_VERSION, find the highest-priority bbfile
- providing that version. If not, find the latest version provided by
- a bbfile in the highest-priority set.
- """
-
- sortpkg_pn = sortPriorities(pn, dataCache, pkg_pn)
- # Find the highest priority provider with a PREFERRED_VERSION set
- (preferred_ver, preferred_file) = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn, item)
- # Find the latest version of the highest priority provider
- (latest, latest_f) = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[0])
-
- if preferred_file is None:
- preferred_file = latest_f
- preferred_ver = latest
-
- return (latest, latest_f, preferred_ver, preferred_file)
-
-
-def _filterProviders(providers, item, cfgData, dataCache):
- """
- Take a list of providers and filter/reorder according to the
- environment variables and previous build results
- """
- eligible = []
- preferred_versions = {}
- sortpkg_pn = {}
-
- # The order of providers depends on the order of the files on the disk
- # up to here. Sort pkg_pn to make dependency issues reproducible rather
- # than effectively random.
- providers.sort()
-
- # Collate providers by PN
- pkg_pn = {}
- for p in providers:
- pn = dataCache.pkg_fn[p]
- if pn not in pkg_pn:
- pkg_pn[pn] = []
- pkg_pn[pn].append(p)
-
- logger.debug(1, "providers for %s are: %s", item, pkg_pn.keys())
-
- # First add PREFERRED_VERSIONS
- for pn in pkg_pn:
- sortpkg_pn[pn] = sortPriorities(pn, dataCache, pkg_pn)
- preferred_versions[pn] = findPreferredProvider(pn, cfgData, dataCache, sortpkg_pn[pn], item)
- if preferred_versions[pn][1]:
- eligible.append(preferred_versions[pn][1])
-
- # Now add latest versions
- for pn in sortpkg_pn:
- if pn in preferred_versions and preferred_versions[pn][1]:
- continue
- preferred_versions[pn] = findLatestProvider(pn, cfgData, dataCache, sortpkg_pn[pn][0])
- eligible.append(preferred_versions[pn][1])
-
- if len(eligible) == 0:
- logger.error("no eligible providers for %s", item)
- return 0
-
- # If pn == item, give it a slight default preference
- # This means PREFERRED_PROVIDER_foobar defaults to foobar if available
- for p in providers:
- pn = dataCache.pkg_fn[p]
- if pn != item:
- continue
- (newvers, fn) = preferred_versions[pn]
- if fn not in eligible:
- continue
- eligible.remove(fn)
- eligible = [fn] + eligible
-
- return eligible
-
-
-def filterProviders(providers, item, cfgData, dataCache):
- """
- Take a list of providers and filter/reorder according to the
- environment variables and previous build results
- Takes a "normal" target item
- """
-
- eligible = _filterProviders(providers, item, cfgData, dataCache)
-
- prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % item, cfgData, 1)
- if prefervar:
- dataCache.preferred[item] = prefervar
-
- foundUnique = False
- if item in dataCache.preferred:
- for p in eligible:
- pn = dataCache.pkg_fn[p]
- if dataCache.preferred[item] == pn:
- logger.verbose("selecting %s to satisfy %s due to PREFERRED_PROVIDERS", pn, item)
- eligible.remove(p)
- eligible = [p] + eligible
- foundUnique = True
- break
-
- logger.debug(1, "sorted providers for %s are: %s", item, eligible)
-
- return eligible, foundUnique
-
-def filterProvidersRunTime(providers, item, cfgData, dataCache):
- """
- Take a list of providers and filter/reorder according to the
- environment variables and previous build results
- Takes a "runtime" target item
- """
-
- eligible = _filterProviders(providers, item, cfgData, dataCache)
-
- # Should use dataCache.preferred here?
- preferred = []
- preferred_vars = []
- pns = {}
- for p in eligible:
- pns[dataCache.pkg_fn[p]] = p
- for p in eligible:
- pn = dataCache.pkg_fn[p]
- provides = dataCache.pn_provides[pn]
- for provide in provides:
- prefervar = bb.data.getVar('PREFERRED_PROVIDER_%s' % provide, cfgData, 1)
- logger.verbose("checking PREFERRED_PROVIDER_%s (value %s) against %s", provide, prefervar, pns.keys())
- if prefervar in pns and pns[prefervar] not in preferred:
- var = "PREFERRED_PROVIDER_%s = %s" % (provide, prefervar)
- logger.verbose("selecting %s to satisfy runtime %s due to %s", prefervar, item, var)
- preferred_vars.append(var)
- pref = pns[prefervar]
- eligible.remove(pref)
- eligible = [pref] + eligible
- preferred.append(pref)
- break
-
- numberPreferred = len(preferred)
-
- if numberPreferred > 1:
- logger.error("Trying to resolve runtime dependency %s resulted in conflicting PREFERRED_PROVIDER entries being found.\nThe providers found were: %s\nThe PREFERRED_PROVIDER entries resulting in this conflict were: %s", item, preferred, preferred_vars)
-
- logger.debug(1, "sorted providers for %s are: %s", item, eligible)
-
- return eligible, numberPreferred
-
-regexp_cache = {}
-
-def getRuntimeProviders(dataCache, rdepend):
- """
- Return any providers of runtime dependency
- """
- rproviders = []
-
- if rdepend in dataCache.rproviders:
- rproviders += dataCache.rproviders[rdepend]
-
- if rdepend in dataCache.packages:
- rproviders += dataCache.packages[rdepend]
-
- if rproviders:
- return rproviders
-
- # Only search dynamic packages if we can't find anything in other variables
- for pattern in dataCache.packages_dynamic:
- pattern = pattern.replace('+', r"\+")
- if pattern in regexp_cache:
- regexp = regexp_cache[pattern]
- else:
- try:
- regexp = re.compile(pattern)
- except:
- logger.error("Error parsing regular expression '%s'", pattern)
- raise
- regexp_cache[pattern] = regexp
- if regexp.match(rdepend):
- rproviders += dataCache.packages_dynamic[pattern]
-
- return rproviders
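To make the '%' wildcard semantics concrete, two illustrative calls against preferredVersionMatch() (the triples are (pe, pv, pr); None means the field is unconstrained):

    # a PREFERRED_VERSION ending in '%' is a prefix match on pv
    assert preferredVersionMatch(None, '1.2.3', 'r0', None, '1.2.%', None)
    # without '%' the preferred version must equal pv exactly
    assert not preferredVersionMatch(None, '1.3', 'r0', None, '1.2', None)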
diff --git a/bitbake/lib/bb/pysh/__init__.py b/bitbake/lib/bb/pysh/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/bitbake/lib/bb/pysh/__init__.py
+++ /dev/null
diff --git a/bitbake/lib/bb/pysh/builtin.py b/bitbake/lib/bb/pysh/builtin.py
deleted file mode 100644
index 25ad22eb74..0000000000
--- a/bitbake/lib/bb/pysh/builtin.py
+++ /dev/null
@@ -1,710 +0,0 @@
-# builtin.py - builtins and utilities definitions for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Builtin and internal utilities implementations.
-
-- Beware not to use python interpreter environment as if it were the shell
-environment. For instance, commands working directory must be explicitely handled
-through env['PWD'] instead of relying on python working directory.
-"""
-import errno
-import optparse
-import os
-import re
-import subprocess
-import sys
-import time
-
-def has_subprocess_bug():
- return getattr(subprocess, 'list2cmdline') and \
- ( subprocess.list2cmdline(['']) == '' or \
- subprocess.list2cmdline(['foo|bar']) == 'foo|bar')
-
-# Detect python bug 1634343: "subprocess swallows empty arguments under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1634343&group_id=5470&atid=105470>
-# Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32"
-# <http://sourceforge.net/tracker/index.php?func=detail&aid=1710802&group_id=5470&atid=105470>
-if has_subprocess_bug():
- import subprocess_fix
- subprocess.list2cmdline = subprocess_fix.list2cmdline
-
-from sherrors import *
-
-class NonExitingParser(optparse.OptionParser):
- """OptionParser default behaviour upon error is to print the error message and
- exit. Raise a utility error instead.
- """
- def error(self, msg):
- raise UtilityError(msg)
-
-#-------------------------------------------------------------------------------
-# set special builtin
-#-------------------------------------------------------------------------------
-OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False,
- help='The shell shall disable pathname expansion.')
-OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False,
- help="""When this option is on, if a simple command fails for any of the \
- reasons listed in Consequences of Shell Errors or returns an exit status \
- value >0, and is not part of the compound list following a while, until, \
- or if keyword, and is not a part of an AND or OR list, and is not a \
- pipeline preceded by the ! reserved word, then the shell shall immediately \
- exit.""")
-OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False,
- help="""The shell shall write to standard error a trace for each command \
- after it expands the command and before it executes it. It is unspecified \
- whether the command that turns tracing off is traced.""")
-
-def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SET.parse_args(args)
- env = interp.get_env()
-
- if option.has_f:
- env.set_opt('-f')
- if option.has_e:
- env.set_opt('-e')
- if option.has_x:
- env.set_opt('-x')
- return 0
-
-#-------------------------------------------------------------------------------
-# shift special builtin
-#-------------------------------------------------------------------------------
-def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- params = interp.get_env().get_positional_args()
- if args:
- try:
- n = int(args[0])
- if n > len(params):
- raise ValueError()
- except ValueError:
- return 1
- else:
- n = 1
-
- params[:n] = []
- interp.get_env().set_positional_args(params)
- return 0
-
-#-------------------------------------------------------------------------------
-# export special builtin
-#-------------------------------------------------------------------------------
-OPT_EXPORT = NonExitingParser(usage="set - set or unset options and positional parameters")
-OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_EXPORT.parse_args(args)
- if option.has_p:
- raise NotImplementedError()
-
- for arg in args:
- try:
- name, value = arg.split('=', 1)
- except ValueError:
- name, value = arg, None
- env = interp.get_env().export(name, value)
-
- return 0
-
-#-------------------------------------------------------------------------------
-# return special builtin
-#-------------------------------------------------------------------------------
-def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- res = 0
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = 0
- if not 0<=res<=255:
- res = 0
-
- # BUG: should be last executed command exit code
- raise ReturnSignal(res)
-
-#-------------------------------------------------------------------------------
-# trap special builtin
-#-------------------------------------------------------------------------------
-def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- if len(args) < 2:
- stderr.write('trap: usage: trap [[arg] signal_spec ...]\n')
- return 2
-
- action = args[0]
- for sig in args[1:]:
- try:
- env.traps[sig] = action
- except Exception, e:
- stderr.write('trap: %s\n' % str(e))
- return 0
-
-#-------------------------------------------------------------------------------
-# unset special builtin
-#-------------------------------------------------------------------------------
-OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions")
-OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False)
-OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False)
-
-def builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_UNSET.parse_args(args)
-
- status = 0
- env = interp.get_env()
- for arg in args:
- try:
- if option.has_f:
- env.remove_function(arg)
- else:
- del env[arg]
- except KeyError:
- pass
- except VarAssignmentError:
- status = 1
-
- return status
-
-#-------------------------------------------------------------------------------
-# wait special builtin
-#-------------------------------------------------------------------------------
-def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return interp.wait([int(arg) for arg in args])
-
-#-------------------------------------------------------------------------------
-# cat utility
-#-------------------------------------------------------------------------------
-def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if not args:
- args = ['-']
-
- status = 0
- for arg in args:
- if arg == '-':
- data = stdin.read()
- else:
- path = os.path.join(env['PWD'], arg)
- try:
- f = file(path, 'rb')
- try:
- data = f.read()
- finally:
- f.close()
- except IOError, e:
- if e.errno != errno.ENOENT:
- raise
- status = 1
- continue
- stdout.write(data)
- stdout.flush()
- return status
-
-#-------------------------------------------------------------------------------
-# cd utility
-#-------------------------------------------------------------------------------
-OPT_CD = NonExitingParser("cd - change the working directory")
-
-def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_CD.parse_args(args)
- env = interp.get_env()
-
- directory = None
- printdir = False
- if not args:
- home = env.get('HOME')
- if home:
- directory = home
- else:
- # HOME unset: the behaviour is unspecified, so do nothing
- return 0
- elif len(args)==1:
- directory = args[0]
- if directory=='-':
- if 'OLDPWD' not in env:
- raise UtilityError("OLDPWD not set")
- printdir = True
- directory = env['OLDPWD']
- else:
- raise UtilityError("too many arguments")
-
- curpath = None
- # Absolute directories will be handled correctly by the os.path.join call.
- if not directory.startswith('.') and not directory.startswith('..'):
- cdpaths = env.get('CDPATH', '.').split(';')
- for cdpath in cdpaths:
- p = os.path.join(cdpath, directory)
- if os.path.isdir(p):
- curpath = p
- break
-
- if curpath is None:
- curpath = directory
- curpath = os.path.join(env['PWD'], curpath)
-
- env['OLDPWD'] = env['PWD']
- env['PWD'] = curpath
- if printdir:
- stdout.write('%s\n' % curpath)
- return 0
-
-#-------------------------------------------------------------------------------
-# colon utility
-#-------------------------------------------------------------------------------
-def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# echo utility
-#-------------------------------------------------------------------------------
-def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Echo only takes arguments, no options. Use printf if you need fancy stuff.
- output = ' '.join(args) + '\n'
- stdout.write(output)
- stdout.flush()
- return 0
-
-#-------------------------------------------------------------------------------
-# egrep utility
-#-------------------------------------------------------------------------------
-# egrep is usually a shell script.
-# Unfortunately, pysh does not support shell scripts *with arguments* right now,
-# so the redirection is implemented here, assuming grep is available.
-def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-E'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# env utility
-#-------------------------------------------------------------------------------
-def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- if args and args[0]=='-i':
- raise NotImplementedError('env: -i option is not implemented')
-
- i = 0
- for arg in args:
- if '=' not in arg:
- break
- # Update the current environment
- name, value = arg.split('=', 1)
- env[name] = value
- i += 1
-
- if args[i:]:
- # Find then execute the specified interpreter
- utility = env.find_in_path(args[i])
- if not utility:
- return 127
- args[i:i+1] = utility
- name = args[i]
- args = args[i+1:]
- try:
- return run_command(name, args, interp, env, stdin, stdout, stderr,
- debugflags)
- except UtilityError:
- stderr.write('env: failed to execute %s' % ' '.join([name]+args))
- return 126
- else:
- for pair in env.get_variables().iteritems():
- stdout.write('%s=%s\n' % pair)
- return 0
-
-#-------------------------------------------------------------------------------
-# exit utility
-#-------------------------------------------------------------------------------
-def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- res = None
- if args:
- try:
- res = int(args[0])
- except ValueError:
- res = None
- if not 0<=res<=255:
- res = None
-
- if res is None:
- # BUG: should be last executed command exit code
- res = 0
-
- raise ExitSignal(res)
-
-#-------------------------------------------------------------------------------
-# fgrep utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('grep', ['-F'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# gunzip utility
-#-------------------------------------------------------------------------------
-# see egrep
-def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# kill utility
-#-------------------------------------------------------------------------------
-def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- for arg in args:
- pid = int(arg)
- status = subprocess.call(['pskill', '/T', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE)
- # pskill is asynchronous, hence the stupid polling loop
- while 1:
- p = subprocess.Popen(['pslist', str(pid)],
- shell=True,
- stdout=subprocess.PIPE,
- stderr=subprocess.STDOUT)
- output = p.communicate()[0]
- if ('process %d was not' % pid) in output:
- break
- time.sleep(1)
- return status
-
-#-------------------------------------------------------------------------------
-# mkdir utility
-#-------------------------------------------------------------------------------
-OPT_MKDIR = NonExitingParser("mkdir - make directories.")
-OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False)
-
-def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # TODO: implement umask
- # TODO: implement proper utility error report
- option, args = OPT_MKDIR.parse_args(args)
- for arg in args:
- path = os.path.join(env['PWD'], arg)
- if option.has_p:
- try:
- os.makedirs(path)
- except OSError, e:
- if e.errno != errno.EEXIST:
- raise
- else:
- os.mkdir(path)
- return 0
-
-#-------------------------------------------------------------------------------
-# netstat utility
-#-------------------------------------------------------------------------------
-def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags):
- # Do you really expect me to implement netstat?
- # This empty form is enough for Mercurial tests since it's
- # supposed to generate nothing upon success. Faking this test
- # is not a big deal either.
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# pwd utility
-#-------------------------------------------------------------------------------
-OPT_PWD = NonExitingParser("pwd - return working directory name")
-OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True,
- help="""If the PWD environment variable contains an absolute pathname of \
- the current directory that does not contain the filenames dot or dot-dot, \
- pwd shall write this pathname to standard output. Otherwise, the -L option \
- shall behave as the -P option.""")
-OPT_PWD.add_option('-P', action='store_true', dest='has_L', default=False,
- help="""The absolute pathname written shall not contain filenames that, in \
- the context of the pathname, refer to files of type symbolic link.""")
-
-def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_PWD.parse_args(args)
- stdout.write('%s\n' % env['PWD'])
- return 0
-
-#-------------------------------------------------------------------------------
-# printf utility
-#-------------------------------------------------------------------------------
-RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)')
-
-def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- def replace(m):
- assert m.group()
- g = m.group()[1:]
- if g.startswith('x'):
- return chr(int(g[1:], 16))
- if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g):
- # Yay, an octal number
- return chr(int(g, 8))
- return {
- 'a': '\a',
- 'b': '\b',
- 'f': '\f',
- 'n': '\n',
- 'r': '\r',
- 't': '\t',
- 'v': '\v',
- '\\': '\\',
- }.get(g)
-
- # Convert escape sequences
- format = re.sub(RE_UNESCAPE, replace, args[0])
- stdout.write(format % tuple(args[1:]))
- return 0
-
-#-------------------------------------------------------------------------------
-# true utility
-#-------------------------------------------------------------------------------
-def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- return 0
-
-#-------------------------------------------------------------------------------
-# sed utility
-#-------------------------------------------------------------------------------
-RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$')
-
-# cygwin sed fails with some expressions when they do not end with a single space.
-# See the unit tests for details. Interestingly, the same expressions work
-# perfectly in the cygwin shell.
-def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- # Scan pattern arguments and append a space if necessary
- for i in xrange(len(args)):
- if not RE_SED.search(args[i]):
- continue
- args[i] = args[i] + ' '
-
- return run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags)
-
-#-------------------------------------------------------------------------------
-# sleep utility
-#-------------------------------------------------------------------------------
-def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags):
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
- time.sleep(int(args[0]))
- return 0
-
-#-------------------------------------------------------------------------------
-# sort utility
-#-------------------------------------------------------------------------------
-OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files")
-
-def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags):
-
- def sort(path):
- if path == '-':
- lines = stdin.readlines()
- else:
- try:
- f = file(path)
- try:
- lines = f.readlines()
- finally:
- f.close()
- except IOError, e:
- stderr.write(str(e) + '\n')
- # Return no lines on error: the caller concatenates the results
- return []
-
- if lines and lines[-1][-1]!='\n':
- lines[-1] = lines[-1] + '\n'
- return lines
-
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- option, args = OPT_SORT.parse_args(args)
- alllines = []
-
- if len(args)<=0:
- args += ['-']
-
- # Load all files lines
- curdir = os.getcwd()
- try:
- os.chdir(env['PWD'])
- for path in args:
- alllines += sort(path)
- finally:
- os.chdir(curdir)
-
- alllines.sort()
- for line in alllines:
- stdout.write(line)
- return 0
-
-#-------------------------------------------------------------------------------
-# hg utility
-#-------------------------------------------------------------------------------
-
-hgcommands = [
- 'add',
- 'addremove',
- 'commit', 'ci',
- 'debugrename',
- 'debugwalk',
- 'falabala', # Dummy command used in a mercurial test
- 'incoming',
- 'locate',
- 'pull',
- 'push',
- 'qinit',
- 'remove', 'rm',
- 'rename', 'mv',
- 'revert',
- 'showconfig',
- 'status', 'st',
- 'strip',
- ]
-
-def rewriteslashes(name, args):
- # Several hg commands output file paths, rewrite the separators
- if len(args) > 1 and name.lower().endswith('python') \
- and args[0].endswith('hg'):
- for cmd in hgcommands:
- if cmd in args[1:]:
- return True
-
- # svn output contains many paths with OS specific separators.
- # Normalize these to unix paths.
- base = os.path.basename(name)
- if base.startswith('svn'):
- return True
-
- return False
-
-def rewritehg(output):
- if not output:
- return output
- # Rewrite os specific messages
- output = output.replace(': The system cannot find the file specified',
- ': No such file or directory')
- output = re.sub(': Access is denied.*$', ': Permission denied', output)
- output = output.replace(': No connection could be made because the target machine actively refused it',
- ': Connection refused')
- return output
-
-
-def run_command(name, args, interp, env, stdin, stdout,
- stderr, debugflags):
- # Execute the command
- if 'debug-utility' in debugflags:
- print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n')
-
- hgbin = interp.options().hgbinary
- ishg = hgbin and ('hg' in name or args and 'hg' in args[0])
- unixoutput = 'cygwin' in name or ishg
-
- exec_env = env.get_variables()
- try:
- # BUG: comparing file descriptors is clearly not a reliable way to tell
- # whether two streams point to the same underlying object. But within
- # pysh's limited scope this is usually right; we do not expect
- # complicated redirections beyond the usual 2>&1.
- # One case we still cannot handle is when stdout and stderr are
- # redirected *by the pysh caller*. This is the reason for the
- # --redirect pysh() option.
- # We want to know whether they are the same because we sometimes need to
- # transform the command output, mostly to remove CR-LF so that the
- # output is unix-like. Cygwin utilities are a special case because they
- # explicitly set their output streams to binary mode, so we have
- # nothing to do. For all other commands, we have to guess whether they
- # are sending text data, in which case the transformation must be done.
- # Again, the NUL character test is unreliable but should be enough for
- # the hg tests.
- redirected = stdout.fileno()==stderr.fileno()
- if not redirected:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- else:
- p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env,
- stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
- out, err = p.communicate()
- except WindowsError, e:
- raise UtilityError(str(e))
-
- if not unixoutput:
- def encode(s):
- if '\0' in s:
- return s
- return s.replace('\r\n', '\n')
- else:
- encode = lambda s: s
-
- if rewriteslashes(name, args):
- encode1_ = encode
- def encode(s):
- s = encode1_(s)
- s = s.replace('\\\\', '\\')
- s = s.replace('\\', '/')
- return s
-
- if ishg:
- encode2_ = encode
- def encode(s):
- return rewritehg(encode2_(s))
-
- stdout.write(encode(out))
- if not redirected:
- stderr.write(encode(err))
- return p.returncode
-
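NonExitingParser is the pattern that makes optparse usable inside a shell: errors surface as UtilityError, an ordinary exception, instead of killing the interpreter with sys.exit(). A minimal sketch with a hypothetical option:

    OPT_DEMO = NonExitingParser(usage="demo - illustration only")
    OPT_DEMO.add_option('-q', action='store_true', dest='quiet', default=False)

    try:
        option, rest = OPT_DEMO.parse_args(['-q', 'file1'])
    except UtilityError, exc:
        pass  # bad usage becomes a shell-level error, never a process exit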
diff --git a/bitbake/lib/bb/pysh/interp.py b/bitbake/lib/bb/pysh/interp.py
deleted file mode 100644
index efe5181e1e..0000000000
--- a/bitbake/lib/bb/pysh/interp.py
+++ /dev/null
@@ -1,1367 +0,0 @@
-# interp.py - shell interpreter for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Implement the shell interpreter.
-
-Most references are made to "The Open Group Base Specifications Issue 6".
-<http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html>
-"""
-# TODO: document the fact that input streams must implement fileno() so Popen will work correctly.
-# It requires non-stdin streams to be implemented as files. Still to be tested...
-# DOC: pathsep is used in PATH instead of ':'. Clearly, there are path syntax issues here.
-# TODO: stop command execution upon error.
-# TODO: sort out the filename/io_number mess. It should be possible to use filenames only.
-# TODO: review subshell implementation
-# TODO: test environment cloning for non-special builtins
-# TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost
-# TODO: unit test for variable assignment
-# TODO: test error management wrt error type/utility type
-# TODO: test for binary output everywhere
-# BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary.
-import base64
-import cPickle as pickle
-import errno
-import glob
-import os
-import re
-import subprocess
-import sys
-import tempfile
-
-try:
- s = set()
- del s
-except NameError:
- from Set import Set as set
-
-import builtin
-from sherrors import *
-import pyshlex
-import pyshyacc
-
-def mappend(func, *args, **kargs):
- """Like map but assume func returns a list. Returned lists are merged into
- a single one.
- """
- return reduce(lambda a,b: a+b, map(func, *args, **kargs), [])
-
-class FileWrapper:
- """File object wrapper to ease debugging.
-
- Allow mode checking and implement file duplication through a simple
- reference counting scheme. Not sure the latter is really useful since
- only real file descriptors can be used.
- """
- def __init__(self, mode, file, close=True):
- if mode not in ('r', 'w', 'a'):
- raise IOError('invalid mode: %s' % mode)
- self._mode = mode
- self._close = close
- if isinstance(file, FileWrapper):
- if file._refcount[0] <= 0:
- raise IOError(0, 'Error')
- self._refcount = file._refcount
- self._refcount[0] += 1
- self._file = file._file
- else:
- self._refcount = [1]
- self._file = file
-
- def dup(self):
- return FileWrapper(self._mode, self, self._close)
-
- def fileno(self):
- """fileno() should be only necessary for input streams."""
- return self._file.fileno()
-
- def read(self, size=-1):
- if self._mode!='r':
- raise IOError(0, 'Error')
- return self._file.read(size)
-
- def readlines(self, *args, **kwargs):
- return self._file.readlines(*args, **kwargs)
-
- def write(self, s):
- if self._mode not in ('w', 'a'):
- raise IOError(0, 'Error')
- return self._file.write(s)
-
- def flush(self):
- self._file.flush()
-
- def close(self):
- if not self._refcount:
- return
- assert self._refcount[0] > 0
-
- self._refcount[0] -= 1
- if self._refcount[0] == 0:
- self._mode = 'c'
- if self._close:
- self._file.close()
- self._refcount = None
-
- def mode(self):
- return self._mode
-
- def __getattr__(self, name):
- if name == 'name':
- self.name = getattr(self._file, name)
- return self.name
- else:
- raise AttributeError(name)
-
- def __del__(self):
- self.close()
-
-
-def win32_open_devnull(mode):
- return open('NUL', mode)
-
-
-class Redirections:
- """Stores open files and their mapping to pseudo-sh file descriptor.
- """
- # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does
- # not make 1 redirect to 4
- def __init__(self, stdin=None, stdout=None, stderr=None):
- self._descriptors = {}
- if stdin is not None:
- self._add_descriptor(0, stdin)
- if stdout is not None:
- self._add_descriptor(1, stdout)
- if stderr is not None:
- self._add_descriptor(2, stderr)
-
- def add_here_document(self, interp, name, content, io_number=None):
- if io_number is None:
- io_number = 0
-
- if name==pyshlex.unquote_wordtree(name):
- content = interp.expand_here_document(('TOKEN', content))
-
- # Write document content in a temporary file
- tmp = tempfile.TemporaryFile()
- try:
- tmp.write(content)
- tmp.flush()
- tmp.seek(0)
- self._add_descriptor(io_number, FileWrapper('r', tmp))
- except:
- tmp.close()
- raise
-
- def add(self, interp, op, filename, io_number=None):
- if op not in ('<', '>', '>|', '>>', '>&'):
- # TODO: add descriptor duplication and here_documents
- raise RedirectionError('Unsupported redirection operator "%s"' % op)
-
- if io_number is not None:
- io_number = int(io_number)
-
- if (op == '>&' and filename.isdigit()) or filename=='-':
- # No expansion for file descriptors, quote them if you want a filename
- fullname = filename
- else:
- if filename.startswith('/'):
- # TODO: win32 kludge
- if filename=='/dev/null':
- fullname = 'NUL'
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError()
- else:
- fullname = interp.expand_redirection(('TOKEN', filename))
- if not fullname:
- raise RedirectionError('%s: ambiguous redirect' % filename)
- # Build absolute path based on PWD
- fullname = os.path.join(interp.get_env()['PWD'], fullname)
-
- if op=='<':
- return self._add_input_redirection(interp, fullname, io_number)
- elif op in ('>', '>|'):
- clobber = ('>|'==op)
- return self._add_output_redirection(interp, fullname, io_number, clobber)
- elif op=='>>':
- return self._add_output_appending(interp, fullname, io_number)
- elif op=='>&':
- return self._dup_output_descriptor(fullname, io_number)
-
- def close(self):
- if self._descriptors is not None:
- for desc in self._descriptors.itervalues():
- desc.flush()
- desc.close()
- self._descriptors = None
-
- def stdin(self):
- return self._descriptors[0]
-
- def stdout(self):
- return self._descriptors[1]
-
- def stderr(self):
- return self._descriptors[2]
-
- def clone(self):
- clone = Redirections()
- for desc, fileobj in self._descriptors.iteritems():
- clone._descriptors[desc] = fileobj.dup()
- return clone
-
- def _add_output_redirection(self, interp, filename, io_number, clobber):
- if io_number is None:
- # io_number default to standard output
- io_number = 1
-
- if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename):
- # File already exist in no-clobber mode, bail out
- raise RedirectionError('File "%s" already exists' % filename)
-
- # Open and register
- self._add_file_descriptor(io_number, filename, 'w')
-
- def _add_output_appending(self, interp, filename, io_number):
- if io_number is None:
- io_number = 1
- self._add_file_descriptor(io_number, filename, 'a')
-
- def _add_input_redirection(self, interp, filename, io_number):
- if io_number is None:
- io_number = 0
- self._add_file_descriptor(io_number, filename, 'r')
-
- def _add_file_descriptor(self, io_number, filename, mode):
- try:
- if filename.startswith('/'):
- if filename=='/dev/null':
- f = win32_open_devnull(mode+'b')
- else:
- # TODO: handle absolute pathnames, they are unlikely to exist on the
- # current platform (win32 for instance).
- raise NotImplementedError('cannot open absolute path %s' % repr(filename))
- else:
- f = file(filename, mode+'b')
- except IOError, e:
- raise RedirectionError(str(e))
-
- wrapper = None
- try:
- wrapper = FileWrapper(mode, f)
- f = None
- self._add_descriptor(io_number, wrapper)
- except:
- if f: f.close()
- if wrapper: wrapper.close()
- raise
-
- def _dup_output_descriptor(self, source_fd, dest_fd):
- if source_fd is None:
- source_fd = 1
- self._dup_file_descriptor(source_fd, dest_fd, 'w')
-
- def _dup_file_descriptor(self, source_fd, dest_fd, mode):
- source_fd = int(source_fd)
- if source_fd not in self._descriptors:
- raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd))
- source = self._descriptors[source_fd]
-
- if source.mode()!=mode:
- raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode))
-
- if dest_fd=='-':
- # Close the source descriptor
- del self._descriptors[source_fd]
- source.close()
- else:
- dest_fd = int(dest_fd)
- if dest_fd not in self._descriptors:
- raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd))
-
- dest = self._descriptors[dest_fd]
- if dest.mode()!=mode:
- raise RedirectionError('Descriptor %s cannot be redirected in mode "%s"' % (str(dest), mode))
-
- self._descriptors[dest_fd] = source.dup()
- dest.close()
-
- def _add_descriptor(self, io_number, file):
- io_number = int(io_number)
-
- if io_number in self._descriptors:
- # Close the current descriptor
- d = self._descriptors[io_number]
- del self._descriptors[io_number]
- d.close()
-
- self._descriptors[io_number] = file
-
- def __str__(self):
- names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v
- in self._descriptors.iteritems()]
- names = ','.join(names)
- return 'Redirections(%s)' % names
-
- def __del__(self):
- self.close()
-
-def cygwin_to_windows_path(path):
- """Turn /cygdrive/c/foo into c:/foo, or return path if it
- is not a cygwin path.
- """
- if not path.startswith('/cygdrive/'):
- return path
- path = path[len('/cygdrive/'):]
- path = path[:1] + ':' + path[1:]
- return path
-
-def win32_to_unix_path(path):
- if path is not None:
- path = path.replace('\\', '/')
- return path
-
-_RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
-_SHEBANG_CMDS = {
- '/usr/bin/env': 'env',
- '/bin/sh': 'pysh',
- 'python': 'python',
-}
-
-def resolve_shebang(path, ignoreshell=False):
- """Return a list of arguments as shebang interpreter call or an empty list
- if path does not refer to an executable script.
- See <http://www.opengroup.org/austin/docs/austin_51r2.txt>.
-
- ignoreshell - set to True to ignore sh shebangs. Return an empty list instead.
- """
- try:
- f = file(path)
- try:
- # At most 80 characters in the first line
- header = f.read(80).splitlines()[0]
- finally:
- f.close()
-
- m = _RE_SHEBANG.search(header)
- if not m:
- return []
- cmd, arg = m.group(1,2)
- if os.path.isfile(cmd):
- # Keep this one, the hg script for instance contains a weird windows
- # shebang referencing the current python install.
- cmdfile = os.path.basename(cmd).lower()
- if cmdfile == 'python.exe':
- cmd = 'python'
- pass
- elif cmd not in _SHEBANG_CMDS:
- raise CommandNotFound('Unknown interpreter "%s" referenced in '\
- 'shebang' % header)
- cmd = _SHEBANG_CMDS.get(cmd)
- if cmd is None or (ignoreshell and cmd == 'pysh'):
- return []
- if arg is None:
- return [cmd, win32_to_unix_path(path)]
- return [cmd, arg, win32_to_unix_path(path)]
- except IOError, e:
- if e.errno!=errno.ENOENT and \
- (e.errno!=errno.EPERM and not os.path.isdir(path)): # Opening a directory raises EPERM
- raise
- return []
-
-def win32_find_in_path(name, path):
- if isinstance(path, str):
- path = path.split(os.pathsep)
-
- exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep)
- for p in path:
- p_name = os.path.join(p, name)
-
- prefix = resolve_shebang(p_name)
- if prefix:
- return prefix
-
- for ext in exts:
- p_name_ext = p_name + ext
- if os.path.exists(p_name_ext):
- return [win32_to_unix_path(p_name_ext)]
- return []
-
-class Traps(dict):
- def __setitem__(self, key, value):
- if key not in ('EXIT',):
- raise NotImplementedError()
- super(Traps, self).__setitem__(key, value)
-
-# IFS white spaces character class
-_IFS_WHITESPACES = (' ', '\t', '\n')
-
-class Environment:
- """Environment holds environment variables, export table, function
- definitions and whatever is defined in 2.12 "Shell Execution Environment",
- redirection excepted.
- """
- def __init__(self, pwd):
- self._opt = set() #Shell options
-
- self._functions = {}
- self._env = {'?': '0', '#': '0'}
- self._exported = set([
- 'HOME', 'IFS', 'PATH'
- ])
-
- # Set environment vars with side-effects
- self._ifs_ws = None # Set of IFS whitespace characters
- self._ifs_re = None # Regular expression used to split between words using IFS classes
- self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values
- self['PWD'] = pwd
- self.traps = Traps()
-
- def clone(self, subshell=False):
- env = Environment(self['PWD'])
- env._opt = set(self._opt)
- for k,v in self.get_variables().iteritems():
- if k in self._exported:
- env.export(k,v)
- elif subshell:
- env[k] = v
-
- if subshell:
- env._functions = dict(self._functions)
-
- return env
-
- def __getitem__(self, key):
- if key in ('@', '*', '-', '$'):
- raise NotImplementedError('%s is not implemented' % repr(key))
- return self._env[key]
-
- def get(self, key, defval=None):
- try:
- return self[key]
- except KeyError:
- return defval
-
- def __setitem__(self, key, value):
- if key=='IFS':
- # Update the whitespace/non-whitespace classes
- self._update_ifs(value)
- elif key=='PWD':
- pwd = os.path.abspath(value)
- if not os.path.isdir(pwd):
- raise VarAssignmentError('Invalid directory %s' % value)
- value = pwd
- elif key in ('?', '!'):
- value = str(int(value))
- self._env[key] = value
-
- def __delitem__(self, key):
- if key in ('IFS', 'PWD', '?'):
- raise VarAssignmentError('%s cannot be unset' % key)
- del self._env[key]
-
- def __contains__(self, item):
- return item in self._env
-
- def set_positional_args(self, args):
- """Set the content of 'args' as positional argument from 1 to len(args).
- Return previous argument as a list of strings.
- """
- # Save and remove previous arguments
- prevargs = []
- for i in xrange(int(self._env['#'])):
- i = str(i+1)
- prevargs.append(self._env[i])
- del self._env[i]
- self._env['#'] = '0'
-
- #Set new ones
- for i,arg in enumerate(args):
- self._env[str(i+1)] = str(arg)
- self._env['#'] = str(len(args))
-
- return prevargs
-
- def get_positional_args(self):
- return [self._env[str(i+1)] for i in xrange(int(self._env['#']))]
-
- def get_variables(self):
- return dict(self._env)
-
- def export(self, key, value=None):
- if value is not None:
- self[key] = value
- self._exported.add(key)
-
- def get_exported(self):
- return [(k,self._env.get(k)) for k in self._exported]
-
- def split_fields(self, word):
- if not self._ifs_ws or not word:
- return [word]
- return re.split(self._ifs_re, word)
-
- def _update_ifs(self, value):
- """Update the split_fields related variables when IFS character set is
- changed.
- """
- # TODO: handle NULL IFS
-
- # Separate characters in whitespace and non-whitespace
- chars = set(value)
- ws = [c for c in chars if c in _IFS_WHITESPACES]
- nws = [c for c in chars if c not in _IFS_WHITESPACES]
-
- # Keep whitespaces in a string for left and right stripping
- self._ifs_ws = ''.join(ws)
-
- # Build a regexp to split fields
- trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']'
- if nws:
- # Match a single non-whitespace separator, optionally surrounded by
- # whitespace, or else a plain run of whitespace
- nws = '[' + ''.join([re.escape(c) for c in nws]) + ']'
- nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)'
- else:
- # Whitespace-only IFS: split on runs of whitespace
- nws = trailing + '+'
- self._ifs_re = re.compile(nws)
-
- def has_opt(self, opt, val=None):
- return (opt, val) in self._opt
-
- def set_opt(self, opt, val=None):
- self._opt.add((opt, val))
-
- def find_in_path(self, name, pwd=False):
- path = self._env.get('PATH', '').split(os.pathsep)
- if pwd:
- path[:0] = [self['PWD']]
- if os.name == 'nt':
- return win32_find_in_path(name, path)
- else:
- raise NotImplementedError()
-
- def define_function(self, name, body):
- if not is_name(name):
- raise ShellSyntaxError('%s is not a valid function name' % repr(name))
- self._functions[name] = body
-
- def remove_function(self, name):
- del self._functions[name]
-
- def is_function(self, name):
- return name in self._functions
-
- def get_function(self, name):
- return self._functions.get(name)
-
-
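To make the IFS handling above concrete, a small sketch (assumes the deleted Environment class is importable; Python 2):

    import os
    env = Environment(os.getcwd())
    env['IFS'] = ' :'              # blanks plus one non-whitespace separator
    print(env.split_fields('a : b::c'))
    # -> ['a', 'b', '', 'c']       # each ':' ends a field; blanks around it collapse
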
-name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
-name_charset = dict(zip(name_charset,name_charset))
-
-def match_name(s):
- """Return the length in characters of the longest prefix made of name
- allowed characters in s.
- """
- for i,c in enumerate(s):
- if c not in name_charset:
- return s[:i]
- return s
-
-def is_name(s):
- return len([c for c in s if c not in name_charset])<=0
-
-def is_special_param(c):
- return len(c)==1 and c in ('@','*','#','?','-','$','!','0')
-
-def utility_not_implemented(name, *args, **kwargs):
- raise NotImplementedError('%s utility is not implemented' % name)
-
-
-class Utility:
- """Define utilities properties:
- func -- utility callable. See builtin module for utility samples.
- is_special -- see XCU 2.8.
- """
- def __init__(self, func, is_special=0):
- self.func = func
- self.is_special = bool(is_special)
-
-
-def encodeargs(args):
- def encodearg(s):
- # base64.encodestring inserts newlines; strip them from the result
- return ''.join(base64.encodestring(s).splitlines())
-
- s = pickle.dumps(args)
- return encodearg(s)
-
-def decodeargs(s):
- s = base64.decodestring(s)
- return pickle.loads(s)
-
-
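encodeargs()/decodeargs() are how pysh smuggles structured Python values through a command line; _asynclist() further down uses them to hand the environment and AST to a subprocess. A round-trip sketch (Python 2):

    payload = ({'PWD': '/tmp'}, ('async', None))
    arg = encodeargs(payload)      # pickle, then newline-free base64
    assert decodeargs(arg) == payload
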
-class GlobError(Exception):
- pass
-
-class Options:
- def __init__(self):
- # True if Mercurial operates with binary streams
- self.hgbinary = True
-
-class Interpreter:
- # Implementation is very basic: the execute() method just makes a DFS on the
- # AST and executes nodes one by one. Nodes are tuples (name, obj) where name
- # is a string identifier and obj is the AST element returned by the parser.
- #
- # Handlers are named after the node identifiers.
- # TODO: check node names and replace the switch in execute with a
- # dynamic getattr() call to find node handlers.
- """Shell interpreter.
-
- The following debugging flags can be passed:
- debug-parsing - enable PLY debugging.
- debug-tree - print the generated AST.
- debug-cmd - trace command execution before word expansion, plus exit status.
- debug-utility - trace utility execution.
- """
-
- # List supported commands.
- COMMANDS = {
- 'cat': Utility(builtin.utility_cat,),
- 'cd': Utility(builtin.utility_cd,),
- ':': Utility(builtin.utility_colon,),
- 'echo': Utility(builtin.utility_echo),
- 'env': Utility(builtin.utility_env),
- 'exit': Utility(builtin.utility_exit),
- 'export': Utility(builtin.builtin_export, is_special=1),
- 'egrep': Utility(builtin.utility_egrep),
- 'fgrep': Utility(builtin.utility_fgrep),
- 'gunzip': Utility(builtin.utility_gunzip),
- 'kill': Utility(builtin.utility_kill),
- 'mkdir': Utility(builtin.utility_mkdir),
- 'netstat': Utility(builtin.utility_netstat),
- 'printf': Utility(builtin.utility_printf),
- 'pwd': Utility(builtin.utility_pwd),
- 'return': Utility(builtin.builtin_return, is_special=1),
- 'sed': Utility(builtin.utility_sed,),
- 'set': Utility(builtin.builtin_set,),
- 'shift': Utility(builtin.builtin_shift,),
- 'sleep': Utility(builtin.utility_sleep,),
- 'sort': Utility(builtin.utility_sort,),
- 'trap': Utility(builtin.builtin_trap, is_special=1),
- 'true': Utility(builtin.utility_true),
- 'unset': Utility(builtin.builtin_unset, is_special=1),
- 'wait': Utility(builtin.builtin_wait, is_special=1),
- }
-
- def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None,
- stdout=None, stderr=None, opts=Options()):
- self._env = env
- if self._env is None:
- self._env = Environment(pwd)
- self._children = {}
-
- self._redirs = redirs
- self._close_redirs = False
-
- if self._redirs is None:
- if stdin is None:
- stdin = sys.stdin
- if stdout is None:
- stdout = sys.stdout
- if stderr is None:
- stderr = sys.stderr
- stdin = FileWrapper('r', stdin, False)
- stdout = FileWrapper('w', stdout, False)
- stderr = FileWrapper('w', stderr, False)
- self._redirs = Redirections(stdin, stdout, stderr)
- self._close_redirs = True
-
- self._debugflags = list(debugflags)
- self._logfile = sys.stderr
- self._options = opts
-
- def close(self):
- """Must be called when the interpreter is no longer used."""
- script = self._env.traps.get('EXIT')
- if script:
- try:
- self.execute_script(script=script)
- except:
- pass
-
- if self._redirs is not None and self._close_redirs:
- self._redirs.close()
- self._redirs = None
-
- def log(self, s):
- self._logfile.write(s)
- self._logfile.flush()
-
- def __getitem__(self, key):
- return self._env[key]
-
- def __setitem__(self, key, value):
- self._env[key] = value
-
- def options(self):
- return self._options
-
- def redirect(self, redirs, ios):
- def add_redir(io):
- if isinstance(io, pyshyacc.IORedirect):
- redirs.add(self, io.op, io.filename, io.io_number)
- else:
- redirs.add_here_document(self, io.name, io.content, io.io_number)
-
- map(add_redir, ios)
- return redirs
-
- def execute_script(self, script=None, ast=None, sourced=False,
- scriptpath=None):
- """If script is not None, parse the input. Otherwise takes the supplied
- AST. Then execute the AST.
- Return the script exit status.
- """
- try:
- if scriptpath is not None:
- self._env['0'] = os.path.abspath(scriptpath)
-
- if script is not None:
- debug_parsing = ('debug-parsing' in self._debugflags)
- cmds, script = pyshyacc.parse(script, True, debug_parsing)
- if 'debug-tree' in self._debugflags:
- pyshyacc.print_commands(cmds, self._logfile)
- self._logfile.flush()
- else:
- cmds, script = ast, ''
-
- status = 0
- for cmd in cmds:
- try:
- status = self.execute(cmd)
- except ExitSignal, e:
- if sourced:
- raise
- status = int(e.args[0])
- return status
- except ShellError:
- self._env['?'] = 1
- raise
- if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags:
- self.log('returncode ' + str(status)+ '\n')
- return status
- except CommandNotFound, e:
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Command not found by non-interactive shell
- # return 127
- raise
- except RedirectionError, e:
- # TODO: should be handled depending on the utility status
- print >>self._redirs.stderr, str(e)
- self._redirs.stderr.flush()
- # Redirection error from a non-interactive shell
- # return 127
- raise
-
- def dotcommand(self, env, args):
- if len(args) < 1:
- raise ShellError('. expects at least one argument')
- path = args[0]
- if '/' not in path:
- found = env.find_in_path(args[0], True)
- if found:
- path = found[0]
- script = file(path).read()
- return self.execute_script(script=script, sourced=True)
-
- def execute(self, token, redirs=None):
- """Execute and AST subtree with supplied redirections overriding default
- interpreter ones.
- Return the exit status.
- """
- if not token:
- return 0
-
- if redirs is None:
- redirs = self._redirs
-
- if isinstance(token, list):
- # Commands sequence
- res = 0
- for t in token:
- res = self.execute(t, redirs)
- return res
-
- type, value = token
- status = 0
- if type=='simple_command':
- redirs_copy = redirs.clone()
- try:
- # TODO: define and handle command return values
- # TODO: implement set -e
- status = self._execute_simple_command(value, redirs_copy)
- finally:
- redirs_copy.close()
- elif type=='pipeline':
- status = self._execute_pipeline(value, redirs)
- elif type=='and_or':
- status = self._execute_and_or(value, redirs)
- elif type=='for_clause':
- status = self._execute_for_clause(value, redirs)
- elif type=='while_clause':
- status = self._execute_while_clause(value, redirs)
- elif type=='function_definition':
- status = self._execute_function_definition(value, redirs)
- elif type=='brace_group':
- status = self._execute_brace_group(value, redirs)
- elif type=='if_clause':
- status = self._execute_if_clause(value, redirs)
- elif type=='subshell':
- status = self.subshell(ast=value.cmds, redirs=redirs)
- elif type=='async':
- status = self._asynclist(value)
- elif type=='redirect_list':
- redirs_copy = self.redirect(redirs.clone(), value.redirs)
- try:
- status = self.execute(value.cmd, redirs_copy)
- finally:
- redirs_copy.close()
- else:
- raise NotImplementedError('Unsupported token type ' + type)
-
- if status < 0:
- status = 255
- return status
-
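The (type, value) pairs dispatched by execute() come from pyshyacc (later in this diff). For instance, a parsed `echo hi` arrives roughly in this shape (an illustration of the structure, not verbatim parser output):

    ('simple_command',
     SimpleCommand(words=[('TOKEN', 'echo'), ('TOKEN', 'hi')],
                   redirs=[], assigns=[]))
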
- def _execute_if_clause(self, if_clause, redirs):
- cond_status = self.execute(if_clause.cond, redirs)
- if cond_status==0:
- return self.execute(if_clause.if_cmds, redirs)
- else:
- return self.execute(if_clause.else_cmds, redirs)
-
- def _execute_brace_group(self, group, redirs):
- status = 0
- for cmd in group.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_function_definition(self, fundef, redirs):
- self._env.define_function(fundef.name, fundef.body)
- return 0
-
- def _execute_while_clause(self, while_clause, redirs):
- status = 0
- while 1:
- cond_status = 0
- for cond in while_clause.condition:
- cond_status = self.execute(cond, redirs)
-
- if cond_status:
- break
-
- for cmd in while_clause.cmds:
- status = self.execute(cmd, redirs)
-
- return status
-
- def _execute_for_clause(self, for_clause, redirs):
- if not is_name(for_clause.name):
- raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name))
- items = mappend(self.expand_token, for_clause.items)
-
- status = 0
- for item in items:
- self._env[for_clause.name] = item
- for cmd in for_clause.cmds:
- status = self.execute(cmd, redirs)
- return status
-
- def _execute_and_or(self, or_and, redirs):
- res = self.execute(or_and.left, redirs)
- if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0):
- res = self.execute(or_and.right, redirs)
- return res
-
- def _execute_pipeline(self, pipeline, redirs):
- if len(pipeline.commands)==1:
- status = self.execute(pipeline.commands[0], redirs)
- else:
- # Execute all commands one after the other
- status = 0
- inpath, outpath = None, None
- try:
- # Command inputs and outputs cannot really be plugged together as done
- # by a real shell. Run commands sequentially and chain their
- # input/output through temporary files.
- tmpfd, inpath = tempfile.mkstemp()
- os.close(tmpfd)
- tmpfd, outpath = tempfile.mkstemp()
- os.close(tmpfd)
-
- inpath = win32_to_unix_path(inpath)
- outpath = win32_to_unix_path(outpath)
-
- for i, cmd in enumerate(pipeline.commands):
- call_redirs = redirs.clone()
- try:
- if i!=0:
- call_redirs.add(self, '<', inpath)
- if i!=len(pipeline.commands)-1:
- call_redirs.add(self, '>', outpath)
-
- status = self.execute(cmd, call_redirs)
-
- # Chain inputs/outputs
- inpath, outpath = outpath, inpath
- finally:
- call_redirs.close()
- finally:
- if inpath: os.remove(inpath)
- if outpath: os.remove(outpath)
-
- if pipeline.reverse_status:
- status = int(not status)
- self._env['?'] = status
- return status
-
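The temp-file chaining used by _execute_pipeline() can be shown standalone. A minimal, hypothetical two-stage "pipeline" in the same spirit, running commands sequentially and passing data through a file instead of a real pipe (assumes a POSIX host):

    import os, subprocess, tempfile

    def fake_pipeline(cmd1, cmd2):
        fd, path = tempfile.mkstemp()
        try:
            with os.fdopen(fd, 'wb') as out:
                subprocess.call(cmd1, stdout=out)        # stage 1 writes the file
            with open(path, 'rb') as inp:
                return subprocess.call(cmd2, stdin=inp)  # stage 2 reads it back
        finally:
            os.remove(path)

    fake_pipeline(['echo', 'hello'], ['cat'])            # prints 'hello'
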
- def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others):
- assert interp is self
-
- func = env.get_function(name)
- #Set positional parameters
- prevargs = None
- try:
- prevargs = env.set_positional_args(args)
- try:
- redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup())
- try:
- status = self.execute(func, redirs)
- finally:
- redirs.close()
- except ReturnSignal, e:
- status = int(e.args[0])
- env['?'] = status
- return status
- finally:
- #Reset positional parameters
- if prevargs is not None:
- env.set_positional_args(prevargs)
-
- def _execute_simple_command(self, token, redirs):
- """Can raise ReturnSignal when return builtin is called, ExitSignal when
- exit is called, and other shell exceptions upon builtin failures.
- """
- debug_command = 'debug-cmd' in self._debugflags
- if debug_command:
- self.log('word' + repr(token.words) + '\n')
- self.log('assigns' + repr(token.assigns) + '\n')
- self.log('redirs' + repr(token.redirs) + '\n')
-
- is_special = None
- env = self._env
-
- try:
- # Word expansion
- args = []
- for word in token.words:
- args += self.expand_token(word)
- if is_special is None and args:
- is_special = env.is_function(args[0]) or \
- (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special)
-
- if debug_command:
- self.log('_execute_simple_command' + str(args) + '\n')
-
- if not args:
- # Redirections happen in a subshell
- redirs = redirs.clone()
- elif not is_special:
- env = self._env.clone()
-
- # Redirections
- self.redirect(redirs, token.redirs)
-
- # Variables assignments
- res = 0
- for type,(k,v) in token.assigns:
- status, expanded = self.expand_variable((k,v))
- if status is not None:
- res = status
- if args:
- env.export(k, expanded)
- else:
- env[k] = expanded
-
- if args and args[0] in ('.', 'source'):
- res = self.dotcommand(env, args[1:])
- elif args:
- if args[0] in self.COMMANDS:
- command = self.COMMANDS[args[0]]
- elif env.is_function(args[0]):
- command = Utility(self._execute_function, is_special=True)
- else:
- if not '/' in args[0].replace('\\', '/'):
- cmd = env.find_in_path(args[0])
- if not cmd:
- # TODO: test error code on unknown command => 127
- raise CommandNotFound('Unknown command: "%s"' % args[0])
- else:
- # Handle commands like '/cygdrive/c/foo.bat'
- cmd = cygwin_to_windows_path(args[0])
- if not os.path.exists(cmd):
- raise CommandNotFound('%s: No such file or directory' % args[0])
- shebang = resolve_shebang(cmd)
- if shebang:
- cmd = shebang
- else:
- cmd = [cmd]
- args[0:1] = cmd
- command = Utility(builtin.run_command)
-
- # Command execution
- if 'debug-cmd' in self._debugflags:
- self.log('redirections ' + str(redirs) + '\n')
-
- res = command.func(args[0], args[1:], self, env,
- redirs.stdin(), redirs.stdout(),
- redirs.stderr(), self._debugflags)
-
- if self._env.has_opt('-x'):
- # Trace command execution in shell environment
- # BUG: would be hard to reproduce a real shell behaviour since
- # the AST is not annotated with source lines/tokens.
- self._redirs.stdout().write(' '.join(args))
-
- except ReturnSignal:
- raise
- except ShellError, e:
- if is_special or isinstance(e, (ExitSignal,
- ShellSyntaxError, ExpansionError)):
- raise e
- self._redirs.stderr().write(str(e)+'\n')
- return 1
-
- return res
-
- def expand_token(self, word):
- """Expand a word as specified in [2.6 Word Expansions]. Return the list
- of expanded words.
- """
- status, wtrees = self._expand_word(word)
- return map(pyshlex.wordtree_as_string, wtrees)
-
- def expand_variable(self, word):
- """Return a status code (or None if no command expansion occurred)
- and a single word.
- """
- status, wtrees = self._expand_word(word, pathname=False, split=False)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return status, words[0]
-
- def expand_here_document(self, word):
- """Return the expanded document as a single word. The here document is
- assumed to be unquoted.
- """
- status, wtrees = self._expand_word(word, pathname=False,
- split=False, here_document=True)
- words = map(pyshlex.wordtree_as_string, wtrees)
- assert len(words)==1
- return words[0]
-
- def expand_redirection(self, word):
- """Return a single word."""
- return self.expand_variable(word)[1]
-
- def get_env(self):
- return self._env
-
- def _expand_word(self, token, pathname=True, split=True, here_document=False):
- wtree = pyshlex.make_wordtree(token[1], here_document=here_document)
-
- # TODO: implement tilde expansion
- def expand(wtree):
- """Return a pseudo wordtree: the tree or its subelements can be empty
- lists when no value results from the expansion.
- """
- status = None
- for part in wtree:
- if not isinstance(part, list):
- continue
- if part[0] in ("'", '\\'):
- continue
- elif part[0] in ('`', '$('):
- status, result = self._expand_command(part)
- part[:] = result
- elif part[0] in ('$', '${'):
- part[:] = self._expand_parameter(part, wtree[0]=='"', split)
- elif part[0] in ('', '"'):
- status, result = expand(part)
- part[:] = result
- else:
- raise NotImplementedError('%s expansion is not implemented'
- % part[0])
- # [] is returned when an expansion results in no field,
- # like an empty $@
- wtree = [p for p in wtree if p != []]
- if len(wtree) < 3:
- return status, []
- return status, wtree
-
- status, wtree = expand(wtree)
- if len(wtree) == 0:
- return status, wtree
- wtree = pyshlex.normalize_wordtree(wtree)
-
- if split:
- wtrees = self._split_fields(wtree)
- else:
- wtrees = [wtree]
-
- if pathname:
- wtrees = mappend(self._expand_pathname, wtrees)
-
- wtrees = map(self._remove_quotes, wtrees)
- return status, wtrees
-
- def _expand_command(self, wtree):
- # BUG: there is something to do with backslashes and quoted
- # characters here
- command = pyshlex.wordtree_as_string(wtree[1:-1])
- status, output = self.subshell_output(command)
- return status, ['', output, '']
-
- def _expand_parameter(self, wtree, quoted=False, split=False):
- """Return a valid wtree or an empty list when no parameter results."""
- # Get the parameter name
- # TODO: implement weird expansion rules with ':'
- name = pyshlex.wordtree_as_string(wtree[1:-1])
- if not is_name(name) and not is_special_param(name):
- raise ExpansionError('Bad substitution "%s"' % name)
- # TODO: implement special parameters
- if name in ('@', '*'):
- args = self._env.get_positional_args()
- if len(args) == 0:
- return []
- if len(args)<2:
- return ['', ''.join(args), '']
-
- sep = self._env.get('IFS', '')[:1]
- if split and quoted and name=='@':
- # Introduce a new token to tell the caller that these parameters
- # cause a split as specified in 2.5.2
- return ['@'] + args + ['']
- else:
- return ['', sep.join(args), '']
-
- return ['', self._env.get(name, ''), '']
-
- def _split_fields(self, wtree):
- def is_empty(split):
- return split==['', '', '']
-
- def split_positional(quoted):
- # Return a list of wtrees split according to positional parameter rules.
- # All remaining '@' groups are removed.
- assert quoted[0]=='"'
-
- splits = [[]]
- for part in quoted:
- if not isinstance(part, list) or part[0]!='@':
- splits[-1].append(part)
- else:
- # Empty or single-argument lists were dealt with already
- assert len(part)>3
- # First argument must join with the beginning part of the original word
- splits[-1].append(part[1])
- # Create double-quotes expressions for every argument after the first
- for arg in part[2:-1]:
- splits[-1].append('"')
- splits.append(['"', arg])
- return splits
-
- # At this point, all expansions but pathnames have occurred. Only quoted
- # and positional sequences remain. Thus, all candidates for field splitting
- # are in the tree root, or are positional splits ('@') and lie in root
- # children.
- if not wtree or wtree[0] not in ('', '"'):
- # The whole token is quoted or empty, nothing to split
- return [wtree]
-
- if wtree[0]=='"':
- wtree = ['', wtree, '']
-
- result = [['', '']]
- for part in wtree[1:-1]:
- if isinstance(part, list):
- if part[0]=='"':
- splits = split_positional(part)
- if len(splits)<=1:
- result[-1] += [part, '']
- else:
- # Terminate the current split
- result[-1] += [splits[0], '']
- result += splits[1:-1]
- # Create a new split
- result += [['', splits[-1], '']]
- else:
- result[-1] += [part, '']
- else:
- splits = self._env.split_fields(part)
- if len(splits)<=1:
- # No split
- result[-1][-1] += part
- else:
- # Terminate the current resulting part and create a new one
- result[-1][-1] += splits[0]
- result[-1].append('')
- result += [['', r, ''] for r in splits[1:-1]]
- result += [['', splits[-1]]]
- result[-1].append('')
-
- # Leading and trailing empty groups come from leading/trailing blanks
- if result and is_empty(result[-1]):
- result[-1:] = []
- if result and is_empty(result[0]):
- result[:1] = []
- return result
-
- def _expand_pathname(self, wtree):
- """See [2.6.6 Pathname Expansion]."""
- if self._env.has_opt('-f'):
- return [wtree]
-
- # All expansions have been performed, only quoted sequences should remain
- # in the tree. Generate the pattern by folding the tree, escaping special
- # characters when they appear quoted
- special_chars = '*?[]'
-
- def make_pattern(wtree):
- subpattern = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = make_pattern(part)
- elif wtree[0]!='':
- for c in part:
- # Meta-characters cannot be quoted
- if c in special_chars:
- raise GlobError()
- subpattern.append(part)
- return ''.join(subpattern)
-
- def pwd_glob(pattern):
- cwd = os.getcwd()
- os.chdir(self._env['PWD'])
- try:
- return glob.glob(pattern)
- finally:
- os.chdir(cwd)
-
- #TODO: check working directory issues here wrt relative patterns
- try:
- pattern = make_pattern(wtree)
- paths = pwd_glob(pattern)
- except GlobError:
- # BUG: Meta-characters were found in quoted sequences. They should
- # have been taken literally, but this is unsupported by the current glob module.
- # Instead we consider the whole tree must be used literally and
- # therefore there is no point in globbing. This is wrong when meta
- # characters are mixed with quoted meta in the same pattern like:
- # < foo*"py*" >
- paths = []
-
- if not paths:
- return [wtree]
- return [['', path, ''] for path in paths]
-
- def _remove_quotes(self, wtree):
- """See [2.6.7 Quote Removal]."""
-
- def unquote(wtree):
- unquoted = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = unquote(part)
- unquoted.append(part)
- return ''.join(unquoted)
-
- return ['', unquote(wtree), '']
-
- def subshell(self, script=None, ast=None, redirs=None):
- """Execute the script or AST in a subshell, with inherited redirections
- if redirs is not None.
- """
- if redirs:
- sub_redirs = redirs
- else:
- # No redirections supplied: clone the interpreter's own ones
- sub_redirs = self._redirs.clone()
-
- subshell = None
- try:
- subshell = Interpreter(None, self._debugflags, self._env.clone(True),
- sub_redirs, opts=self._options)
- return subshell.execute_script(script, ast)
- finally:
- if not redirs: sub_redirs.close()
- if subshell: subshell.close()
-
- def subshell_output(self, script):
- """Execute the script in a subshell and return the captured output."""
- # Create temporary file to capture subshell output
- tmpfd, tmppath = tempfile.mkstemp()
- try:
- tmpfile = os.fdopen(tmpfd, 'wb')
- stdout = FileWrapper('w', tmpfile)
-
- redirs = Redirections(self._redirs.stdin().dup(),
- stdout,
- self._redirs.stderr().dup())
- try:
- status = self.subshell(script=script, redirs=redirs)
- finally:
- redirs.close()
- redirs = None
-
- # Extract subshell standard output
- tmpfile = open(tmppath, 'rb')
- try:
- output = tmpfile.read()
- return status, output.rstrip('\n')
- finally:
- tmpfile.close()
- finally:
- os.remove(tmppath)
-
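subshell_output() is the command-substitution primitive: backquotes and $(...) in _expand_command() above land here. A usage sketch, where ip is a hypothetical Interpreter instance built as in pysh.py further down:

    status, out = ip.subshell_output('echo hello')
    # status == 0, out == 'hello' (trailing newlines stripped)
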
- def _asynclist(self, cmd):
- args = (self._env.get_variables(), cmd)
- arg = encodeargs(args)
- assert len(arg) < 30*1024 # stay well below command-line length limits
- cmd = ['pysh.bat', '--ast', '-c', arg]
- p = subprocess.Popen(cmd, cwd=self._env['PWD'])
- self._children[p.pid] = p
- self._env['!'] = p.pid
- return 0
-
- def wait(self, pids=None):
- if not pids:
- pids = self._children.keys()
-
- status = 127
- for pid in pids:
- if pid not in self._children:
- continue
- p = self._children.pop(pid)
- status = p.wait()
-
- return status
-
diff --git a/bitbake/lib/bb/pysh/lsprof.py b/bitbake/lib/bb/pysh/lsprof.py
deleted file mode 100644
index b1831c22a7..0000000000
--- a/bitbake/lib/bb/pysh/lsprof.py
+++ /dev/null
@@ -1,116 +0,0 @@
-#! /usr/bin/env python
-
-import sys
-from _lsprof import Profiler, profiler_entry
-
-__all__ = ['profile', 'Stats']
-
-def profile(f, *args, **kwds):
- """XXX docstring"""
- p = Profiler()
- p.enable(subcalls=True, builtins=True)
- try:
- f(*args, **kwds)
- finally:
- p.disable()
- return Stats(p.getstats())
-
-
-class Stats(object):
- """XXX docstring"""
-
- def __init__(self, data):
- self.data = data
-
- def sort(self, crit="inlinetime"):
- """XXX docstring"""
- if crit not in profiler_entry.__dict__:
- raise ValueError("Can't sort by %s" % crit)
- self.data.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
- for e in self.data:
- if e.calls:
- e.calls.sort(lambda b, a: cmp(getattr(a, crit),
- getattr(b, crit)))
-
- def pprint(self, top=None, file=None, limit=None, climit=None):
- """XXX docstring"""
- if file is None:
- file = sys.stdout
- d = self.data
- if top is not None:
- d = d[:top]
- cols = "% 12s %12s %11.4f %11.4f %s\n"
- hcols = "% 12s %12s %12s %12s %s\n"
- cols2 = "+%12s %12s %11.4f %11.4f + %s\n"
- file.write(hcols % ("CallCount", "Recursive", "Total(ms)",
- "Inline(ms)", "module:lineno(function)"))
- count = 0
- for e in d:
- file.write(cols % (e.callcount, e.reccallcount, e.totaltime,
- e.inlinetime, label(e.code)))
- count += 1
- if limit is not None and count == limit:
- return
- ccount = 0
- if e.calls:
- for se in e.calls:
- file.write(cols % ("+%s" % se.callcount, se.reccallcount,
- se.totaltime, se.inlinetime,
- "+%s" % label(se.code)))
- count += 1
- ccount += 1
- if limit is not None and count == limit:
- return
- if climit is not None and ccount == climit:
- break
-
- def freeze(self):
- """Replace all references to code objects with string
- descriptions; this makes it possible to pickle the instance."""
-
- # this code is probably rather ickier than it needs to be!
- for i in range(len(self.data)):
- e = self.data[i]
- if not isinstance(e.code, str):
- self.data[i] = type(e)((label(e.code),) + e[1:])
- if e.calls:
- for j in range(len(e.calls)):
- se = e.calls[j]
- if not isinstance(se.code, str):
- e.calls[j] = type(se)((label(se.code),) + se[1:])
-
-_fn2mod = {}
-
-def label(code):
- if isinstance(code, str):
- return code
- try:
- mname = _fn2mod[code.co_filename]
- except KeyError:
- for k, v in sys.modules.items():
- if v is None:
- continue
- if not hasattr(v, '__file__'):
- continue
- if not isinstance(v.__file__, str):
- continue
- if v.__file__.startswith(code.co_filename):
- mname = _fn2mod[code.co_filename] = k
- break
- else:
- mname = _fn2mod[code.co_filename] = '<%s>'%code.co_filename
-
- return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name)
-
-
-if __name__ == '__main__':
- import os
- sys.argv = sys.argv[1:]
- if not sys.argv:
- print >> sys.stderr, "usage: lsprof.py <script> <arguments...>"
- sys.exit(2)
- sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0])))
- stats = profile(execfile, sys.argv[0], globals(), locals())
- stats.sort()
- stats.pprint()
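Besides the __main__ driver above, the module was usable programmatically. A minimal sketch (requires CPython's _lsprof extension):

    def work():
        return sum(i * i for i in range(100000))

    stats = profile(work)          # wraps Profiler enable/disable
    stats.sort('totaltime')
    stats.pprint(top=5)
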
diff --git a/bitbake/lib/bb/pysh/pysh.py b/bitbake/lib/bb/pysh/pysh.py
deleted file mode 100644
index b4e6145b51..0000000000
--- a/bitbake/lib/bb/pysh/pysh.py
+++ /dev/null
@@ -1,167 +0,0 @@
-# pysh.py - command processing for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-import optparse
-import os
-import sys
-
-import interp
-
-SH_OPT = optparse.OptionParser(prog='pysh', usage="%prog [OPTIONS]", version='0.1')
-SH_OPT.add_option('-c', action='store_true', dest='command_string', default=None,
- help='A string that shall be interpreted by the shell as one or more commands')
-SH_OPT.add_option('--redirect-to', dest='redirect_to', default=None,
- help='Redirect script commands stdout and stderr to the specified file')
-# See utility_command in builtin.py about the reason for this flag.
-SH_OPT.add_option('--redirected', dest='redirected', action='store_true', default=False,
- help='Tell the interpreter that stdout and stderr are actually the same objects, which is really stdout')
-SH_OPT.add_option('--debug-parsing', action='store_true', dest='debug_parsing', default=False,
- help='Trace PLY execution')
-SH_OPT.add_option('--debug-tree', action='store_true', dest='debug_tree', default=False,
- help='Display the generated syntax tree.')
-SH_OPT.add_option('--debug-cmd', action='store_true', dest='debug_cmd', default=False,
- help='Trace command execution before parameters expansion and exit status.')
-SH_OPT.add_option('--debug-utility', action='store_true', dest='debug_utility', default=False,
- help='Trace utility calls, after parameters expansions')
-SH_OPT.add_option('--ast', action='store_true', dest='ast', default=False,
- help='Encoded commands to execute in a subprocess')
-SH_OPT.add_option('--profile', action='store_true', default=False,
- help='Profile pysh run')
-
-
-def split_args(args):
- # Separate shell arguments from command ones
- # Just stop at the first argument not starting with a dash. I know, this is completely
- # broken: it ignores files starting with a dash and may mistake option values for the
- # command file. This is not supposed to happen for now.
- command_index = len(args)
- for i,arg in enumerate(args):
- if not arg.startswith('-'):
- command_index = i
- break
-
- return args[:command_index], args[command_index:]
-
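For example, the naive split above gives:

    split_args(['--debug-cmd', '-c', 'echo hi'])
    # -> (['--debug-cmd', '-c'], ['echo hi'])
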
-
-def fixenv(env):
- path = env.get('PATH')
- if path is not None:
- parts = path.split(os.pathsep)
- # Remove Windows utilities from PATH, they are useless at best and
- # some of them (find) may be confused with other utilities.
- parts = [p for p in parts if 'system32' not in p.lower()]
- env['PATH'] = os.pathsep.join(parts)
- if env.get('HOME') is None:
- # Several utilities, including cvsps, cannot work without
- # a defined HOME directory.
- env['HOME'] = os.path.expanduser('~')
- return env
-
-def _sh(cwd, shargs, cmdargs, options, debugflags=None, env=None):
- if os.environ.get('PYSH_TEXT') != '1':
- import msvcrt
- for fp in (sys.stdin, sys.stdout, sys.stderr):
- msvcrt.setmode(fp.fileno(), os.O_BINARY)
-
- hgbin = os.environ.get('PYSH_HGTEXT') != '1'
-
- if debugflags is None:
- debugflags = []
- if options.debug_parsing: debugflags.append('debug-parsing')
- if options.debug_utility: debugflags.append('debug-utility')
- if options.debug_cmd: debugflags.append('debug-cmd')
- if options.debug_tree: debugflags.append('debug-tree')
-
- if env is None:
- env = fixenv(dict(os.environ))
- if cwd is None:
- cwd = os.getcwd()
-
- if not cmdargs:
- # Nothing to do
- return 0
-
- ast = None
- command_file = None
- if options.command_string:
- input = cmdargs[0]
- if not options.ast:
- input += '\n'
- else:
- args, input = interp.decodeargs(input), None
- env, ast = args
- cwd = env.get('PWD', cwd)
- else:
- command_file = cmdargs[0]
- arguments = cmdargs[1:]
-
- prefix = interp.resolve_shebang(command_file, ignoreshell=True)
- if prefix:
- input = ' '.join(prefix + [command_file] + arguments)
- else:
- # Read commands from file
- f = file(command_file)
- try:
- # Trailing newline to help the parser
- input = f.read() + '\n'
- finally:
- f.close()
-
- redirect = None
- try:
- if options.redirected:
- stdout = sys.stdout
- stderr = stdout
- elif options.redirect_to:
- redirect = open(options.redirect_to, 'wb')
- stdout = redirect
- stderr = redirect
- else:
- stdout = sys.stdout
- stderr = sys.stderr
-
- # TODO: set arguments to environment variables
- opts = interp.Options()
- opts.hgbinary = hgbin
- ip = interp.Interpreter(cwd, debugflags, stdout=stdout, stderr=stderr,
- opts=opts)
- try:
- # Export given environment in shell object
- for k,v in env.iteritems():
- ip.get_env().export(k,v)
- return ip.execute_script(input, ast, scriptpath=command_file)
- finally:
- ip.close()
- finally:
- if redirect is not None:
- redirect.close()
-
-def sh(cwd=None, args=None, debugflags=None, env=None):
- if args is None:
- args = sys.argv[1:]
- shargs, cmdargs = split_args(args)
- options, shargs = SH_OPT.parse_args(shargs)
-
- if options.profile:
- import lsprof
- p = lsprof.Profiler()
- p.enable(subcalls=True)
- try:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
- finally:
- p.disable()
- stats = lsprof.Stats(p.getstats())
- stats.sort()
- stats.pprint(top=10, file=sys.stderr, climit=5)
- else:
- return _sh(cwd, shargs, cmdargs, options, debugflags, env)
-
-def main():
- sys.exit(sh())
-
-if __name__=='__main__':
- main()
diff --git a/bitbake/lib/bb/pysh/pyshlex.py b/bitbake/lib/bb/pysh/pyshlex.py
deleted file mode 100644
index b977b5e869..0000000000
--- a/bitbake/lib/bb/pysh/pyshlex.py
+++ /dev/null
@@ -1,888 +0,0 @@
-# pyshlex.py - PLY compatible lexer for pysh.
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-# TODO:
-# - review all "char in 'abc'" snippets: the empty string can be matched
-# - test line continuations within quoted/expansion strings
-# - eof is buggy wrt sublexers
-# - the lexer cannot really work in pull mode as it would be required to run
-# PLY in pull mode. It was designed to work incrementally and it would not be
-# that hard to enable pull mode.
-import re
-try:
- s = set()
- del s
-except NameError:
- from Set import Set as set
-
-from ply import lex
-from sherrors import *
-
-class NeedMore(Exception):
- pass
-
-def is_blank(c):
- return c in (' ', '\t')
-
-_RE_DIGITS = re.compile(r'^\d+$')
-
-def are_digits(s):
- return _RE_DIGITS.search(s) is not None
-
-_OPERATORS = dict([
- ('&&', 'AND_IF'),
- ('||', 'OR_IF'),
- (';;', 'DSEMI'),
- ('<<', 'DLESS'),
- ('>>', 'DGREAT'),
- ('<&', 'LESSAND'),
- ('>&', 'GREATAND'),
- ('<>', 'LESSGREAT'),
- ('<<-', 'DLESSDASH'),
- ('>|', 'CLOBBER'),
- ('&', 'AMP'),
- (';', 'COMMA'),
- ('<', 'LESS'),
- ('>', 'GREATER'),
- ('(', 'LPARENS'),
- (')', 'RPARENS'),
-])
-
-#Make a function to silence pychecker "Local variable shadows global"
-def make_partial_ops():
- partials = {}
- for k in _OPERATORS:
- for i in range(1, len(k)+1):
- partials[k[:i]] = None
- return partials
-
-_PARTIAL_OPERATORS = make_partial_ops()
-
-def is_partial_op(s):
- """Return True if s matches a non-empty subpart of an operator starting
- at its first character.
- """
- return s in _PARTIAL_OPERATORS
-
-def is_op(s):
- """If s matches an operator, returns the operator identifier. Return None
- otherwise.
- """
- return _OPERATORS.get(s)
-
-_RESERVEDS = dict([
- ('if', 'If'),
- ('then', 'Then'),
- ('else', 'Else'),
- ('elif', 'Elif'),
- ('fi', 'Fi'),
- ('do', 'Do'),
- ('done', 'Done'),
- ('case', 'Case'),
- ('esac', 'Esac'),
- ('while', 'While'),
- ('until', 'Until'),
- ('for', 'For'),
- ('{', 'Lbrace'),
- ('}', 'Rbrace'),
- ('!', 'Bang'),
- ('in', 'In'),
- ('|', 'PIPE'),
-])
-
-def get_reserved(s):
- return _RESERVEDS.get(s)
-
-_RE_NAME = re.compile(r'^[0-9a-zA-Z_]+$')
-
-def is_name(s):
- return _RE_NAME.search(s) is not None
-
-def find_chars(seq, chars):
- for i,v in enumerate(seq):
- if v in chars:
- return i,v
- return -1, None
-
-class WordLexer:
- """WordLexer parse quoted or expansion expressions and return an expression
- tree. The input string can be any well formed sequence beginning with quoting
- or expansion character. Embedded expressions are handled recursively. The
- resulting tree is made of lists and strings. Lists represent quoted or
- expansion expressions. Each list first element is the opening separator,
- the last one the closing separator. In-between can be any number of strings
- or lists for sub-expressions. Non quoted/expansion expression can written as
- strings or as lists with empty strings as starting and ending delimiters.
- """
-
- NAME_CHARSET = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'
- NAME_CHARSET = dict(zip(NAME_CHARSET, NAME_CHARSET))
-
- SPECIAL_CHARSET = '@*#?-$!0'
-
- #Characters which can be escaped depends on the current delimiters
- ESCAPABLE = {
- '`': set(['$', '\\', '`']),
- '"': set(['$', '\\', '`', '"']),
- "'": set(),
- }
-
- def __init__(self, heredoc = False):
- # _buffer is the unprocessed input characters buffer
- self._buffer = []
- # _stack is empty or contains a quoted list being processed
- # (this is the DFS path to the quoted expression being evaluated).
- self._stack = []
- self._escapable = None
- # True when parsing unquoted here documents
- self._heredoc = heredoc
-
- def add(self, data, eof=False):
- """Feed the lexer with more data. If the quoted expression can be
- delimited, return a tuple (expr, remaining) containing the expression
- tree and the unconsumed data.
- Otherwise, raise NeedMore.
- """
- self._buffer += list(data)
- self._parse(eof)
-
- result = self._stack[0]
- remaining = ''.join(self._buffer)
- self._stack = []
- self._buffer = []
- return result, remaining
-
- def _is_escapable(self, c, delim=None):
- if delim is None:
- if self._heredoc:
- # Backslashes work as if they were double quoted in unquoted
- # here-documents
- delim = '"'
- else:
- if len(self._stack)<=1:
- return True
- delim = self._stack[-2][0]
-
- escapables = self.ESCAPABLE.get(delim, None)
- return escapables is None or c in escapables
-
- def _parse_squote(self, buf, result, eof):
- if not buf:
- raise NeedMore()
- try:
- pos = buf.index("'")
- except ValueError:
- raise NeedMore()
- result[-1] += ''.join(buf[:pos])
- result += ["'"]
- return pos+1, True
-
- def _parse_bquote(self, buf, result, eof):
- if not buf:
- raise NeedMore()
-
- if buf[0]=='\n':
- #Remove line continuations
- result[:] = ['', '', '']
- elif self._is_escapable(buf[0]):
- result[-1] += buf[0]
- result += ['']
- else:
- #Keep as such
- result[:] = ['', '\\'+buf[0], '']
-
- return 1, True
-
- def _parse_dquote(self, buf, result, eof):
- if not buf:
- raise NeedMore()
- pos, sep = find_chars(buf, '$\\`"')
- if pos==-1:
- raise NeedMore()
-
- result[-1] += ''.join(buf[:pos])
- if sep=='"':
- result += ['"']
- return pos+1, True
- else:
- #Keep everything until the separator and defer processing
- return pos, False
-
- def _parse_command(self, buf, result, eof):
- if not buf:
- raise NeedMore()
-
- chars = '$\\`"\''
- if result[0] == '$(':
- chars += ')'
- pos, sep = find_chars(buf, chars)
- if pos == -1:
- raise NeedMore()
-
- result[-1] += ''.join(buf[:pos])
- if (result[0]=='$(' and sep==')') or (result[0]=='`' and sep=='`'):
- result += [sep]
- return pos+1, True
- else:
- return pos, False
-
- def _parse_parameter(self, buf, result, eof):
- if not buf:
- raise NeedMore()
-
- pos, sep = find_chars(buf, '$\\`"\'}')
- if pos==-1:
- raise NeedMore()
-
- result[-1] += ''.join(buf[:pos])
- if sep=='}':
- result += [sep]
- return pos+1, True
- else:
- return pos, False
-
- def _parse_dollar(self, buf, result, eof):
- sep = result[0]
- if sep=='$':
- if not buf:
- #TODO: handle empty $
- raise NeedMore()
- if buf[0]=='(':
- if len(buf)==1:
- raise NeedMore()
-
- if buf[1]=='(':
- result[0] = '$(('
- buf[:2] = []
- else:
- result[0] = '$('
- buf[:1] = []
-
- elif buf[0]=='{':
- result[0] = '${'
- buf[:1] = []
- else:
- if buf[0] in self.SPECIAL_CHARSET:
- result[-1] = buf[0]
- read = 1
- else:
- for read,c in enumerate(buf):
- if c not in self.NAME_CHARSET:
- break
- else:
- if not eof:
- raise NeedMore()
- read += 1
-
- result[-1] += ''.join(buf[0:read])
-
- if not result[-1]:
- result[:] = ['', result[0], '']
- else:
- result += ['']
- return read,True
-
- sep = result[0]
- if sep=='$(':
- parsefunc = self._parse_command
- elif sep=='${':
- parsefunc = self._parse_parameter
- else:
- raise NotImplementedError()
-
- pos, closed = parsefunc(buf, result, eof)
- return pos, closed
-
- def _parse(self, eof):
- buf = self._buffer
- stack = self._stack
- recurse = False
-
- while 1:
- if not stack or recurse:
- if not buf:
- raise NeedMore()
- if buf[0] not in ('"\\`$\''):
- raise ShellSyntaxError('Invalid quoted string sequence')
- stack.append([buf[0], ''])
- buf[:1] = []
- recurse = False
-
- result = stack[-1]
- if result[0]=="'":
- parsefunc = self._parse_squote
- elif result[0]=='\\':
- parsefunc = self._parse_bquote
- elif result[0]=='"':
- parsefunc = self._parse_dquote
- elif result[0]=='`':
- parsefunc = self._parse_command
- elif result[0][0]=='$':
- parsefunc = self._parse_dollar
- else:
- raise NotImplementedError()
-
- read, closed = parsefunc(buf, result, eof)
-
- buf[:read] = []
- if closed:
- if len(stack)>1:
- #Merge in parent expression
- parsed = stack.pop()
- stack[-1] += [parsed]
- stack[-1] += ['']
- else:
- break
- else:
- recurse = True
-
-def normalize_wordtree(wtree):
- """Fold back every literal sequence (delimited with empty strings) into
- parent sequence.
- """
- def normalize(wtree):
- result = []
- for part in wtree[1:-1]:
- if isinstance(part, list):
- part = normalize(part)
- if part[0]=='':
- #Move the part content back at current level
- result += part[1:-1]
- continue
- elif not part:
- #Remove empty strings
- continue
- result.append(part)
- if not result:
- result = ['']
- return [wtree[0]] + result + [wtree[-1]]
-
- return normalize(wtree)
-
-
-def make_wordtree(token, here_document=False):
- """Parse a delimited token and return a tree similar to the ones returned by
- WordLexer. token may contain any combination of expansion/quoted fields and
- plain ones.
- """
- tree = ['']
- remaining = token
- delimiters = '\\$`'
- if not here_document:
- delimiters += '\'"'
-
- while 1:
- pos, sep = find_chars(remaining, delimiters)
- if pos==-1:
- tree += [remaining, '']
- return normalize_wordtree(tree)
- tree.append(remaining[:pos])
- remaining = remaining[pos:]
-
- try:
- result, remaining = WordLexer(heredoc = here_document).add(remaining, True)
- except NeedMore:
- raise ShellSyntaxError('Invalid token "%s"' % token)
- tree.append(result)
-
-
-def wordtree_as_string(wtree):
- """Rewrite an expression tree generated by make_wordtree as string."""
- def visit(node, output):
- for child in node:
- if isinstance(child, list):
- visit(child, output)
- else:
- output.append(child)
-
- output = []
- visit(wtree, output)
- return ''.join(output)
-
-
-def unquote_wordtree(wtree):
- """Fold the word tree while removing quotes everywhere. Other expansion
- sequences are joined as such.
- """
- def unquote(wtree):
- unquoted = []
- if wtree[0] in ('', "'", '"', '\\'):
- wtree = wtree[1:-1]
-
- for part in wtree:
- if isinstance(part, list):
- part = unquote(part)
- unquoted.append(part)
- return ''.join(unquoted)
-
- return unquote(wtree)
-
-
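Putting the wordtree helpers together, a round-trip sketch (Python 2, assuming the module imports cleanly):

    tree = make_wordtree('pre"quoted $X"post')
    # -> ['', 'pre', ['"', 'quoted ', ['$', 'X', ''], '"'], 'post', '']
    wordtree_as_string(tree)       # -> 'pre"quoted $X"post'
    unquote_wordtree(tree)         # -> 'prequoted $Xpost'
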
-class HereDocLexer:
- """HereDocLexer delimits whatever comes from the here-document starting newline
- not included to the closing delimiter line included.
- """
- def __init__(self, op, delim):
- assert op in ('<<', '<<-')
- if not delim:
- raise ShellSyntaxError('invalid here document delimiter %s' % str(delim))
-
- self._op = op
- self._delim = delim
- self._buffer = []
- self._token = []
-
- def add(self, data, eof):
- """If the here-document was delimited, return a tuple (content, remaining).
- Raise NeedMore() otherwise.
- """
- self._buffer += list(data)
- self._parse(eof)
- token = ''.join(self._token)
- remaining = ''.join(self._buffer)
- self._token, self._buffer = [], []
- return token, remaining
-
- def _parse(self, eof):
- while 1:
- #Look for first unescaped newline. Quotes may be ignored
- escaped = False
- for i,c in enumerate(self._buffer):
- if escaped:
- escaped = False
- elif c=='\\':
- escaped = True
- elif c=='\n':
- break
- else:
- i = -1
-
- if i==-1 or self._buffer[i]!='\n':
- if not eof:
- raise NeedMore()
- #No more data, maybe the last line is closing delimiter
- line = ''.join(self._buffer)
- eol = ''
- self._buffer[:] = []
- else:
- line = ''.join(self._buffer[:i])
- eol = self._buffer[i]
- self._buffer[:i+1] = []
-
- if self._op=='<<-':
- line = line.lstrip('\t')
-
- if line==self._delim:
- break
-
- self._token += [line, eol]
- if i==-1:
- break
-
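A sketch of HereDocLexer's delimiting contract:

    HereDocLexer('<<', 'EOF').add('hello\nworld\nEOF\ntail', True)
    # -> ('hello\nworld\n', 'tail')
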
-class Token:
- #TODO: check this is still in use
- OPERATOR = 'OPERATOR'
- WORD = 'WORD'
-
- def __init__(self):
- self.value = ''
- self.type = None
-
- def __getitem__(self, key):
- #Behave like a two elements tuple
- if key==0:
- return self.type
- if key==1:
- return self.value
- raise IndexError(key)
-
-
-class HereDoc:
- def __init__(self, op, name=None):
- self.op = op
- self.name = name
- self.pendings = []
-
-TK_COMMA = 'COMMA'
-TK_AMPERSAND = 'AMP'
-TK_OP = 'OP'
-TK_TOKEN = 'TOKEN'
-TK_COMMENT = 'COMMENT'
-TK_NEWLINE = 'NEWLINE'
-TK_IONUMBER = 'IO_NUMBER'
-TK_ASSIGNMENT = 'ASSIGNMENT_WORD'
-TK_HERENAME = 'HERENAME'
-
-class Lexer:
- """Main lexer.
-
- Feed it with add(); tokens are delivered through the on_token() callback.
- """
- # Here-document handling makes the whole thing more complex because they basically
- # force tokens to be reordered: here-content must come right after the operator
- # and the here-document name, while some other tokens might be following the
- # here-document expression on the same line.
- #
- # So, here-doc states are basically:
- # *self._state==ST_NORMAL
- # - self._heredoc.op is None: no here-document
- # - self._heredoc.op is not None but name is None: here-document operator matched,
- # waiting for the document name/delimiter
- # - self._heredoc.op and name are not None: here-document is ready, following
- # tokens are being stored and will be pushed again when the document is
- # completely parsed.
- # *self._state==ST_HEREDOC
- # - The here-document is being delimited by self._herelexer. Once it is done
- # the content is pushed in front of the pending token list then all these
- # tokens are pushed once again.
- ST_NORMAL = 'ST_NORMAL'
- ST_OP = 'ST_OP'
- ST_BACKSLASH = 'ST_BACKSLASH'
- ST_QUOTED = 'ST_QUOTED'
- ST_COMMENT = 'ST_COMMENT'
- ST_HEREDOC = 'ST_HEREDOC'
-
- #Match end of backquote strings
- RE_BACKQUOTE_END = re.compile(r'(?<!\\)(`)')
-
- def __init__(self, parent_state = None):
- self._input = []
- self._pos = 0
-
- self._token = ''
- self._type = TK_TOKEN
-
- self._state = self.ST_NORMAL
- self._parent_state = parent_state
- self._wordlexer = None
-
- self._heredoc = HereDoc(None)
- self._herelexer = None
-
- ### The following attributes are not used for delimiting tokens and can safely
- ### be changed after here-document detection (see _push_token)
-
- # Count the number of tokens following a 'For' reserved word. Needed to
- # return an 'In' reserved word if it comes in third place.
- self._for_count = None
-
- def add(self, data, eof=False):
- """Feed the lexer with data.
-
- When eof is set to True, return unconsumed data or raise if the lexer
- is in the middle of a delimiting operation.
- Raise NeedMore otherwise.
- """
- self._input += list(data)
- self._parse(eof)
- self._input[:self._pos] = []
- return ''.join(self._input)
-
- def _parse(self, eof):
- while self._state:
- if self._pos>=len(self._input):
- if not eof:
- raise NeedMore()
- elif self._state not in (self.ST_OP, self.ST_QUOTED, self.ST_HEREDOC):
- #Delimit the current token and leave cleanly
- self._push_token('')
- break
- else:
- #Let the sublexer handle the eof themselves
- pass
-
- if self._state==self.ST_NORMAL:
- self._parse_normal()
- elif self._state==self.ST_COMMENT:
- self._parse_comment()
- elif self._state==self.ST_OP:
- self._parse_op(eof)
- elif self._state==self.ST_QUOTED:
- self._parse_quoted(eof)
- elif self._state==self.ST_HEREDOC:
- self._parse_heredoc(eof)
- else:
- assert False, "Unknown state " + str(self._state)
-
- if self._heredoc.op is not None:
- raise ShellSyntaxError('missing here-document delimiter')
-
- def _parse_normal(self):
- c = self._input[self._pos]
- if c=='\n':
- self._push_token(c)
- self._token = c
- self._type = TK_NEWLINE
- self._push_token('')
- self._pos += 1
- elif c in ('\\', '\'', '"', '`', '$'):
- self._state = self.ST_QUOTED
- elif is_partial_op(c):
- self._push_token(c)
-
- self._type = TK_OP
- self._token += c
- self._pos += 1
- self._state = self.ST_OP
- elif is_blank(c):
- self._push_token(c)
-
- #Discard blanks
- self._pos += 1
- elif self._token:
- self._token += c
- self._pos += 1
- elif c=='#':
- self._state = self.ST_COMMENT
- self._type = TK_COMMENT
- self._pos += 1
- else:
- self._pos += 1
- self._token += c
-
- def _parse_op(self, eof):
- assert self._token
-
- while 1:
- if self._pos>=len(self._input):
- if not eof:
- raise NeedMore()
- c = ''
- else:
- c = self._input[self._pos]
-
- op = self._token + c
- if c and is_partial_op(op):
- #Still parsing an operator
- self._token = op
- self._pos += 1
- else:
- #End of operator
- self._push_token(c)
- self._state = self.ST_NORMAL
- break
-
- def _parse_comment(self):
- while 1:
- if self._pos>=len(self._input):
- raise NeedMore()
-
- c = self._input[self._pos]
- if c=='\n':
- #End of comment, do not consume the end of line
- self._state = self.ST_NORMAL
- break
- else:
- self._token += c
- self._pos += 1
-
- def _parse_quoted(self, eof):
- """Precondition: the starting backquote/dollar is still in the input queue."""
- if not self._wordlexer:
- self._wordlexer = WordLexer()
-
- if self._pos<len(self._input):
- #Transfer input queue character into the subparser
- input = self._input[self._pos:]
- self._pos += len(input)
-
- wtree, remaining = self._wordlexer.add(input, eof)
- self._wordlexer = None
- self._token += wordtree_as_string(wtree)
-
- #Put unparsed character back in the input queue
- if remaining:
- self._input[self._pos:self._pos] = list(remaining)
- self._state = self.ST_NORMAL
-
- def _parse_heredoc(self, eof):
- assert not self._token
-
- if self._herelexer is None:
- self._herelexer = HereDocLexer(self._heredoc.op, self._heredoc.name)
-
- if self._pos<len(self._input):
- #Transfer input queue character into the subparser
- input = self._input[self._pos:]
- self._pos += len(input)
-
- self._token, remaining = self._herelexer.add(input, eof)
-
- #Reset here-document state
- self._herelexer = None
- heredoc, self._heredoc = self._heredoc, HereDoc(None)
- if remaining:
- self._input[self._pos:self._pos] = list(remaining)
- self._state = self.ST_NORMAL
-
- #Push pending tokens
- heredoc.pendings[:0] = [(self._token, self._type, heredoc.name)]
- for token, type, delim in heredoc.pendings:
- self._token = token
- self._type = type
- self._push_token(delim)
-
- def _push_token(self, delim):
- if not self._token:
- return 0
-
- if self._heredoc.op is not None:
- if self._heredoc.name is None:
- #Here-document name
- if self._type!=TK_TOKEN:
- raise ShellSyntaxError("expecting here-document name, got '%s'" % self._token)
- self._heredoc.name = unquote_wordtree(make_wordtree(self._token))
- self._type = TK_HERENAME
- else:
- #Capture all tokens until the newline starting the here-document
- if self._type==TK_NEWLINE:
- assert self._state==self.ST_NORMAL
- self._state = self.ST_HEREDOC
-
- self._heredoc.pendings.append((self._token, self._type, delim))
- self._token = ''
- self._type = TK_TOKEN
- return 1
-
- # BEWARE: do not change parser state from here to the end of the function:
- # when parsing between a here-document operator and the end of the line,
- # tokens are stored in self._heredoc.pendings. Therefore, they will not
- # reach the section below.
-
- #Check operators
- if self._type==TK_OP:
- #False positive because of partial op matching
- op = is_op(self._token)
- if not op:
- self._type = TK_TOKEN
- else:
- #Map to the specific operator
- self._type = op
- if self._token in ('<<', '<<-'):
- #Done here rather than in _parse_op because there is no need
- #to change the parser state since we are still waiting for
- #the here-document name
- if self._heredoc.op is not None:
- raise ShellSyntaxError("syntax error near token '%s'" % self._token)
- assert self._heredoc.op is None
- self._heredoc.op = self._token
-
- if self._type==TK_TOKEN:
- if '=' in self._token and not delim:
- if self._token.startswith('='):
- #Token is a WORD... a TOKEN that is.
- pass
- else:
- prev = self._token[:self._token.find('=')]
- if is_name(prev):
- self._type = TK_ASSIGNMENT
- else:
- #Just a token (unspecified)
- pass
- else:
- reserved = get_reserved(self._token)
- if reserved is not None:
- if reserved=='In' and self._for_count!=2:
- #Sorry, not a reserved word after all
- pass
- else:
- self._type = reserved
- if reserved in ('For', 'Case'):
- self._for_count = 0
- elif are_digits(self._token) and delim in ('<', '>'):
- #Detect IO_NUMBER
- self._type = TK_IONUMBER
- elif self._token==';':
- self._type = TK_COMMA
- elif self._token=='&':
- self._type = TK_AMPERSAND
- elif self._type==TK_COMMENT:
- #Comments are not part of sh grammar, ignore them
- self._token = ''
- self._type = TK_TOKEN
- return 0
-
- if self._for_count is not None:
- #Track token count in 'For' expression to detect 'In' reserved words.
- #Can only be in third position, no need to go beyond
- self._for_count += 1
- if self._for_count==3:
- self._for_count = None
-
- self.on_token((self._token, self._type))
- self._token = ''
- self._type = TK_TOKEN
- return 1
-
- def on_token(self, token):
- raise NotImplementedError
-
-
-tokens = [
- TK_TOKEN,
-# To silence yacc unused token warnings
-# TK_COMMENT,
- TK_NEWLINE,
- TK_IONUMBER,
- TK_ASSIGNMENT,
- TK_HERENAME,
-]
-
-#Add specific operators
-tokens += _OPERATORS.values()
-#Add reserved words
-tokens += _RESERVEDS.values()
-
-class PLYLexer(Lexer):
- """Bridge Lexer and PLY lexer interface."""
- def __init__(self):
- Lexer.__init__(self)
- self._tokens = []
- self._current = 0
- self.lineno = 0
-
- def on_token(self, token):
- value, type = token
-
- self.lineno = 0
- t = lex.LexToken()
- t.value = value
- t.type = type
- t.lexer = self
- t.lexpos = 0
- t.lineno = 0
-
- self._tokens.append(t)
-
- def is_empty(self):
- return not bool(self._tokens)
-
- #PLY compliant interface
- def token(self):
- if self._current>=len(self._tokens):
- return None
- t = self._tokens[self._current]
- self._current += 1
- return t
-
-
-def get_tokens(s):
- """Parse the input string and return a tuple (tokens, unprocessed) where
- tokens is a list of parsed tokens and unprocessed is the part of the input
- string left untouched by the lexer.
- """
- lexer = PLYLexer()
- untouched = lexer.add(s, True)
- tokens = []
- while 1:
- token = lexer.token()
- if token is None:
- break
- tokens.append(token)
-
- tokens = [(t.value, t.type) for t in tokens]
- return tokens, untouched
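End to end, the lexer turns source text into (value, type) pairs. A sketch:

    tokens, rest = get_tokens('echo hi\n')
    # -> ([('echo', 'TOKEN'), ('hi', 'TOKEN'), ('\n', 'NEWLINE')], '')
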
diff --git a/bitbake/lib/bb/pysh/pyshyacc.py b/bitbake/lib/bb/pysh/pyshyacc.py
deleted file mode 100644
index e8e80aac45..0000000000
--- a/bitbake/lib/bb/pysh/pyshyacc.py
+++ /dev/null
@@ -1,779 +0,0 @@
-# pyshyacc.py - PLY grammar definition for pysh
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""PLY grammar file.
-"""
-import os.path
-import sys
-
-import pyshlex
-tokens = pyshlex.tokens
-
-from ply import yacc
-import sherrors
-
-class IORedirect:
- def __init__(self, op, filename, io_number=None):
- self.op = op
- self.filename = filename
- self.io_number = io_number
-
-class HereDocument:
- def __init__(self, op, name, content, io_number=None):
- self.op = op
- self.name = name
- self.content = content
- self.io_number = io_number
-
-def make_io_redirect(p):
- """Make an IORedirect instance from the input 'io_redirect' production."""
- name, io_number, io_target = p
- assert name=='io_redirect'
-
- if io_target[0]=='io_file':
- io_type, io_op, io_file = io_target
- return IORedirect(io_op, io_file, io_number)
- elif io_target[0]=='io_here':
- io_type, io_op, io_name, io_content = io_target
- return HereDocument(io_op, io_name, io_content, io_number)
- else:
- assert False, "Invalid IO redirection token %s" % repr(io_type)
-
-class SimpleCommand:
- """
- assigns contains (name, value) pairs.
- """
- def __init__(self, words, redirs, assigns):
- self.words = list(words)
- self.redirs = list(redirs)
- self.assigns = list(assigns)
-
-class Pipeline:
- def __init__(self, commands, reverse_status=False):
- self.commands = list(commands)
- assert self.commands #Grammar forbids this
- self.reverse_status = reverse_status
-
-class AndOr:
- def __init__(self, op, left, right):
- self.op = str(op)
- self.left = left
- self.right = right
-
-class ForLoop:
- def __init__(self, name, items, cmds):
- self.name = str(name)
- self.items = list(items)
- self.cmds = list(cmds)
-
-class WhileLoop:
- def __init__(self, condition, cmds):
- self.condition = list(condition)
- self.cmds = list(cmds)
-
-class UntilLoop:
- def __init__(self, condition, cmds):
- self.condition = list(condition)
- self.cmds = list(cmds)
-
-class FunDef:
- def __init__(self, name, body):
- self.name = str(name)
- self.body = body
-
-class BraceGroup:
- def __init__(self, cmds):
- self.cmds = list(cmds)
-
-class IfCond:
- def __init__(self, cond, if_cmds, else_cmds):
- self.cond = list(cond)
- self.if_cmds = if_cmds
- self.else_cmds = else_cmds
-
-class Case:
- def __init__(self, name, items):
- self.name = name
- self.items = items
-
-class SubShell:
- def __init__(self, cmds):
- self.cmds = cmds
-
-class RedirectList:
- def __init__(self, cmd, redirs):
- self.cmd = cmd
- self.redirs = list(redirs)
-
-def get_production(productions, ptype):
- """productions must be a list of production tuples like (name, obj) where
- name is the production string identifier.
- Return the first production named 'ptype'. Raise KeyError if None can be
- found.
- """
- for production in productions:
- if production is not None and production[0]==ptype:
- return production
- raise KeyError(ptype)
-
-#-------------------------------------------------------------------------------
-# PLY grammar definition
-#-------------------------------------------------------------------------------
-
-def p_multiple_commands(p):
- """multiple_commands : newline_sequence
- | complete_command
- | multiple_commands complete_command"""
- if len(p)==2:
- if p[1] is not None:
- p[0] = [p[1]]
- else:
- p[0] = []
- else:
- p[0] = p[1] + [p[2]]
-
-def p_complete_command(p):
- """complete_command : list separator
- | list"""
- if len(p)==3 and p[2] and p[2][1] == '&':
- p[0] = ('async', p[1])
- else:
- p[0] = p[1]
-
-def p_list(p):
- """list : list separator_op and_or
- | and_or"""
- if len(p)==2:
- p[0] = [p[1]]
- else:
- #if p[2]!=';':
- # raise NotImplementedError('AND-OR list asynchronous execution is not implemented')
- p[0] = p[1] + [p[3]]
-
-def p_and_or(p):
- """and_or : pipeline
- | and_or AND_IF linebreak pipeline
- | and_or OR_IF linebreak pipeline"""
- if len(p)==2:
- p[0] = p[1]
- else:
- p[0] = ('and_or', AndOr(p[2], p[1], p[4]))
-
-def p_maybe_bang_word(p):
- """maybe_bang_word : Bang"""
- p[0] = ('maybe_bang_word', p[1])
-
-def p_pipeline(p):
- """pipeline : pipe_sequence
- | bang_word pipe_sequence"""
- if len(p)==3:
- p[0] = ('pipeline', Pipeline(p[2][1:], True))
- else:
- p[0] = ('pipeline', Pipeline(p[1][1:]))
-
-def p_pipe_sequence(p):
- """pipe_sequence : command
- | pipe_sequence PIPE linebreak command"""
- if len(p)==2:
- p[0] = ['pipe_sequence', p[1]]
- else:
- p[0] = p[1] + [p[4]]
-
-def p_command(p):
- """command : simple_command
- | compound_command
- | compound_command redirect_list
- | function_definition"""
-
- if p[1][0] in ( 'simple_command',
- 'for_clause',
- 'while_clause',
- 'until_clause',
- 'case_clause',
- 'if_clause',
- 'function_definition',
- 'subshell',
- 'brace_group',):
- if len(p) == 2:
- p[0] = p[1]
- else:
- p[0] = ('redirect_list', RedirectList(p[1], p[2][1:]))
- else:
- raise NotImplementedError('%s command is not implemented' % repr(p[1][0]))
-
-def p_compound_command(p):
- """compound_command : brace_group
- | subshell
- | for_clause
- | case_clause
- | if_clause
- | while_clause
- | until_clause"""
- p[0] = p[1]
-
-def p_subshell(p):
- """subshell : LPARENS compound_list RPARENS"""
- p[0] = ('subshell', SubShell(p[2][1:]))
-
-def p_compound_list(p):
- """compound_list : term
- | newline_list term
- | term separator
- | newline_list term separator"""
- productions = p[1:]
- try:
- sep = get_production(productions, 'separator')
- if sep[1]!=';':
- raise NotImplementedError()
- except KeyError:
- pass
- term = get_production(productions, 'term')
- p[0] = ['compound_list'] + term[1:]
-
-def p_term(p):
- """term : term separator and_or
- | and_or"""
- if len(p)==2:
- p[0] = ['term', p[1]]
- else:
- if p[2] is not None and p[2][1] == '&':
- p[0] = ['term', ('async', p[1][1:])] + [p[3]]
- else:
- p[0] = p[1] + [p[3]]
-
-def p_maybe_for_word(p):
- # Rearrange 'For' priority wrt TOKEN. See p_for_word
- """maybe_for_word : For"""
- p[0] = ('maybe_for_word', p[1])
-
-def p_for_clause(p):
- """for_clause : for_word name linebreak do_group
- | for_word name linebreak in sequential_sep do_group
- | for_word name linebreak in wordlist sequential_sep do_group"""
- productions = p[1:]
- do_group = get_production(productions, 'do_group')
- try:
- items = get_production(productions, 'in')[1:]
- except KeyError:
- raise NotImplementedError('"in" omission is not implemented')
-
- try:
- items = get_production(productions, 'wordlist')[1:]
- except KeyError:
- items = []
-
- name = p[2]
- p[0] = ('for_clause', ForLoop(name, items, do_group[1:]))
-
-def p_name(p):
- """name : token""" #Was NAME instead of token
- p[0] = p[1]
-
-def p_in(p):
- """in : In"""
- p[0] = ('in', p[1])
-
-def p_wordlist(p):
- """wordlist : wordlist token
- | token"""
- if len(p)==2:
- p[0] = ['wordlist', ('TOKEN', p[1])]
- else:
- p[0] = p[1] + [('TOKEN', p[2])]
-
-def p_case_clause(p):
- """case_clause : Case token linebreak in linebreak case_list Esac
- | Case token linebreak in linebreak case_list_ns Esac
- | Case token linebreak in linebreak Esac"""
- if len(p) < 8:
- items = []
- else:
- items = p[6][1:]
- name = p[2]
- p[0] = ('case_clause', Case(name, [c[1] for c in items]))
-
-def p_case_list_ns(p):
- """case_list_ns : case_list case_item_ns
- | case_item_ns"""
- p_case_list(p)
-
-def p_case_list(p):
- """case_list : case_list case_item
- | case_item"""
- if len(p)==2:
- p[0] = ['case_list', p[1]]
- else:
- p[0] = p[1] + [p[2]]
-
-def p_case_item_ns(p):
- """case_item_ns : pattern RPARENS linebreak
- | pattern RPARENS compound_list linebreak
- | LPARENS pattern RPARENS linebreak
- | LPARENS pattern RPARENS compound_list linebreak"""
- p_case_item(p)
-
-def p_case_item(p):
- """case_item : pattern RPARENS linebreak DSEMI linebreak
- | pattern RPARENS compound_list DSEMI linebreak
- | LPARENS pattern RPARENS linebreak DSEMI linebreak
- | LPARENS pattern RPARENS compound_list DSEMI linebreak"""
- if len(p) < 7:
- name = p[1][1:]
- else:
- name = p[2][1:]
-
- try:
- cmds = get_production(p[1:], "compound_list")[1:]
- except KeyError:
- cmds = []
-
- p[0] = ('case_item', (name, cmds))
-
-def p_pattern(p):
- """pattern : token
- | pattern PIPE token"""
- if len(p)==2:
- p[0] = ['pattern', ('TOKEN', p[1])]
- else:
- p[0] = p[1] + [('TOKEN', p[2])]
-
-def p_maybe_if_word(p):
- # Rearrange 'If' priority wrt TOKEN. See p_if_word
- """maybe_if_word : If"""
- p[0] = ('maybe_if_word', p[1])
-
-def p_maybe_then_word(p):
- # Rearrange 'Then' priority wrt TOKEN. See p_then_word
- """maybe_then_word : Then"""
- p[0] = ('maybe_then_word', p[1])
-
-def p_if_clause(p):
- """if_clause : if_word compound_list then_word compound_list else_part Fi
- | if_word compound_list then_word compound_list Fi"""
- else_part = []
- if len(p)==7:
- else_part = p[5]
- p[0] = ('if_clause', IfCond(p[2][1:], p[4][1:], else_part))
-
-def p_else_part(p):
- """else_part : Elif compound_list then_word compound_list else_part
- | Elif compound_list then_word compound_list
- | Else compound_list"""
- if len(p)==3:
- p[0] = p[2][1:]
- else:
- else_part = []
- if len(p)==6:
- else_part = p[5]
- p[0] = ('elif', IfCond(p[2][1:], p[4][1:], else_part))
-
-def p_while_clause(p):
- """while_clause : While compound_list do_group"""
- p[0] = ('while_clause', WhileLoop(p[2][1:], p[3][1:]))
-
-def p_maybe_until_word(p):
- # Rearrange 'Until' priority wrt TOKEN. See p_until_word
- """maybe_until_word : Until"""
- p[0] = ('maybe_until_word', p[1])
-
-def p_until_clause(p):
- """until_clause : until_word compound_list do_group"""
- p[0] = ('until_clause', UntilLoop(p[2][1:], p[3][1:]))
-
-def p_function_definition(p):
- """function_definition : fname LPARENS RPARENS linebreak function_body"""
- p[0] = ('function_definition', FunDef(p[1], p[5]))
-
-def p_function_body(p):
- """function_body : compound_command
- | compound_command redirect_list"""
- if len(p)!=2:
-        raise NotImplementedError('function redirection lists are not implemented')
- p[0] = p[1]
-
-def p_fname(p):
- """fname : TOKEN""" #Was NAME instead of token
- p[0] = p[1]
-
-def p_brace_group(p):
- """brace_group : Lbrace compound_list Rbrace"""
- p[0] = ('brace_group', BraceGroup(p[2][1:]))
-
-def p_maybe_done_word(p):
- #See p_assignment_word for details.
- """maybe_done_word : Done"""
- p[0] = ('maybe_done_word', p[1])
-
-def p_maybe_do_word(p):
- """maybe_do_word : Do"""
- p[0] = ('maybe_do_word', p[1])
-
-def p_do_group(p):
- """do_group : do_word compound_list done_word"""
- #Do group contains a list of AndOr
- p[0] = ['do_group'] + p[2][1:]
-
-def p_simple_command(p):
- """simple_command : cmd_prefix cmd_word cmd_suffix
- | cmd_prefix cmd_word
- | cmd_prefix
- | cmd_name cmd_suffix
- | cmd_name"""
- words, redirs, assigns = [], [], []
- for e in p[1:]:
- name = e[0]
- if name in ('cmd_prefix', 'cmd_suffix'):
- for sube in e[1:]:
- subname = sube[0]
- if subname=='io_redirect':
- redirs.append(make_io_redirect(sube))
- elif subname=='ASSIGNMENT_WORD':
- assigns.append(sube)
- else:
- words.append(sube)
- elif name in ('cmd_word', 'cmd_name'):
- words.append(e)
-
- cmd = SimpleCommand(words, redirs, assigns)
- p[0] = ('simple_command', cmd)
-
-def p_cmd_name(p):
- """cmd_name : TOKEN"""
- p[0] = ('cmd_name', p[1])
-
-def p_cmd_word(p):
- """cmd_word : token"""
- p[0] = ('cmd_word', p[1])
-
-def p_maybe_assignment_word(p):
- #See p_assignment_word for details.
- """maybe_assignment_word : ASSIGNMENT_WORD"""
- p[0] = ('maybe_assignment_word', p[1])
-
-def p_cmd_prefix(p):
- """cmd_prefix : io_redirect
- | cmd_prefix io_redirect
- | assignment_word
- | cmd_prefix assignment_word"""
- try:
- prefix = get_production(p[1:], 'cmd_prefix')
- except KeyError:
- prefix = ['cmd_prefix']
-
- try:
- value = get_production(p[1:], 'assignment_word')[1]
- value = ('ASSIGNMENT_WORD', value.split('=', 1))
- except KeyError:
- value = get_production(p[1:], 'io_redirect')
- p[0] = prefix + [value]
-
-def p_cmd_suffix(p):
- """cmd_suffix : io_redirect
- | cmd_suffix io_redirect
- | token
- | cmd_suffix token
- | maybe_for_word
- | cmd_suffix maybe_for_word
- | maybe_done_word
- | cmd_suffix maybe_done_word
- | maybe_do_word
- | cmd_suffix maybe_do_word
- | maybe_until_word
- | cmd_suffix maybe_until_word
- | maybe_assignment_word
- | cmd_suffix maybe_assignment_word
- | maybe_if_word
- | cmd_suffix maybe_if_word
- | maybe_then_word
- | cmd_suffix maybe_then_word
- | maybe_bang_word
- | cmd_suffix maybe_bang_word"""
- try:
- suffix = get_production(p[1:], 'cmd_suffix')
- token = p[2]
- except KeyError:
- suffix = ['cmd_suffix']
- token = p[1]
-
- if isinstance(token, tuple):
- if token[0]=='io_redirect':
- p[0] = suffix + [token]
- else:
- #Convert maybe_* to TOKEN if necessary
- p[0] = suffix + [('TOKEN', token[1])]
- else:
- p[0] = suffix + [('TOKEN', token)]
-
-def p_redirect_list(p):
- """redirect_list : io_redirect
- | redirect_list io_redirect"""
- if len(p) == 2:
- p[0] = ['redirect_list', make_io_redirect(p[1])]
- else:
- p[0] = p[1] + [make_io_redirect(p[2])]
-
-def p_io_redirect(p):
- """io_redirect : io_file
- | IO_NUMBER io_file
- | io_here
- | IO_NUMBER io_here"""
- if len(p)==3:
- p[0] = ('io_redirect', p[1], p[2])
- else:
- p[0] = ('io_redirect', None, p[1])
-
-def p_io_file(p):
- #Return the tuple (operator, filename)
- """io_file : LESS filename
- | LESSAND filename
- | GREATER filename
- | GREATAND filename
- | DGREAT filename
- | LESSGREAT filename
- | CLOBBER filename"""
-    #Extract the filename string from the 'filename' production
- p[0] = ('io_file', p[1], p[2][1])
-
-def p_filename(p):
- #Return the filename
- """filename : TOKEN"""
- p[0] = ('filename', p[1])
-
-def p_io_here(p):
- """io_here : DLESS here_end
- | DLESSDASH here_end"""
- p[0] = ('io_here', p[1], p[2][1], p[2][2])
-
-def p_here_end(p):
- """here_end : HERENAME TOKEN"""
- p[0] = ('here_document', p[1], p[2])
-
-def p_newline_sequence(p):
- # Nothing in the grammar can handle leading NEWLINE productions, so add
-    # this one with the lowest possible priority relative to newline_list.
- """newline_sequence : newline_list"""
- p[0] = None
-
-def p_newline_list(p):
- """newline_list : NEWLINE
- | newline_list NEWLINE"""
- p[0] = None
-
-def p_linebreak(p):
- """linebreak : newline_list
- | empty"""
- p[0] = None
-
-def p_separator_op(p):
- """separator_op : COMMA
- | AMP"""
- p[0] = p[1]
-
-def p_separator(p):
- """separator : separator_op linebreak
- | newline_list"""
- if len(p)==2:
- #Ignore newlines
- p[0] = None
- else:
- #Keep the separator operator
- p[0] = ('separator', p[1])
-
-def p_sequential_sep(p):
- """sequential_sep : COMMA linebreak
- | newline_list"""
- p[0] = None
-
-# Low priority TOKEN => for_word conversion.
-# Let maybe_for_word be used as a token when necessary in higher priority
-# rules.
-def p_for_word(p):
- """for_word : maybe_for_word"""
- p[0] = p[1]
-
-def p_if_word(p):
- """if_word : maybe_if_word"""
- p[0] = p[1]
-
-def p_then_word(p):
- """then_word : maybe_then_word"""
- p[0] = p[1]
-
-def p_done_word(p):
- """done_word : maybe_done_word"""
- p[0] = p[1]
-
-def p_do_word(p):
- """do_word : maybe_do_word"""
- p[0] = p[1]
-
-def p_until_word(p):
- """until_word : maybe_until_word"""
- p[0] = p[1]
-
-def p_assignment_word(p):
- """assignment_word : maybe_assignment_word"""
- p[0] = ('assignment_word', p[1][1])
-
-def p_bang_word(p):
- """bang_word : maybe_bang_word"""
- p[0] = ('bang_word', p[1][1])
-
-def p_token(p):
- """token : TOKEN
- | Fi"""
- p[0] = p[1]
-
-def p_empty(p):
- 'empty :'
- p[0] = None
-
-# Error rule for syntax errors
-def p_error(p):
- msg = []
- w = msg.append
- w('%r\n' % p)
- w('followed by:\n')
- for i in range(5):
- n = yacc.token()
- if not n:
- break
- w(' %r\n' % n)
- raise sherrors.ShellSyntaxError(''.join(msg))
-
-# Build the parser
-try:
- import pyshtables
-except ImportError:
- outputdir = os.path.dirname(__file__)
- if not os.access(outputdir, os.W_OK):
- outputdir = ''
- yacc.yacc(tabmodule = 'pyshtables', outputdir = outputdir, debug = 0)
-else:
- yacc.yacc(tabmodule = 'pysh.pyshtables', write_tables = 0, debug = 0)
-
-
-def parse(input, eof=False, debug=False):
- """Parse a whole script at once and return the generated AST and unconsumed
- data in a tuple.
-
- NOTE: eof is probably meaningless for now, the parser being unable to work
- in pull mode. It should be set to True.
- """
- lexer = pyshlex.PLYLexer()
- remaining = lexer.add(input, eof)
- if lexer.is_empty():
- return [], remaining
- if debug:
- debug = 2
- return yacc.parse(lexer=lexer, debug=debug), remaining
-
-#-------------------------------------------------------------------------------
-# AST rendering helpers
-#-------------------------------------------------------------------------------
-
-def format_commands(v):
- """Return a tree made of strings and lists. Make command trees easier to
- display.
- """
- if isinstance(v, list):
- return [format_commands(c) for c in v]
- if isinstance(v, tuple):
- if len(v)==2 and isinstance(v[0], str) and not isinstance(v[1], str):
- if v[0] == 'async':
- return ['AsyncList', map(format_commands, v[1])]
- else:
- #Avoid decomposing tuples like ('pipeline', Pipeline(...))
- return format_commands(v[1])
- return format_commands(list(v))
- elif isinstance(v, IfCond):
- name = ['IfCond']
- name += ['if', map(format_commands, v.cond)]
- name += ['then', map(format_commands, v.if_cmds)]
- name += ['else', map(format_commands, v.else_cmds)]
- return name
- elif isinstance(v, ForLoop):
- name = ['ForLoop']
- name += [repr(v.name)+' in ', map(str, v.items)]
- name += ['commands', map(format_commands, v.cmds)]
- return name
- elif isinstance(v, AndOr):
- return [v.op, format_commands(v.left), format_commands(v.right)]
- elif isinstance(v, Pipeline):
- name = 'Pipeline'
- if v.reverse_status:
- name = '!' + name
- return [name, format_commands(v.commands)]
- elif isinstance(v, Case):
- name = ['Case']
- name += [v.name, format_commands(v.items)]
- elif isinstance(v, SimpleCommand):
- name = ['SimpleCommand']
- if v.words:
- name += ['words', map(str, v.words)]
- if v.assigns:
- assigns = [tuple(a[1]) for a in v.assigns]
- name += ['assigns', map(str, assigns)]
- if v.redirs:
- name += ['redirs', map(format_commands, v.redirs)]
- return name
- elif isinstance(v, RedirectList):
- name = ['RedirectList']
- if v.redirs:
- name += ['redirs', map(format_commands, v.redirs)]
- name += ['command', format_commands(v.cmd)]
- return name
- elif isinstance(v, IORedirect):
- return ' '.join(map(str, (v.io_number, v.op, v.filename)))
- elif isinstance(v, HereDocument):
- return ' '.join(map(str, (v.io_number, v.op, repr(v.name), repr(v.content))))
- elif isinstance(v, SubShell):
- return ['SubShell', map(format_commands, v.cmds)]
- else:
- return repr(v)
-
-def print_commands(cmds, output=sys.stdout):
- """Pretty print a command tree."""
- def print_tree(cmd, spaces, output):
- if isinstance(cmd, list):
- for c in cmd:
- print_tree(c, spaces + 3, output)
- else:
- print >>output, ' '*spaces + str(cmd)
-
- formatted = format_commands(cmds)
- print_tree(formatted, 0, output)
-
-
-def stringify_commands(cmds):
- """Serialize a command tree as a string.
-
- Returned string is not pretty and is currently used for unit tests only.
- """
- def stringify(value):
- output = []
- if isinstance(value, list):
- formatted = []
- for v in value:
- formatted.append(stringify(v))
- formatted = ' '.join(formatted)
- output.append(''.join(['<', formatted, '>']))
- else:
- output.append(value)
- return ' '.join(output)
-
- return stringify(format_commands(cmds))
-
-
-def visit_commands(cmds, callable):
- """Visit the command tree and execute callable on every Pipeline and
- SimpleCommand instances.
- """
- if isinstance(cmds, (tuple, list)):
- map(lambda c: visit_commands(c,callable), cmds)
- elif isinstance(cmds, (Pipeline, SimpleCommand)):
- callable(cmds)
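The parse() docstring above fixes the (AST, remaining) contract. A minimal
end-to-end sketch (again assuming the pysh directory is on sys.path; the
command string is illustrative):

    import pyshyacc

    # Parse a small pipeline and pretty-print the resulting command tree;
    # eof=True per the note in parse()'s docstring.
    cmds, remaining = pyshyacc.parse('ls | wc -l\n', eof=True)
    pyshyacc.print_commands(cmds)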
diff --git a/bitbake/lib/bb/pysh/sherrors.py b/bitbake/lib/bb/pysh/sherrors.py
deleted file mode 100644
index 1d5bd53b3a..0000000000
--- a/bitbake/lib/bb/pysh/sherrors.py
+++ /dev/null
@@ -1,41 +0,0 @@
-# sherrors.py - shell errors and signals
-#
-# Copyright 2007 Patrick Mezard
-#
-# This software may be used and distributed according to the terms
-# of the GNU General Public License, incorporated herein by reference.
-
-"""Define shell exceptions and error codes.
-"""
-
-class ShellError(Exception):
- pass
-
-class ShellSyntaxError(ShellError):
- pass
-
-class UtilityError(ShellError):
- """Raised upon utility syntax error (option or operand error)."""
- pass
-
-class ExpansionError(ShellError):
- pass
-
-class CommandNotFound(ShellError):
- """Specified command was not found."""
- pass
-
-class RedirectionError(ShellError):
- pass
-
-class VarAssignmentError(ShellError):
- """Variable assignment error."""
- pass
-
-class ExitSignal(ShellError):
- """Exit signal."""
- pass
-
-class ReturnSignal(ShellError):
- """Exit signal."""
- pass \ No newline at end of file
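Since every exception above derives from ShellError, callers can catch the
whole hierarchy at the base class. A small illustrative sketch (module path
assumed importable):

    import sherrors

    try:
        raise sherrors.ShellSyntaxError('unexpected token')
    except sherrors.ShellError as exc:
        # Subclasses such as ShellSyntaxError are caught via the base class.
        print('shell error: %s' % exc)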
diff --git a/bitbake/lib/bb/pysh/subprocess_fix.py b/bitbake/lib/bb/pysh/subprocess_fix.py
deleted file mode 100644
index 46eca22802..0000000000
--- a/bitbake/lib/bb/pysh/subprocess_fix.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# subprocess - Subprocesses with accessible I/O streams
-#
-# For more information about this module, see PEP 324.
-#
-# This module should remain compatible with Python 2.2, see PEP 291.
-#
-# Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se>
-#
-# Licensed to PSF under a Contributor Agreement.
-# See http://www.python.org/2.4/license for licensing details.
-
-def list2cmdline(seq):
- """
- Translate a sequence of arguments into a command line
- string, using the same rules as the MS C runtime:
-
- 1) Arguments are delimited by white space, which is either a
- space or a tab.
-
- 2) A string surrounded by double quotation marks is
- interpreted as a single argument, regardless of white space
- contained within. A quoted string can be embedded in an
- argument.
-
- 3) A double quotation mark preceded by a backslash is
- interpreted as a literal double quotation mark.
-
- 4) Backslashes are interpreted literally, unless they
- immediately precede a double quotation mark.
-
- 5) If backslashes immediately precede a double quotation mark,
- every pair of backslashes is interpreted as a literal
- backslash. If the number of backslashes is odd, the last
- backslash escapes the next double quotation mark as
- described in rule 3.
- """
-
- # See
- # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp
- result = []
- needquote = False
- for arg in seq:
- bs_buf = []
-
- # Add a space to separate this argument from the others
- if result:
- result.append(' ')
-
- needquote = (" " in arg) or ("\t" in arg) or ("|" in arg) or arg == ""
- if needquote:
- result.append('"')
-
- for c in arg:
- if c == '\\':
- # Don't know if we need to double yet.
- bs_buf.append(c)
- elif c == '"':
-                # Double backslashes.
- result.append('\\' * len(bs_buf)*2)
- bs_buf = []
- result.append('\\"')
- else:
- # Normal char
- if bs_buf:
- result.extend(bs_buf)
- bs_buf = []
- result.append(c)
-
-        # Add remaining backslashes, if any.
- if bs_buf:
- result.extend(bs_buf)
-
- if needquote:
- result.extend(bs_buf)
- result.append('"')
-
- return ''.join(result)
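The quoting rules in the docstring can be sanity-checked directly. Two hedged
assertions (assuming the pysh directory is on sys.path; rule numbers refer to
the docstring above):

    from subprocess_fix import list2cmdline

    # Rule 2: arguments containing whitespace are quoted.
    assert list2cmdline(['a', 'b c']) == 'a "b c"'
    # Rules 3 and 5: backslashes before a quote are doubled and the quote
    # itself is backslash-escaped.
    assert list2cmdline(['a\\"b']) == 'a\\\\\\"b'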
diff --git a/bitbake/lib/bb/runqueue.py b/bitbake/lib/bb/runqueue.py
deleted file mode 100644
index 172e591522..0000000000
--- a/bitbake/lib/bb/runqueue.py
+++ /dev/null
@@ -1,1663 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'RunQueue' implementation
-
-Handles preparation and execution of a queue of tasks
-"""
-
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import copy
-import os
-import sys
-import signal
-import stat
-import fcntl
-import logging
-import bb
-from bb import msg, data, event
-
-bblogger = logging.getLogger("BitBake")
-logger = logging.getLogger("BitBake.RunQueue")
-
-class RunQueueStats:
- """
- Holds statistics on the tasks handled by the associated runQueue
- """
- def __init__(self, total):
- self.completed = 0
- self.skipped = 0
- self.failed = 0
- self.active = 0
- self.total = total
-
- def copy(self):
- obj = self.__class__(self.total)
- obj.__dict__.update(self.__dict__)
- return obj
-
- def taskFailed(self):
- self.active = self.active - 1
- self.failed = self.failed + 1
-
- def taskCompleted(self, number = 1):
- self.active = self.active - number
- self.completed = self.completed + number
-
- def taskSkipped(self, number = 1):
- self.active = self.active + number
- self.skipped = self.skipped + number
-
- def taskActive(self):
- self.active = self.active + 1
-
-# These values indicate the next step due to be run in the
-# runQueue state machine
-runQueuePrepare = 2
-runQueueSceneInit = 3
-runQueueSceneRun = 4
-runQueueRunInit = 5
-runQueueRunning = 6
-runQueueFailed = 7
-runQueueCleanUp = 8
-runQueueComplete = 9
-runQueueChildProcess = 10
-
-class RunQueueScheduler(object):
- """
- Control the order tasks are scheduled in.
- """
- name = "basic"
-
- def __init__(self, runqueue, rqdata):
- """
- The default scheduler just returns the first buildable task (the
-        priority map is sorted by task number)
- """
- self.rq = runqueue
- self.rqdata = rqdata
- numTasks = len(self.rqdata.runq_fnid)
-
- self.prio_map = []
- self.prio_map.extend(range(numTasks))
-
- def next_buildable_task(self):
- """
- Return the id of the first task we find that is buildable
- """
- for tasknum in xrange(len(self.rqdata.runq_fnid)):
- taskid = self.prio_map[tasknum]
- if self.rq.runq_running[taskid] == 1:
- continue
- if self.rq.runq_buildable[taskid] == 1:
- return taskid
-
- def next(self):
- """
- Return the id of the task we should build next
- """
- if self.rq.stats.active < self.rq.number_tasks:
- return self.next_buildable_task()
-
-class RunQueueSchedulerSpeed(RunQueueScheduler):
- """
-    A scheduler optimised for speed. The priority map is sorted by task weight;
-    heavier weighted tasks (tasks needed by the most other tasks) are run first.
- """
- name = "speed"
-
- def __init__(self, runqueue, rqdata):
- """
- The priority map is sorted by task weight.
- """
-
- self.rq = runqueue
- self.rqdata = rqdata
-
- sortweight = sorted(copy.deepcopy(self.rqdata.runq_weight))
- copyweight = copy.deepcopy(self.rqdata.runq_weight)
- self.prio_map = []
-
- for weight in sortweight:
- idx = copyweight.index(weight)
- self.prio_map.append(idx)
- copyweight[idx] = -1
-
- self.prio_map.reverse()
-
-class RunQueueSchedulerCompletion(RunQueueSchedulerSpeed):
- """
-    A scheduler optimised to complete .bb files as quickly as possible. The
-    priority map is sorted by task weight, but then reordered so that once a
-    given .bb file starts to build, it is completed as quickly as possible. This works
- well where disk space is at a premium and classes like OE's rm_work are in
- force.
- """
- name = "completion"
-
- def __init__(self, runqueue, rqdata):
- RunQueueSchedulerSpeed.__init__(self, runqueue, rqdata)
-
- #FIXME - whilst this groups all fnids together it does not reorder the
- #fnid groups optimally.
-
- basemap = copy.deepcopy(self.prio_map)
- self.prio_map = []
- while (len(basemap) > 0):
- entry = basemap.pop(0)
- self.prio_map.append(entry)
- fnid = self.rqdata.runq_fnid[entry]
- todel = []
- for entry in basemap:
- entry_fnid = self.rqdata.runq_fnid[entry]
- if entry_fnid == fnid:
- todel.append(basemap.index(entry))
- self.prio_map.append(entry)
- todel.reverse()
- for idx in todel:
- del basemap[idx]
-
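The "speed" ordering above boils down to sorting task ids by descending
weight. A compact standalone equivalent for distinct weights (the values are
illustrative):

    weights = [3, 1, 2]   # runq_weight for tasks 0..2
    prio_map = [i for _, i in
                sorted(((w, i) for i, w in enumerate(weights)), reverse=True)]
    assert prio_map == [0, 2, 1]   # heaviest task is scheduled first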
-class RunQueueData:
- """
- BitBake Run Queue implementation
- """
- def __init__(self, rq, cooker, cfgData, dataCache, taskData, targets):
- self.cooker = cooker
- self.dataCache = dataCache
- self.taskData = taskData
- self.targets = targets
- self.rq = rq
-
- self.stampwhitelist = bb.data.getVar("BB_STAMP_WHITELIST", cfgData, 1) or ""
- self.multi_provider_whitelist = (bb.data.getVar("MULTI_PROVIDER_WHITELIST", cfgData, 1) or "").split()
-
- self.reset()
-
- def reset(self):
- self.runq_fnid = []
- self.runq_task = []
- self.runq_depends = []
- self.runq_revdeps = []
- self.runq_hash = []
-
- def runq_depends_names(self, ids):
- import re
- ret = []
- for id in self.runq_depends[ids]:
- nam = os.path.basename(self.get_user_idstring(id))
- nam = re.sub("_[^,]*,", ",", nam)
- ret.extend([nam])
- return ret
-
- def get_user_idstring(self, task):
- fn = self.taskData.fn_index[self.runq_fnid[task]]
- taskname = self.runq_task[task]
- return "%s, %s" % (fn, taskname)
-
- def get_task_id(self, fnid, taskname):
- for listid in xrange(len(self.runq_fnid)):
- if self.runq_fnid[listid] == fnid and self.runq_task[listid] == taskname:
- return listid
- return None
-
- def circular_depchains_handler(self, tasks):
- """
- Some tasks aren't buildable, likely due to circular dependency issues.
-        Identify the circular dependencies and print them in a user-readable format.
- """
- from copy import deepcopy
-
- valid_chains = []
- explored_deps = {}
- msgs = []
-
- def chain_reorder(chain):
- """
- Reorder a dependency chain so the lowest task id is first
- """
- lowest = 0
- new_chain = []
- for entry in xrange(len(chain)):
- if chain[entry] < chain[lowest]:
- lowest = entry
- new_chain.extend(chain[lowest:])
- new_chain.extend(chain[:lowest])
- return new_chain
-
- def chain_compare_equal(chain1, chain2):
- """
- Compare two dependency chains and see if they're the same
- """
- if len(chain1) != len(chain2):
- return False
- for index in xrange(len(chain1)):
- if chain1[index] != chain2[index]:
- return False
- return True
-
- def chain_array_contains(chain, chain_array):
- """
- Return True if chain_array contains chain
- """
- for ch in chain_array:
- if chain_compare_equal(ch, chain):
- return True
- return False
-
- def find_chains(taskid, prev_chain):
- prev_chain.append(taskid)
- total_deps = []
- total_deps.extend(self.runq_revdeps[taskid])
- for revdep in self.runq_revdeps[taskid]:
- if revdep in prev_chain:
- idx = prev_chain.index(revdep)
- # To prevent duplicates, reorder the chain to start with the lowest taskid
- # and search through an array of those we've already printed
- chain = prev_chain[idx:]
- new_chain = chain_reorder(chain)
- if not chain_array_contains(new_chain, valid_chains):
- valid_chains.append(new_chain)
- msgs.append("Dependency loop #%d found:\n" % len(valid_chains))
- for dep in new_chain:
- msgs.append(" Task %s (%s) (dependent Tasks %s)\n" % (dep, self.get_user_idstring(dep), self.runq_depends_names(dep)))
- msgs.append("\n")
- if len(valid_chains) > 10:
- msgs.append("Aborted dependency loops search after 10 matches.\n")
- return msgs
- continue
- scan = False
- if revdep not in explored_deps:
- scan = True
- elif revdep in explored_deps[revdep]:
- scan = True
- else:
- for dep in prev_chain:
- if dep in explored_deps[revdep]:
- scan = True
- if scan:
- find_chains(revdep, copy.deepcopy(prev_chain))
- for dep in explored_deps[revdep]:
- if dep not in total_deps:
- total_deps.append(dep)
-
- explored_deps[taskid] = total_deps
-
- for task in tasks:
- find_chains(task, [])
-
- return msgs
-
- def calculate_task_weights(self, endpoints):
- """
-        Calculate a number representing the "weight" of each task. Heavier weighted tasks
-        have more dependents and hence should be executed sooner for maximum speed.
-
-        This function also sanity-checks the task list, finding tasks that are not
-        possible to execute due to circular dependencies.
- """
-
- numTasks = len(self.runq_fnid)
- weight = []
- deps_left = []
- task_done = []
-
- for listid in xrange(numTasks):
- task_done.append(False)
- weight.append(0)
- deps_left.append(len(self.runq_revdeps[listid]))
-
- for listid in endpoints:
- weight[listid] = 1
- task_done[listid] = True
-
- while True:
- next_points = []
- for listid in endpoints:
- for revdep in self.runq_depends[listid]:
- weight[revdep] = weight[revdep] + weight[listid]
- deps_left[revdep] = deps_left[revdep] - 1
- if deps_left[revdep] == 0:
- next_points.append(revdep)
- task_done[revdep] = True
- endpoints = next_points
- if len(next_points) == 0:
- break
-
- # Circular dependency sanity check
- problem_tasks = []
- for task in xrange(numTasks):
- if task_done[task] is False or deps_left[task] != 0:
- problem_tasks.append(task)
- logger.debug(2, "Task %s (%s) is not buildable", task, self.get_user_idstring(task))
- logger.debug(2, "(Complete marker was %s and the remaining dependency count was %s)\n", task_done[task], deps_left[task])
-
- if problem_tasks:
- message = "Unbuildable tasks were found.\n"
- message = message + "These are usually caused by circular dependencies and any circular dependency chains found will be printed below. Increase the debug level to see a list of unbuildable tasks.\n\n"
- message = message + "Identifying dependency loops (this may take a short while)...\n"
- logger.error(message)
-
- msgs = self.circular_depchains_handler(problem_tasks)
-
- message = "\n"
- for msg in msgs:
- message = message + msg
- bb.msg.fatal(bb.msg.domain.RunQueue, message)
-
- return weight
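To make the propagation above concrete, here is a minimal standalone sketch
for a diamond graph (D depends on B and C, which both depend on A; the task
names are illustrative):

    depends = {'A': [], 'B': ['A'], 'C': ['A'], 'D': ['B', 'C']}
    revdeps = dict((t, [u for u in depends if t in depends[u]]) for t in depends)
    weight = dict((t, 0) for t in depends)
    deps_left = dict((t, len(revdeps[t])) for t in depends)
    endpoints = [t for t in depends if not revdeps[t]]   # ['D']
    for t in endpoints:
        weight[t] = 1
    while endpoints:
        next_points = []
        for t in endpoints:
            for dep in depends[t]:
                weight[dep] += weight[t]
                deps_left[dep] -= 1
                if deps_left[dep] == 0:
                    next_points.append(dep)
        endpoints = next_points
    # weight == {'A': 2, 'B': 1, 'C': 1, 'D': 1}: A has the most dependents,
    # so it is weighted heaviest and scheduled first.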
-
- def prepare(self):
- """
- Turn a set of taskData into a RunQueue and compute data needed
- to optimise the execution order.
- """
-
- runq_build = []
- recursive_tdepends = {}
- runq_recrdepends = []
- tdepends_fnid = {}
-
- taskData = self.taskData
-
- if len(taskData.tasks_name) == 0:
- # Nothing to do
- return 0
-
- logger.info("Preparing runqueue")
-
- # Step A - Work out a list of tasks to run
- #
- # Taskdata gives us a list of possible providers for every build and run
- # target ordered by priority. It also gives information on each of those
- # providers.
- #
- # To create the actual list of tasks to execute we fix the list of
- # providers and then resolve the dependencies into task IDs. This
- # process is repeated for each type of dependency (tdepends, deptask,
-        # rdeptask, recrdeptask, idepends).
-
- def add_build_dependencies(depids, tasknames, depends):
- for depid in depids:
- # Won't be in build_targets if ASSUME_PROVIDED
- if depid not in taskData.build_targets:
- continue
- depdata = taskData.build_targets[depid][0]
- if depdata is None:
- continue
- dep = taskData.fn_index[depdata]
- for taskname in tasknames:
- taskid = taskData.gettask_id(dep, taskname, False)
- if taskid is not None:
- depends.append(taskid)
-
- def add_runtime_dependencies(depids, tasknames, depends):
- for depid in depids:
- if depid not in taskData.run_targets:
- continue
- depdata = taskData.run_targets[depid][0]
- if depdata is None:
- continue
- dep = taskData.fn_index[depdata]
- for taskname in tasknames:
- taskid = taskData.gettask_id(dep, taskname, False)
- if taskid is not None:
- depends.append(taskid)
-
- for task in xrange(len(taskData.tasks_name)):
- depends = []
- recrdepends = []
- fnid = taskData.tasks_fnid[task]
- fn = taskData.fn_index[fnid]
- task_deps = self.dataCache.task_deps[fn]
-
- logger.debug(2, "Processing %s:%s", fn, taskData.tasks_name[task])
-
- if fnid not in taskData.failed_fnids:
-
- # Resolve task internal dependencies
- #
- # e.g. addtask before X after Y
- depends = taskData.tasks_tdepends[task]
-
- # Resolve 'deptask' dependencies
- #
- # e.g. do_sometask[deptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all DEPENDS)
- if 'deptask' in task_deps and taskData.tasks_name[task] in task_deps['deptask']:
- tasknames = task_deps['deptask'][taskData.tasks_name[task]].split()
- add_build_dependencies(taskData.depids[fnid], tasknames, depends)
-
- # Resolve 'rdeptask' dependencies
- #
- # e.g. do_sometask[rdeptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all RDEPENDS)
- if 'rdeptask' in task_deps and taskData.tasks_name[task] in task_deps['rdeptask']:
- taskname = task_deps['rdeptask'][taskData.tasks_name[task]]
- add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)
-
- # Resolve inter-task dependencies
- #
- # e.g. do_sometask[depends] = "targetname:do_someothertask"
- # (makes sure sometask runs after targetname's someothertask)
- if fnid not in tdepends_fnid:
- tdepends_fnid[fnid] = set()
- idepends = taskData.tasks_idepends[task]
- for (depid, idependtask) in idepends:
- if depid in taskData.build_targets:
- # Won't be in build_targets if ASSUME_PROVIDED
- depdata = taskData.build_targets[depid][0]
- if depdata is not None:
- dep = taskData.fn_index[depdata]
- taskid = taskData.gettask_id(dep, idependtask, False)
- if taskid is None:
-                                bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s in %s depends upon nonexistent task %s in %s" % (taskData.tasks_name[task], fn, idependtask, dep))
- depends.append(taskid)
- if depdata != fnid:
- tdepends_fnid[fnid].add(taskid)
-
-
- # Resolve recursive 'recrdeptask' dependencies (A)
- #
- # e.g. do_sometask[recrdeptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
- # We cover the recursive part of the dependencies below
- if 'recrdeptask' in task_deps and taskData.tasks_name[task] in task_deps['recrdeptask']:
- for taskname in task_deps['recrdeptask'][taskData.tasks_name[task]].split():
- recrdepends.append(taskname)
- add_build_dependencies(taskData.depids[fnid], [taskname], depends)
- add_runtime_dependencies(taskData.rdepids[fnid], [taskname], depends)
-
-                # Remove all self references
- if task in depends:
- newdep = []
- logger.debug(2, "Task %s (%s %s) contains self reference! %s", task, taskData.fn_index[taskData.tasks_fnid[task]], taskData.tasks_name[task], depends)
- for dep in depends:
- if task != dep:
- newdep.append(dep)
- depends = newdep
-
- self.runq_fnid.append(taskData.tasks_fnid[task])
- self.runq_task.append(taskData.tasks_name[task])
- self.runq_depends.append(set(depends))
- self.runq_revdeps.append(set())
- self.runq_hash.append("")
-
- runq_build.append(0)
- runq_recrdepends.append(recrdepends)
-
- #
- # Build a list of recursive cumulative dependencies for each fnid
- # We do this by fnid, since if A depends on some task in B
- # we're interested in later tasks B's fnid might have but B itself
- # doesn't depend on
- #
- # Algorithm is O(tasks) + O(tasks)*O(fnids)
- #
- reccumdepends = {}
- for task in xrange(len(self.runq_fnid)):
- fnid = self.runq_fnid[task]
- if fnid not in reccumdepends:
- if fnid in tdepends_fnid:
- reccumdepends[fnid] = tdepends_fnid[fnid]
- else:
- reccumdepends[fnid] = set()
- reccumdepends[fnid].update(self.runq_depends[task])
- for task in xrange(len(self.runq_fnid)):
- taskfnid = self.runq_fnid[task]
- for fnid in reccumdepends:
- if task in reccumdepends[fnid]:
- reccumdepends[fnid].add(task)
- if taskfnid in reccumdepends:
- reccumdepends[fnid].update(reccumdepends[taskfnid])
-
-
- # Resolve recursive 'recrdeptask' dependencies (B)
- #
- # e.g. do_sometask[recrdeptask] = "do_someothertask"
- # (makes sure sometask runs after someothertask of all DEPENDS, RDEPENDS and intertask dependencies, recursively)
- for task in xrange(len(self.runq_fnid)):
- if len(runq_recrdepends[task]) > 0:
- taskfnid = self.runq_fnid[task]
- for dep in reccumdepends[taskfnid]:
- # Ignore self references
- if dep == task:
- continue
- for taskname in runq_recrdepends[task]:
- if taskData.tasks_name[dep] == taskname:
- self.runq_depends[task].add(dep)
-
- # Step B - Mark all active tasks
- #
- # Start with the tasks we were asked to run and mark all dependencies
- # as active too. If the task is to be 'forced', clear its stamp. Once
- # all active tasks are marked, prune the ones we don't need.
-
- logger.verbose("Marking Active Tasks")
-
- def mark_active(listid, depth):
- """
- Mark an item as active along with its depends
- (calls itself recursively)
- """
-
- if runq_build[listid] == 1:
- return
-
- runq_build[listid] = 1
-
- depends = self.runq_depends[listid]
- for depend in depends:
- mark_active(depend, depth+1)
-
- self.target_pairs = []
- for target in self.targets:
- targetid = taskData.getbuild_id(target[0])
-
- if targetid not in taskData.build_targets:
- continue
-
- if targetid in taskData.failed_deps:
- continue
-
- fnid = taskData.build_targets[targetid][0]
- fn = taskData.fn_index[fnid]
- self.target_pairs.append((fn, target[1]))
-
- if fnid in taskData.failed_fnids:
- continue
-
- if target[1] not in taskData.tasks_lookup[fnid]:
- bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s does not exist for target %s" % (target[1], target[0]))
-
- listid = taskData.tasks_lookup[fnid][target[1]]
-
- mark_active(listid, 1)
-
- # Step C - Prune all inactive tasks
- #
- # Once all active tasks are marked, prune the ones we don't need.
-
- maps = []
- delcount = 0
- for listid in xrange(len(self.runq_fnid)):
- if runq_build[listid-delcount] == 1:
- maps.append(listid-delcount)
- else:
- del self.runq_fnid[listid-delcount]
- del self.runq_task[listid-delcount]
- del self.runq_depends[listid-delcount]
- del runq_build[listid-delcount]
- del self.runq_revdeps[listid-delcount]
- del self.runq_hash[listid-delcount]
- delcount = delcount + 1
- maps.append(-1)
-
- #
- # Step D - Sanity checks and computation
- #
-
- # Check to make sure we still have tasks to run
- if len(self.runq_fnid) == 0:
- if not taskData.abort:
- bb.msg.fatal(bb.msg.domain.RunQueue, "All buildable tasks have been run but the build is incomplete (--continue mode). Errors for the tasks that failed will have been printed above.")
- else:
- bb.msg.fatal(bb.msg.domain.RunQueue, "No active tasks and not in --continue mode?! Please report this bug.")
-
- logger.verbose("Pruned %s inactive tasks, %s left", delcount, len(self.runq_fnid))
-
- # Remap the dependencies to account for the deleted tasks
- # Check we didn't delete a task we depend on
- for listid in xrange(len(self.runq_fnid)):
- newdeps = []
- origdeps = self.runq_depends[listid]
- for origdep in origdeps:
- if maps[origdep] == -1:
- bb.msg.fatal(bb.msg.domain.RunQueue, "Invalid mapping - Should never happen!")
- newdeps.append(maps[origdep])
- self.runq_depends[listid] = set(newdeps)
-
- logger.verbose("Assign Weightings")
-
- # Generate a list of reverse dependencies to ease future calculations
- for listid in xrange(len(self.runq_fnid)):
- for dep in self.runq_depends[listid]:
- self.runq_revdeps[dep].add(listid)
-
- # Identify tasks at the end of dependency chains
- # Error on circular dependency loops (length two)
- endpoints = []
- for listid in xrange(len(self.runq_fnid)):
- revdeps = self.runq_revdeps[listid]
- if len(revdeps) == 0:
- endpoints.append(listid)
- for dep in revdeps:
- if dep in self.runq_depends[listid]:
- #self.dump_data(taskData)
- bb.msg.fatal(bb.msg.domain.RunQueue, "Task %s (%s) has circular dependency on %s (%s)" % (taskData.fn_index[self.runq_fnid[dep]], self.runq_task[dep], taskData.fn_index[self.runq_fnid[listid]], self.runq_task[listid]))
-
- logger.verbose("Compute totals (have %s endpoint(s))", len(endpoints))
-
- # Calculate task weights
-        # Check for higher-length circular dependencies
- self.runq_weight = self.calculate_task_weights(endpoints)
-
- # Sanity Check - Check for multiple tasks building the same provider
- prov_list = {}
- seen_fn = []
- for task in xrange(len(self.runq_fnid)):
- fn = taskData.fn_index[self.runq_fnid[task]]
- if fn in seen_fn:
- continue
- seen_fn.append(fn)
- for prov in self.dataCache.fn_provides[fn]:
- if prov not in prov_list:
- prov_list[prov] = [fn]
- elif fn not in prov_list[prov]:
- prov_list[prov].append(fn)
- error = False
- for prov in prov_list:
- if len(prov_list[prov]) > 1 and prov not in self.multi_provider_whitelist:
- error = True
- logger.error("Multiple .bb files are due to be built which each provide %s (%s).\n This usually means one provides something the other doesn't and should.", prov, " ".join(prov_list[prov]))
-
-
- # Create a whitelist usable by the stamp checks
- stampfnwhitelist = []
- for entry in self.stampwhitelist.split():
- entryid = self.taskData.getbuild_id(entry)
- if entryid not in self.taskData.build_targets:
- continue
- fnid = self.taskData.build_targets[entryid][0]
- fn = self.taskData.fn_index[fnid]
- stampfnwhitelist.append(fn)
- self.stampfnwhitelist = stampfnwhitelist
-
-        # Iterate over the task list looking for tasks with a 'setscene' function
- self.runq_setscene = []
- for task in range(len(self.runq_fnid)):
- setscene = taskData.gettask_id(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task] + "_setscene", False)
- if not setscene:
- continue
- self.runq_setscene.append(task)
-
-        # Iterate over the task list and call into the siggen code
- dealtwith = set()
- todeal = set(range(len(self.runq_fnid)))
- while len(todeal) > 0:
- for task in todeal.copy():
- if len(self.runq_depends[task] - dealtwith) == 0:
- dealtwith.add(task)
- todeal.remove(task)
- procdep = []
- for dep in self.runq_depends[task]:
- procdep.append(self.taskData.fn_index[self.runq_fnid[dep]] + "." + self.runq_task[dep])
- self.runq_hash[task] = bb.parse.siggen.get_taskhash(self.taskData.fn_index[self.runq_fnid[task]], self.runq_task[task], procdep, self.dataCache)
-
- self.hashes = {}
- self.hash_deps = {}
- for task in xrange(len(self.runq_fnid)):
- identifier = '%s.%s' % (self.taskData.fn_index[self.runq_fnid[task]],
- self.runq_task[task])
- self.hashes[identifier] = self.runq_hash[task]
- deps = []
- for dep in self.runq_depends[task]:
- depidentifier = '%s.%s' % (self.taskData.fn_index[self.runq_fnid[dep]],
- self.runq_task[dep])
- deps.append(depidentifier)
- self.hash_deps[identifier] = deps
-
- # Remove stamps for targets if force mode active
- if self.cooker.configuration.force:
- for (fn, target) in self.target_pairs:
- logger.verbose("Remove stamp %s, %s", target, fn)
- bb.build.del_stamp(target, self.dataCache, fn)
-
- return len(self.runq_fnid)
-
- def dump_data(self, taskQueue):
- """
- Dump some debug information on the internal data structures
- """
- logger.debug(3, "run_tasks:")
- for task in xrange(len(self.rqdata.runq_task)):
- logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task])
-
- logger.debug(3, "sorted_tasks:")
- for task1 in xrange(len(self.rqdata.runq_task)):
- if task1 in self.prio_map:
- task = self.prio_map[task1]
- logger.debug(3, " (%s)%s - %s: %s Deps %s RevDeps %s", task,
- taskQueue.fn_index[self.rqdata.runq_fnid[task]],
- self.rqdata.runq_task[task],
- self.rqdata.runq_weight[task],
- self.rqdata.runq_depends[task],
- self.rqdata.runq_revdeps[task])
-
-
-class RunQueue:
- def __init__(self, cooker, cfgData, dataCache, taskData, targets):
-
- self.cooker = cooker
- self.cfgData = cfgData
- self.rqdata = RunQueueData(self, cooker, cfgData, dataCache, taskData, targets)
-
- self.stamppolicy = bb.data.getVar("BB_STAMP_POLICY", cfgData, True) or "perfile"
- self.hashvalidate = bb.data.getVar("BB_HASHCHECK_FUNCTION", cfgData, True) or None
-
- self.state = runQueuePrepare
-
- def check_stamps(self):
- unchecked = {}
- current = []
- notcurrent = []
- buildable = []
-
- if self.stamppolicy == "perfile":
- fulldeptree = False
- else:
- fulldeptree = True
- stampwhitelist = []
- if self.stamppolicy == "whitelist":
- stampwhitelist = self.rqdata.stampfnwhitelist
-
- for task in xrange(len(self.rqdata.runq_fnid)):
- unchecked[task] = ""
- if len(self.rqdata.runq_depends[task]) == 0:
- buildable.append(task)
-
- def check_buildable(self, task, buildable):
- for revdep in self.rqdata.runq_revdeps[task]:
- alldeps = 1
- for dep in self.rqdata.runq_depends[revdep]:
- if dep in unchecked:
- alldeps = 0
- if alldeps == 1:
- if revdep in unchecked:
- buildable.append(revdep)
-
- for task in xrange(len(self.rqdata.runq_fnid)):
- if task not in unchecked:
- continue
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
-            # If the stamp is missing it's not current
- if not os.access(stampfile, os.F_OK):
- del unchecked[task]
- notcurrent.append(task)
- check_buildable(self, task, buildable)
- continue
-            # If it's a 'nostamp' task, it's not current
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'nostamp' in taskdep and task in taskdep['nostamp']:
- del unchecked[task]
- notcurrent.append(task)
- check_buildable(self, task, buildable)
- continue
-
- while (len(buildable) > 0):
- nextbuildable = []
- for task in buildable:
- if task in unchecked:
- fn = self.taskData.fn_index[self.rqdata.runq_fnid[task]]
- taskname = self.rqdata.runq_task[task]
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
- iscurrent = True
-
- t1 = os.stat(stampfile)[stat.ST_MTIME]
- for dep in self.rqdata.runq_depends[task]:
- if iscurrent:
- fn2 = self.taskData.fn_index[self.rqdata.runq_fnid[dep]]
- taskname2 = self.rqdata.runq_task[dep]
- stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
- if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
- if dep in notcurrent:
- iscurrent = False
- else:
- t2 = os.stat(stampfile2)[stat.ST_MTIME]
- if t1 < t2:
- iscurrent = False
- del unchecked[task]
- if iscurrent:
- current.append(task)
- else:
- notcurrent.append(task)
-
- check_buildable(self, task, nextbuildable)
-
- buildable = nextbuildable
-
- #for task in range(len(self.runq_fnid)):
- # fn = self.taskData.fn_index[self.runq_fnid[task]]
- # taskname = self.runq_task[task]
- # print "%s %s.%s" % (task, taskname, fn)
-
- #print "Unchecked: %s" % unchecked
- #print "Current: %s" % current
- #print "Not current: %s" % notcurrent
-
- if len(unchecked) > 0:
- bb.msg.fatal(bb.msg.domain.RunQueue, "check_stamps fatal internal error")
- return current
-
- def check_stamp_task(self, task, taskname = None):
- def get_timestamp(f):
- try:
- if not os.access(f, os.F_OK):
- return None
- return os.stat(f)[stat.ST_MTIME]
- except:
- return None
-
- if self.stamppolicy == "perfile":
- fulldeptree = False
- else:
- fulldeptree = True
- stampwhitelist = []
- if self.stamppolicy == "whitelist":
- stampwhitelist = self.rqdata.stampfnwhitelist
-
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- if taskname is None:
- taskname = self.rqdata.runq_task[task]
-
- stampfile = bb.build.stampfile(taskname, self.rqdata.dataCache, fn)
-
-        # If the stamp is missing it's not current
- if not os.access(stampfile, os.F_OK):
- logger.debug(2, "Stampfile %s not available", stampfile)
- return False
-        # If it's a 'nostamp' task, it's not current
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'nostamp' in taskdep and taskname in taskdep['nostamp']:
- logger.debug(2, "%s.%s is nostamp\n", fn, taskname)
- return False
-
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
- return True
-
- iscurrent = True
- t1 = get_timestamp(stampfile)
- for dep in self.rqdata.runq_depends[task]:
- if iscurrent:
- fn2 = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[dep]]
- taskname2 = self.rqdata.runq_task[dep]
- stampfile2 = bb.build.stampfile(taskname2, self.rqdata.dataCache, fn2)
- stampfile3 = bb.build.stampfile(taskname2 + "_setscene", self.rqdata.dataCache, fn2)
- t2 = get_timestamp(stampfile2)
- t3 = get_timestamp(stampfile3)
- if t3 and t3 > t2:
- continue
- if fn == fn2 or (fulldeptree and fn2 not in stampwhitelist):
- if not t2:
- logger.debug(2, 'Stampfile %s does not exist', stampfile2)
- iscurrent = False
- if t1 < t2:
- logger.debug(2, 'Stampfile %s < %s', stampfile, stampfile2)
- iscurrent = False
-
- return iscurrent
-
- def execute_runqueue(self):
- """
- Run the tasks in a queue prepared by rqdata.prepare()
- Upon failure, optionally try to recover the build using any alternate providers
- (if the abort on failure configuration option isn't set)
- """
-
- retval = 0.5
-
- if self.state is runQueuePrepare:
- self.rqexe = RunQueueExecuteDummy(self)
-            if self.rqdata.prepare() == 0:
- self.state = runQueueComplete
- else:
- self.state = runQueueSceneInit
-
- if self.state is runQueueSceneInit:
- if self.cooker.configuration.dump_signatures:
- self.dump_signatures()
- else:
- self.rqexe = RunQueueExecuteScenequeue(self)
-
- if self.state is runQueueSceneRun:
- retval = self.rqexe.execute()
-
- if self.state is runQueueRunInit:
- logger.info("Executing RunQueue Tasks")
- self.rqexe = RunQueueExecuteTasks(self)
- self.state = runQueueRunning
-
- if self.state is runQueueRunning:
- retval = self.rqexe.execute()
-
- if self.state is runQueueCleanUp:
- self.rqexe.finish()
-
- if self.state is runQueueFailed:
- if not self.rqdata.taskData.tryaltconfigs:
- raise bb.runqueue.TaskFailure(self.rqexe.failed_fnids)
- for fnid in self.rqexe.failed_fnids:
- self.rqdata.taskData.fail_fnid(fnid)
- self.rqdata.reset()
-
- if self.state is runQueueComplete:
- # All done
- logger.info("Tasks Summary: Attempted %d tasks of which %d didn't need to be rerun and %d failed.", self.rqexe.stats.completed, self.rqexe.stats.skipped, self.rqexe.stats.failed)
- return False
-
- if self.state is runQueueChildProcess:
- print("Child process, eeek, shouldn't happen!")
- return False
-
- # Loop
- return retval
-
- def finish_runqueue(self, now = False):
- if now:
- self.rqexe.finish_now()
- else:
- self.rqexe.finish()
-
- def dump_signatures(self):
- self.state = runQueueComplete
- done = set()
- bb.note("Reparsing files to collect dependency data")
- for task in range(len(self.rqdata.runq_fnid)):
- if self.rqdata.runq_fnid[task] not in done:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
- the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
- done.add(self.rqdata.runq_fnid[task])
-
- bb.parse.siggen.dump_sigs(self.rqdata.dataCache)
-
- return
-
-
-class RunQueueExecute:
-
- def __init__(self, rq):
- self.rq = rq
- self.cooker = rq.cooker
- self.cfgData = rq.cfgData
- self.rqdata = rq.rqdata
-
- self.number_tasks = int(bb.data.getVar("BB_NUMBER_THREADS", self.cfgData, 1) or 1)
- self.scheduler = bb.data.getVar("BB_SCHEDULER", self.cfgData, 1) or "speed"
-
- self.runq_buildable = []
- self.runq_running = []
- self.runq_complete = []
- self.build_pids = {}
- self.build_pipes = {}
- self.failed_fnids = []
-
- def runqueue_process_waitpid(self):
- """
-        Return None if there are no processes awaiting result collection;
-        otherwise collect the process exit codes and close the information pipe.
- """
- result = os.waitpid(-1, os.WNOHANG)
-        if result[0] == 0 and result[1] == 0:
- return None
- task = self.build_pids[result[0]]
- del self.build_pids[result[0]]
- self.build_pipes[result[0]].close()
- del self.build_pipes[result[0]]
- if result[1] != 0:
- self.task_fail(task, result[1]>>8)
- else:
- self.task_complete(task)
- return True
-
- def finish_now(self):
- if self.stats.active:
- logger.info("Sending SIGTERM to remaining %s tasks", self.stats.active)
- for k, v in self.build_pids.iteritems():
- try:
- os.kill(-k, signal.SIGTERM)
- except:
- pass
- for pipe in self.build_pipes:
- self.build_pipes[pipe].read()
-
- def finish(self):
- self.rq.state = runQueueCleanUp
-
- for pipe in self.build_pipes:
- self.build_pipes[pipe].read()
-
- if self.stats.active > 0:
- bb.event.fire(runQueueExitWait(self.stats.active), self.cfgData)
- self.runqueue_process_waitpid()
- return
-
- if len(self.failed_fnids) != 0:
- self.rq.state = runQueueFailed
- return
-
- self.rq.state = runQueueComplete
- return
-
- def fork_off_task(self, fn, task, taskname, quieterrors=False):
- the_data = bb.cache.Cache.loadDataFull(fn, self.cooker.get_file_appends(fn), self.cooker.configuration.data)
-
- env = bb.data.export_vars(the_data)
- env = bb.data.export_envvars(env, the_data)
-
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'fakeroot' in taskdep and taskname in taskdep['fakeroot']:
- envvars = the_data.getVar("FAKEROOTENV", True).split()
- for var in envvars:
- comps = var.split("=")
- env[comps[0]] = comps[1]
- fakedirs = (the_data.getVar("FAKEROOTDIRS", True) or "").split()
- for p in fakedirs:
- bb.mkdirhier(p)
- logger.debug(2, "Running %s:%s under fakeroot, state dir is %s" % (fn, taskname, fakedirs))
-
- envbackup = os.environ.copy()
- for e in envbackup:
- os.unsetenv(e)
- for e in env:
- os.putenv(e, env[e])
-
- sys.stdout.flush()
- sys.stderr.flush()
- try:
- pipein, pipeout = os.pipe()
- pipein = os.fdopen(pipein, 'rb', 4096)
- pipeout = os.fdopen(pipeout, 'wb', 0)
- pid = os.fork()
- except OSError as e:
- bb.msg.fatal(bb.msg.domain.RunQueue, "fork failed: %d (%s)" % (e.errno, e.strerror))
- if pid == 0:
- pipein.close()
-
-            # Save out the PID so that the event can include it in the
-            # events
- bb.event.worker_pid = os.getpid()
- bb.event.worker_pipe = pipeout
- bb.event.useStdout = False
-
- # Child processes should send their messages to the UI
- # process via the server process, not print them
- # themselves
- bblogger.handlers = [bb.event.LogHandler()]
-
- self.rq.state = runQueueChildProcess
- # Make the child the process group leader
- os.setpgid(0, 0)
- # No stdin
- newsi = os.open(os.devnull, os.O_RDWR)
- os.dup2(newsi, sys.stdin.fileno())
- if quieterrors:
- the_data.setVarFlag(taskname, "quieterrors", "1")
-
- bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", self, self.cooker.configuration.data)
- bb.data.setVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", fn, self.cooker.configuration.data)
- bb.data.setVar("BB_WORKERCONTEXT", "1", the_data)
- bb.parse.siggen.set_taskdata(self.rqdata.hashes, self.rqdata.hash_deps)
-
- for h in self.rqdata.hashes:
- bb.data.setVar("BBHASH_%s" % h, self.rqdata.hashes[h], the_data)
- for h in self.rqdata.hash_deps:
- bb.data.setVar("BBHASHDEPS_%s" % h, self.rqdata.hash_deps[h], the_data)
-
- bb.data.setVar("BB_TASKHASH", self.rqdata.runq_hash[task], the_data)
-
- ret = 0
- try:
- if not self.cooker.configuration.dry_run:
- ret = bb.build.exec_task(fn, taskname, the_data)
- os._exit(ret)
- except:
- os._exit(1)
-
- for e in env:
- os.unsetenv(e)
- for e in envbackup:
- os.putenv(e, envbackup[e])
-
- return pid, pipein, pipeout
-
-class RunQueueExecuteDummy(RunQueueExecute):
- def __init__(self, rq):
- self.rq = rq
- self.stats = RunQueueStats(0)
-
- def finish(self):
- self.rq.state = runQueueComplete
- return
-
-class RunQueueExecuteTasks(RunQueueExecute):
- def __init__(self, rq):
- RunQueueExecute.__init__(self, rq)
-
- self.stats = RunQueueStats(len(self.rqdata.runq_fnid))
-
- # Mark initial buildable tasks
- for task in xrange(self.stats.total):
- self.runq_running.append(0)
- self.runq_complete.append(0)
- if len(self.rqdata.runq_depends[task]) == 0:
- self.runq_buildable.append(1)
- else:
- self.runq_buildable.append(0)
- if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
- self.rq.scenequeue_covered.add(task)
-
- found = True
- while found:
- found = False
- for task in xrange(self.stats.total):
- if task in self.rq.scenequeue_covered:
- continue
- if len(self.rqdata.runq_revdeps[task]) > 0 and self.rqdata.runq_revdeps[task].issubset(self.rq.scenequeue_covered):
- self.rq.scenequeue_covered.add(task)
- found = True
-
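- # Fixed-point sketch with hypothetical ids: given runq_revdeps {1: {2},
- # 2: {3}} and task 3 already covered, the first pass adds 2 (its only
- # reverse dependency is covered) and the next pass adds 1; a pass with
- # no additions terminates the loop.
-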
- logger.debug(1, 'Full skip list %s', self.rq.scenequeue_covered)
-
- for task in self.rq.scenequeue_covered:
- self.task_skip(task)
-
- event.fire(bb.event.StampUpdate(self.rqdata.target_pairs, self.rqdata.dataCache.stamp), self.cfgData)
-
- schedulers = self.get_schedulers()
- for scheduler in schedulers:
- if self.scheduler == scheduler.name:
- self.sched = scheduler(self, self.rqdata)
- logger.debug(1, "Using runqueue scheduler '%s'", scheduler.name)
- break
- else:
- bb.fatal("Invalid scheduler '%s'. Available schedulers: %s" %
- (self.scheduler, ", ".join(obj.name for obj in schedulers)))
-
-
- def get_schedulers(self):
- schedulers = set(obj for obj in globals().values()
- if type(obj) is type and
- issubclass(obj, RunQueueScheduler))
-
- user_schedulers = bb.data.getVar("BB_SCHEDULERS", self.cfgData, True)
- if user_schedulers:
- for sched in user_schedulers.split():
- if not "." in sched:
- bb.note("Ignoring scheduler '%s' from BB_SCHEDULERS: not an import" % sched)
- continue
-
- modname, name = sched.rsplit(".", 1)
- try:
- module = __import__(modname, fromlist=(name,))
- except ImportError, exc:
- logger.critical("Unable to import scheduler '%s' from '%s': %s" % (name, modname, exc))
- raise SystemExit(1)
- else:
- schedulers.add(getattr(module, name))
- return schedulers
-
- def task_completeoutright(self, task):
- """
- Mark a task as completed
- Look at the reverse dependencies and mark any task with
- completed dependencies as buildable
- """
- self.runq_complete[task] = 1
- for revdep in self.rqdata.runq_revdeps[task]:
- if self.runq_running[revdep] == 1:
- continue
- if self.runq_buildable[revdep] == 1:
- continue
- alldeps = 1
- for dep in self.rqdata.runq_depends[revdep]:
- if self.runq_complete[dep] != 1:
- alldeps = 0
- if alldeps == 1:
- self.runq_buildable[revdep] = 1
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[revdep]]
- taskname = self.rqdata.runq_task[revdep]
- logger.debug(1, "Marking task %s (%s, %s) as buildable", revdep, fn, taskname)
-
- def task_complete(self, task):
- self.stats.taskCompleted()
- bb.event.fire(runQueueTaskCompleted(task, self.stats, self.rq), self.cfgData)
- self.task_completeoutright(task)
-
- def task_fail(self, task, exitcode):
- """
- Called when a task has failed
- Updates the state engine with the failure
- """
- self.stats.taskFailed()
- fnid = self.rqdata.runq_fnid[task]
- self.failed_fnids.append(fnid)
- bb.event.fire(runQueueTaskFailed(task, self.stats, exitcode, self.rq), self.cfgData)
- if self.rqdata.taskData.abort:
- self.rq.state = runQueueCleanUp
-
- def task_skip(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
- self.task_completeoutright(task)
- self.stats.taskCompleted()
- self.stats.taskSkipped()
-
- def execute(self):
- """
- Run the tasks in a queue prepared by rqdata.prepare()
- """
-
- if self.stats.total == 0:
- # nothing to do
- self.rq.state = runQueueCleanUp
-
- task = self.sched.next()
- if task is not None:
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[task]]
-
- taskname = self.rqdata.runq_task[task]
- if self.rq.check_stamp_task(task, taskname):
- logger.debug(2, "Stamp current task %s (%s)", task,
- self.rqdata.get_user_idstring(task))
- self.task_skip(task)
- return True
-
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'noexec' in taskdep and taskname in taskdep['noexec']:
- startevent = runQueueTaskStarted(task, self.stats, self.rq,
- noexec=True)
- bb.event.fire(startevent, self.cfgData)
- self.runq_running[task] = 1
- self.stats.taskActive()
- bb.build.make_stamp(taskname, self.rqdata.dataCache, fn)
- self.task_complete(task)
- return True
- else:
- startevent = runQueueTaskStarted(task, self.stats, self.rq)
- bb.event.fire(startevent, self.cfgData)
-
- pid, pipein, pipeout = self.fork_off_task(fn, task, taskname)
-
- self.build_pids[pid] = task
- self.build_pipes[pid] = runQueuePipe(pipein, pipeout, self.cfgData)
- self.runq_running[task] = 1
- self.stats.taskActive()
- if self.stats.active < self.number_tasks:
- return True
-
- for pipe in self.build_pipes:
- self.build_pipes[pipe].read()
-
- if self.stats.active > 0:
- if self.runqueue_process_waitpid() is None:
- return 0.5
- return True
-
- if len(self.failed_fnids) != 0:
- self.rq.state = runQueueFailed
- return True
-
- # Sanity Checks
- for task in xrange(self.stats.total):
- if self.runq_buildable[task] == 0:
- logger.error("Task %s never buildable!", task)
- if self.runq_running[task] == 0:
- logger.error("Task %s never ran!", task)
- if self.runq_complete[task] == 0:
- logger.error("Task %s never completed!", task)
- self.rq.state = runQueueComplete
- return True
-
-class RunQueueExecuteScenequeue(RunQueueExecute):
- def __init__(self, rq):
- RunQueueExecute.__init__(self, rq)
-
- self.scenequeue_covered = set()
- self.scenequeue_notcovered = set()
-
- # If we don't have any setscene functions, skip this step
- if len(self.rqdata.runq_setscene) == 0:
- rq.scenequeue_covered = set()
- rq.state = runQueueRunInit
- return
-
- self.stats = RunQueueStats(len(self.rqdata.runq_setscene))
-
- endpoints = {}
- sq_revdeps = []
- sq_revdeps_new = []
- sq_revdeps_squash = []
-
- # We need to construct a dependency graph for the setscene functions. Intermediate
- # dependencies between the setscene tasks only complicate the code. This code
- # therefore aims to collapse the huge runqueue dependency tree into a smaller one
- # only containing the setscene functions.
-
- for task in xrange(self.stats.total):
- self.runq_running.append(0)
- self.runq_complete.append(0)
- self.runq_buildable.append(0)
-
- for task in xrange(len(self.rqdata.runq_fnid)):
- sq_revdeps.append(copy.copy(self.rqdata.runq_revdeps[task]))
- sq_revdeps_new.append(set())
- if (len(self.rqdata.runq_revdeps[task]) == 0) and task not in self.rqdata.runq_setscene:
- endpoints[task] = None
-
- for task in self.rqdata.runq_setscene:
- for dep in self.rqdata.runq_depends[task]:
- endpoints[dep] = task
-
- def process_endpoints(endpoints):
- newendpoints = {}
- for point, task in endpoints.items():
- tasks = set()
- if task:
- tasks.add(task)
- if sq_revdeps_new[point]:
- tasks |= sq_revdeps_new[point]
- sq_revdeps_new[point] = set()
- for dep in self.rqdata.runq_depends[point]:
- if point in sq_revdeps[dep]:
- sq_revdeps[dep].remove(point)
- if tasks:
- sq_revdeps_new[dep] |= tasks
- if (len(sq_revdeps[dep]) == 0 or len(sq_revdeps_new[dep]) != 0) and dep not in self.rqdata.runq_setscene:
- newendpoints[dep] = task
- if len(newendpoints) != 0:
- process_endpoints(newendpoints)
-
- process_endpoints(endpoints)
-
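- # A toy walk-through with hypothetical ids: if task 3 depends on 2, 2
- # depends on 1, and only 3 has a setscene variant, the endpoint walk
- # starts from {2: 3} and pushes 3 back through 2 and then 1, deleting
- # the intermediate edges as it goes; the squashed graph then keeps only
- # task 3, with no setscene-level dependencies (sq_revdeps_squash == [set()]).
-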
- for task in xrange(len(self.rqdata.runq_fnid)):
- if task in self.rqdata.runq_setscene:
- deps = set()
- for dep in sq_revdeps_new[task]:
- deps.add(self.rqdata.runq_setscene.index(dep))
- sq_revdeps_squash.append(deps)
- elif len(sq_revdeps_new[task]) != 0:
- bb.msg.fatal(bb.msg.domain.RunQueue, "Something went badly wrong during scenequeue generation, aborting. Please report this problem.")
-
- #for task in xrange(len(sq_revdeps_squash)):
- # print "Task %s: %s.%s is %s " % (task, self.taskData.fn_index[self.runq_fnid[self.runq_setscene[task]]], self.runq_task[self.runq_setscene[task]] + "_setscene", sq_revdeps_squash[task])
-
- self.sq_deps = []
- self.sq_revdeps = sq_revdeps_squash
- self.sq_revdeps2 = copy.deepcopy(self.sq_revdeps)
-
- for task in xrange(len(self.sq_revdeps)):
- self.sq_deps.append(set())
- for task in xrange(len(self.sq_revdeps)):
- for dep in self.sq_revdeps[task]:
- self.sq_deps[dep].add(task)
-
- for task in xrange(len(self.sq_revdeps)):
- if len(self.sq_revdeps[task]) == 0:
- self.runq_buildable[task] = 1
-
- if self.rq.hashvalidate:
- sq_hash = []
- sq_hashfn = []
- sq_fn = []
- sq_taskname = []
- sq_task = []
- noexec = []
- for task in xrange(len(self.sq_revdeps)):
- realtask = self.rqdata.runq_setscene[task]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
- taskname = self.rqdata.runq_task[realtask]
- taskdep = self.rqdata.dataCache.task_deps[fn]
- if 'noexec' in taskdep and taskname in taskdep['noexec']:
- noexec.append(task)
- self.task_skip(task)
- bb.build.make_stamp(taskname + "_setscene", self.rqdata.dataCache, fn)
- continue
- sq_fn.append(fn)
- sq_hashfn.append(self.rqdata.dataCache.hashfn[fn])
- sq_hash.append(self.rqdata.runq_hash[realtask])
- sq_taskname.append(taskname)
- sq_task.append(task)
- call = self.rq.hashvalidate + "(sq_fn, sq_task, sq_hash, sq_hashfn, d)"
- locs = { "sq_fn" : sq_fn, "sq_task" : sq_taskname, "sq_hash" : sq_hash, "sq_hashfn" : sq_hashfn, "d" : self.cooker.configuration.data }
- valid = bb.utils.better_eval(call, locs)
-
- valid_new = []
- for v in valid:
- valid_new.append(sq_task[v])
-
- for task in xrange(len(self.sq_revdeps)):
- if task not in valid_new and task not in noexec:
- logger.debug(2, 'No package found, so skipping setscene task %s',
- self.rqdata.get_user_idstring(task))
- self.task_failoutright(task)
-
- logger.info('Executing SetScene Tasks')
-
- self.rq.state = runQueueSceneRun
-
- def scenequeue_updatecounters(self, task):
- for dep in self.sq_deps[task]:
- self.sq_revdeps2[dep].remove(task)
- if len(self.sq_revdeps2[dep]) == 0:
- self.runq_buildable[dep] = 1
-
- def task_completeoutright(self, task):
- """
- Mark a task as completed
- Look at the reverse dependencies and mark any task with
- completed dependencies as buildable
- """
-
- index = self.rqdata.runq_setscene[task]
- logger.debug(1, 'Found task %s which could be accelerated',
- self.rqdata.get_user_idstring(index))
-
- self.scenequeue_covered.add(task)
- self.scenequeue_updatecounters(task)
-
- def task_complete(self, task):
- self.stats.taskCompleted()
- self.task_completeoutright(task)
-
- def task_fail(self, task, result):
- self.stats.taskFailed()
- index = self.rqdata.runq_setscene[task]
- bb.event.fire(runQueueTaskFailed(task, self.stats, result, self), self.cfgData)
- self.scenequeue_notcovered.add(task)
- self.scenequeue_updatecounters(task)
-
- def task_failoutright(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
- self.stats.taskCompleted()
- self.stats.taskSkipped()
- index = self.rqdata.runq_setscene[task]
- self.scenequeue_notcovered.add(task)
- self.scenequeue_updatecounters(task)
-
- def task_skip(self, task):
- self.runq_running[task] = 1
- self.runq_buildable[task] = 1
- self.task_completeoutright(task)
- self.stats.taskCompleted()
- self.stats.taskSkipped()
-
- def execute(self):
- """
- Run the tasks in a queue prepared by prepare_runqueue
- """
-
- task = None
- if self.stats.active < self.number_tasks:
- # Find the next setscene to run
- for nexttask in xrange(self.stats.total):
- if self.runq_buildable[nexttask] == 1 and self.runq_running[nexttask] != 1:
- task = nexttask
- break
- if task is not None:
- realtask = self.rqdata.runq_setscene[task]
- fn = self.rqdata.taskData.fn_index[self.rqdata.runq_fnid[realtask]]
-
- taskname = self.rqdata.runq_task[realtask] + "_setscene"
- if self.rq.check_stamp_task(realtask, self.rqdata.runq_task[realtask]):
- logger.debug(2, 'Stamp for underlying task %s(%s) is current, so skipping setscene variant',
- task, self.rqdata.get_user_idstring(task))
- self.task_failoutright(task)
- return True
-
- if self.cooker.configuration.force:
- for target in self.rqdata.target_pairs:
- if target[0] == fn and target[1] == self.rqdata.runq_task[realtask]:
- self.task_failoutright(task)
- return True
-
- if self.rq.check_stamp_task(realtask, taskname):
- logger.debug(2, 'Setscene stamp current task %s(%s), so skip it and its dependencies',
- task, self.rqdata.get_user_idstring(realtask))
- self.task_skip(task)
- return True
-
- logger.info("Running setscene task %d of %d (%s:%s)" % (self.stats.completed + self.stats.active + self.stats.failed + 1,
- self.stats.total, fn, taskname))
-
- pid, pipein, pipeout = self.fork_off_task(fn, realtask, taskname)
-
- self.build_pids[pid] = task
- self.build_pipes[pid] = runQueuePipe(pipein, pipeout, self.cfgData)
- self.runq_running[task] = 1
- self.stats.taskActive()
- if self.stats.active < self.number_tasks:
- return True
-
- for pipe in self.build_pipes:
- self.build_pipes[pipe].read()
-
- if self.stats.active > 0:
- if self.runqueue_process_waitpid() is None:
- return 0.5
- return True
-
- # Convert scenequeue_covered task numbers into full taskgraph ids
- oldcovered = self.scenequeue_covered
- self.rq.scenequeue_covered = set()
- for task in oldcovered:
- self.rq.scenequeue_covered.add(self.rqdata.runq_setscene[task])
-
- logger.debug(1, 'We can skip tasks %s', self.rq.scenequeue_covered)
-
- self.rq.state = runQueueRunInit
- return True
-
- def fork_off_task(self, fn, task, taskname):
- return RunQueueExecute.fork_off_task(self, fn, task, taskname, quieterrors=True)
-
-class TaskFailure(Exception):
- """
- Exception raised when a task in a runqueue fails
- """
- def __init__(self, x):
- self.args = x
-
-
-class runQueueExitWait(bb.event.Event):
- """
- Event when waiting for task processes to exit
- """
-
- def __init__(self, remain):
- self.remain = remain
- self.message = "Waiting for %s active tasks to finish" % remain
- bb.event.Event.__init__(self)
-
-class runQueueEvent(bb.event.Event):
- """
- Base runQueue event class
- """
- def __init__(self, task, stats, rq):
- self.taskid = task
- self.taskstring = rq.rqdata.get_user_idstring(task)
- self.stats = stats.copy()
- bb.event.Event.__init__(self)
-
-class runQueueTaskStarted(runQueueEvent):
- """
- Event notifying a task was started
- """
- def __init__(self, task, stats, rq, noexec=False):
- runQueueEvent.__init__(self, task, stats, rq)
- self.noexec = noexec
-
-class runQueueTaskFailed(runQueueEvent):
- """
- Event notifying a task failed
- """
- def __init__(self, task, stats, exitcode, rq):
- runQueueEvent.__init__(self, task, stats, rq)
- self.exitcode = exitcode
-
-class runQueueTaskCompleted(runQueueEvent):
- """
- Event notifying a task completed
- """
-
-def check_stamp_fn(fn, taskname, d):
- rqexe = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY", d)
- fn = bb.data.getVar("__RUNQUEUE_DO_NOT_USE_EXTERNALLY2", d)
- fnid = rqexe.rqdata.taskData.getfn_id(fn)
- taskid = rqexe.rqdata.get_task_id(fnid, taskname)
- if taskid is not None:
- return rqexe.rq.check_stamp_task(taskid)
- return None
-
-class runQueuePipe():
- """
- Abstraction for a pipe between a worker thread and the server
- """
- def __init__(self, pipein, pipeout, d):
- self.input = pipein
- pipeout.close()
- fcntl.fcntl(self.input, fcntl.F_SETFL, fcntl.fcntl(self.input, fcntl.F_GETFL) | os.O_NONBLOCK)
- self.queue = ""
- self.d = d
-
- def read(self):
- start = len(self.queue)
- try:
- self.queue = self.queue + self.input.read(102400)
- except (OSError, IOError):
- pass
- end = len(self.queue)
- index = self.queue.find("</event>")
- while index != -1:
- bb.event.fire_from_worker(self.queue[:index+8], self.d)
- self.queue = self.queue[index+8:]
- index = self.queue.find("</event>")
- return (end > start)
-
- def close(self):
- while self.read():
- continue
- if len(self.queue) > 0:
- print("Warning, worker left partial message: %s" % self.queue)
- self.input.close()
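-
-# A self-contained sketch (hypothetical helper, not part of BitBake) of the
-# "</event>" framing used by runQueuePipe above; 8 is len("</event>").
-def split_events(queue):
-    events = []
-    index = queue.find("</event>")
-    while index != -1:
-        events.append(queue[:index + 8])
-        queue = queue[index + 8:]
-        index = queue.find("</event>")
-    return events, queue
-
-# split_events("<event>1</event><eve") == (["<event>1</event>"], "<eve")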
diff --git a/bitbake/lib/bb/server/__init__.py b/bitbake/lib/bb/server/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
--- a/bitbake/lib/bb/server/__init__.py
+++ /dev/null
diff --git a/bitbake/lib/bb/server/none.py b/bitbake/lib/bb/server/none.py
deleted file mode 100644
index be0fb8f776..0000000000
--- a/bitbake/lib/bb/server/none.py
+++ /dev/null
@@ -1,195 +0,0 @@
-#
-# BitBake 'dummy' Passthrough Server
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2008 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
- This module implements the 'dummy' pass-through server for BitBake: the
- cooker runs in the same process as the UI, and events are queued locally
- instead of being sent over XMLRPC.
-
- Use register_idle_function() to add a function which the server calls
- from within idle_commands() when no requests are pending. Make sure
- that those functions are non-blocking or else you will introduce latency
- in the server's main loop.
-"""
-
-import time
-import bb
-from bb.ui import uievent
-import xmlrpclib
-import pickle
-import signal
-
-DEBUG = False
-
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-import inspect, select
-
-class BitBakeServerCommands():
- def __init__(self, server, cooker):
- self.cooker = cooker
- self.server = server
-
- def runCommand(self, command):
- """
- Run a cooker command on the server
- """
- #print "Running Command %s" % command
- return self.cooker.command.runCommand(command)
-
- def terminateServer(self):
- """
- Trigger the server to quit
- """
- self.server.server_exit()
- #print "Server (cooker) exitting"
- return
-
- def ping(self):
- """
- Dummy method which can be used to check the server is still alive
- """
- return True
-
-eventQueue = []
-
-class BBUIEventQueue:
- class event:
- def __init__(self, parent):
- self.parent = parent
- @staticmethod
- def send(event):
- bb.server.none.eventQueue.append(pickle.loads(event))
- @staticmethod
- def quit():
- return
-
- def __init__(self, BBServer):
- self.eventQueue = bb.server.none.eventQueue
- self.BBServer = BBServer
- self.EventHandle = bb.event.register_UIHhandler(self)
-
- def getEvent(self):
- if len(self.eventQueue) == 0:
- return None
-
- return self.eventQueue.pop(0)
-
- def waitEvent(self, delay):
- event = self.getEvent()
- if event:
- return event
- self.BBServer.idle_commands(delay)
- return self.getEvent()
-
- def queue_event(self, event):
- self.eventQueue.append(event)
-
- def system_quit( self ):
- bb.event.unregister_UIHhandler(self.EventHandle)
-
-# Dummy signal handler to ensure we break out of sleep upon SIGCHLD
-def chldhandler(signum, stackframe):
- pass
-
-class BitBakeServer():
- # remove this when you're done with debugging
- # allow_reuse_address = True
-
- def __init__(self, cooker):
- self._idlefuns = {}
- self.commands = BitBakeServerCommands(self, cooker)
-
- def register_idle_function(self, function, data):
- """Register a function to be called while the server is idle"""
- assert hasattr(function, '__call__')
- self._idlefuns[function] = data
-
- def idle_commands(self, delay):
- #print "Idle queue length %s" % len(self._idlefuns)
- #print "Idle timeout, running idle functions"
- #if len(self._idlefuns) == 0:
- nextsleep = delay
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, False)
- #print "Idle function returned %s" % (retval)
- if retval is False:
- del self._idlefuns[function]
- elif retval is True:
- nextsleep = None
- elif nextsleep is None:
- continue
- elif retval < nextsleep:
- nextsleep = retval
- except SystemExit:
- raise
- except:
- import traceback
- traceback.print_exc()
- self.commands.runCommand(["stateShutdown"])
- pass
- if nextsleep is not None:
- #print "Sleeping for %s (%s)" % (nextsleep, delay)
- signal.signal(signal.SIGCHLD, chldhandler)
- time.sleep(nextsleep)
- signal.signal(signal.SIGCHLD, signal.SIG_DFL)
-
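- # The idle-function protocol, sketched with a hypothetical callback:
- # return False to be unregistered, True to be polled again immediately,
- # or a number of seconds to sleep before the next call; 'abort' is True
- # when the server is exiting.
- #
- #     def idle_watcher(server, data, abort):
- #         if abort or data.done():
- #             return False
- #         return 0.5
-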
- def server_exit(self):
- # Tell idle functions we're exiting
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, True)
- except:
- pass
-
-class BitbakeServerInfo():
- def __init__(self, server):
- self.server = server
- self.commands = server.commands
-
-class BitBakeServerFork():
- def __init__(self, cooker, server, serverinfo, logfile):
- serverinfo.logfile = logfile
- serverinfo.cooker = cooker
- serverinfo.server = server
-
-class BitbakeUILauch():
- def launch(self, serverinfo, uifunc, *args):
- return bb.cooker.server_main(serverinfo.cooker, uifunc, *args)
-
-class BitBakeServerConnection():
- def __init__(self, serverinfo):
- self.server = serverinfo.server
- self.connection = serverinfo.commands
- self.events = bb.server.none.BBUIEventQueue(self.server)
- for event in bb.event.ui_queue:
- self.events.queue_event(event)
-
- def terminate(self):
- try:
- self.events.system_quit()
- except:
- pass
- try:
- self.connection.terminateServer()
- except:
- pass
diff --git a/bitbake/lib/bb/server/xmlrpc.py b/bitbake/lib/bb/server/xmlrpc.py
deleted file mode 100644
index 0d03e308d0..0000000000
--- a/bitbake/lib/bb/server/xmlrpc.py
+++ /dev/null
@@ -1,260 +0,0 @@
-#
-# BitBake XMLRPC Server
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2008 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
- This module implements an xmlrpc server for BitBake.
-
- Use this by deriving a class from BitBakeXMLRPCServer and then adding
- methods which you want to "export" via XMLRPC. If the methods have the
- prefix xmlrpc_, then registering those functions will happen automatically;
- if not, you need to call register_function.
-
- Use register_idle_function() to add a function which the xmlrpc server
- calls from within serve_forever when no requests are pending. Make sure
- that those functions are non-blocking or else you will introduce latency
- in the server's main loop.
-"""
-
-import bb
-import xmlrpclib, sys
-from bb import daemonize
-from bb.ui import uievent
-
-DEBUG = False
-
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-import inspect, select
-
-if sys.hexversion < 0x020600F0:
- print("Sorry, python 2.6 or later is required for bitbake's XMLRPC mode")
- sys.exit(1)
-
-##
-# The xmlrpclib.Transport class has undergone various changes in Python 2.7
-# which break BitBake's XMLRPC implementation.
-# To work around this we subclass Transport and have a copy/paste of method
-# implementations from Python 2.6.6's xmlrpclib.
-#
-# Upstream Python bug is #8194 (http://bugs.python.org/issue8194)
-##
-
-class BBTransport(xmlrpclib.Transport):
- def request(self, host, handler, request_body, verbose=0):
- h = self.make_connection(host)
- if verbose:
- h.set_debuglevel(1)
-
- self.send_request(h, handler, request_body)
- self.send_host(h, host)
- self.send_user_agent(h)
- self.send_content(h, request_body)
-
- errcode, errmsg, headers = h.getreply()
-
- if errcode != 200:
- raise xmlrpclib.ProtocolError(
- host + handler,
- errcode, errmsg,
- headers
- )
-
- self.verbose = verbose
-
- try:
- sock = h._conn.sock
- except AttributeError:
- sock = None
-
- return self._parse_response(h.getfile(), sock)
-
- def make_connection(self, host):
- import httplib
- host, extra_headers, x509 = self.get_host_info(host)
- return httplib.HTTP(host)
-
- def _parse_response(self, file, sock):
- p, u = self.getparser()
-
- while 1:
- if sock:
- response = sock.recv(1024)
- else:
- response = file.read(1024)
- if not response:
- break
- if self.verbose:
- print "body:", repr(response)
- p.feed(response)
-
- file.close()
- p.close()
-
- return u.close()
-
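-# Sketched use of the workaround above (hypothetical endpoint address):
-#
-#     t = BBTransport()
-#     s = xmlrpclib.Server("http://localhost:8000/", transport=t,
-#                          allow_none=True)
-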
-class BitBakeServerCommands():
- def __init__(self, server, cooker):
- self.cooker = cooker
- self.server = server
-
- def registerEventHandler(self, host, port):
- """
- Register a remote UI Event Handler
- """
- t = BBTransport()
- s = xmlrpclib.Server("http://%s:%d/" % (host, port), transport=t, allow_none=True)
- return bb.event.register_UIHhandler(s)
-
- def unregisterEventHandler(self, handlerNum):
- """
- Unregister a remote UI Event Handler
- """
- return bb.event.unregister_UIHhandler(handlerNum)
-
- def runCommand(self, command):
- """
- Run a cooker command on the server
- """
- return self.cooker.command.runCommand(command)
-
- def terminateServer(self):
- """
- Trigger the server to quit
- """
- self.server.quit = True
- print("Server (cooker) exitting")
- return
-
- def ping(self):
- """
- Dummy method which can be used to check the server is still alive
- """
- return True
-
-class BitBakeServer(SimpleXMLRPCServer):
- # remove this when you're done with debugging
- # allow_reuse_address = True
-
- def __init__(self, cooker, interface = ("localhost", 0)):
- """
- Constructor
- """
- SimpleXMLRPCServer.__init__(self, interface,
- requestHandler=SimpleXMLRPCRequestHandler,
- logRequests=False, allow_none=True)
- self._idlefuns = {}
- self.host, self.port = self.socket.getsockname()
- #self.register_introspection_functions()
- commands = BitBakeServerCommands(self, cooker)
- self.autoregister_all_functions(commands, "")
- self.cooker = cooker
-
- def autoregister_all_functions(self, context, prefix):
- """
- Convenience method for registering all functions in the scope
- of this class that start with a common prefix
- """
- methodlist = inspect.getmembers(context, inspect.ismethod)
- for name, method in methodlist:
- if name.startswith(prefix):
- self.register_function(method, name[len(prefix):])
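-
- # Sketched usage (hypothetical context object): registering with the
- # prefix "xmlrpc_" exports xmlrpc_ping under the plain name "ping".
- #
- #     class Commands:
- #         def xmlrpc_ping(self):
- #             return True
- #
- #     server.autoregister_all_functions(Commands(), "xmlrpc_")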
-
- def register_idle_function(self, function, data):
- """Register a function to be called while the server is idle"""
- assert hasattr(function, '__call__')
- self._idlefuns[function] = data
-
- def serve_forever(self):
- bb.cooker.server_main(self.cooker, self._serve_forever)
-
- def _serve_forever(self):
- """
- Serve Requests. Overloaded to honor a quit command
- """
- self.quit = False
- self.timeout = 0 # Run Idle calls for our first callback
- while not self.quit:
- #print "Idle queue length %s" % len(self._idlefuns)
- self.handle_request()
- #print "Idle timeout, running idle functions"
- nextsleep = None
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, False)
- if retval is False:
- del self._idlefuns[function]
- elif retval is True:
- nextsleep = 0
- elif nextsleep == 0:
- continue
- elif nextsleep is None:
- nextsleep = retval
- elif retval < nextsleep:
- nextsleep = retval
- except SystemExit:
- raise
- except:
- import traceback
- traceback.print_exc()
- pass
- if nextsleep is None and len(self._idlefuns) > 0:
- nextsleep = 0
- self.timeout = nextsleep
- # Tell idle functions we're exiting
- for function, data in self._idlefuns.items():
- try:
- retval = function(self, data, True)
- except:
- pass
-
- self.server_close()
- return
-
-class BitbakeServerInfo():
- def __init__(self, server):
- self.host = server.host
- self.port = server.port
-
-class BitBakeServerFork():
- def __init__(self, cooker, server, serverinfo, logfile):
- daemonize.createDaemon(server.serve_forever, logfile)
-
-class BitbakeUILauch():
- def launch(self, serverinfo, uifunc, *args):
- return uifunc(*args)
-
-class BitBakeServerConnection():
- def __init__(self, serverinfo):
- t = BBTransport()
- self.connection = xmlrpclib.Server("http://%s:%s" % (serverinfo.host, serverinfo.port), transport=t, allow_none=True)
- self.events = uievent.BBUIEventQueue(self.connection)
- for event in bb.event.ui_queue:
- self.events.queue_event(event)
-
- def terminate(self):
- # Don't wait for server indefinitely
- import socket
- socket.setdefaulttimeout(2)
- try:
- self.events.system_quit()
- except:
- pass
- try:
- self.connection.terminateServer()
- except:
- pass
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py
deleted file mode 100644
index 3319e2d1cc..0000000000
--- a/bitbake/lib/bb/shell.py
+++ /dev/null
@@ -1,820 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-##########################################################################
-#
-# Copyright (C) 2005-2006 Michael 'Mickey' Lauer <mickey@Vanille.de>
-# Copyright (C) 2005-2006 Vanille Media
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-#
-##########################################################################
-#
-# Thanks to:
-# * Holger Freyther <zecke@handhelds.org>
-# * Justin Patrin <papercrane@reversefold.com>
-#
-##########################################################################
-
-"""
-BitBake Shell
-
-IDEAS:
- * list defined tasks per package
- * list classes
- * toggle force
- * command to reparse just one (or more) bbfile(s)
- * automatic check if reparsing is necessary (inotify?)
- * frontend for bb file manipulation
- * more shell-like features:
- - output control, i.e. pipe output into grep, sort, etc.
- - job control, i.e. bring running commands into background and foreground
- * start parsing in background right after startup
- * ncurses interface
-
-PROBLEMS:
- * force doesn't always work
- * readline completion for commands with more than one parameter
-
-"""
-
-##########################################################################
-# Import and setup global variables
-##########################################################################
-
-from __future__ import print_function
-from functools import reduce
-try:
- set
-except NameError:
- from sets import Set as set
-import sys, os, readline, socket, httplib, urllib, commands, popen2, shlex, Queue, fnmatch
-from bb import data, parse, build, cache, taskdata, runqueue, providers as Providers
-
-__version__ = "0.5.3.1"
-__credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>
-Type 'help' for more information, press CTRL-D to exit.""" % __version__
-
-cmds = {}
-leave_mainloop = False
-last_exception = None
-cooker = None
-parsed = False
-debug = os.environ.get( "BBSHELL_DEBUG", "" )
-
-##########################################################################
-# Class BitBakeShellCommands
-##########################################################################
-
-class BitBakeShellCommands:
- """This class contains the valid commands for the shell"""
-
- def __init__( self, shell ):
- """Register all the commands"""
- self._shell = shell
- for attr in BitBakeShellCommands.__dict__:
- if not attr.startswith( "_" ):
- if attr.endswith( "_" ):
- command = attr[:-1].lower()
- else:
- command = attr[:].lower()
- method = getattr( BitBakeShellCommands, attr )
- debugOut( "registering command '%s'" % command )
- # scan number of arguments
- usage = getattr( method, "usage", "" )
- if usage != "<...>":
- numArgs = len( usage.split() )
- else:
- numArgs = -1
- shell.registerCommand( command, method, numArgs, "%s %s" % ( command, usage ), method.__doc__ )
-
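- # A new command is just a method plus a 'usage' attribute; the loop above
- # registers it by name. Sketch of a hypothetical command:
- #
- #     def hello( self, params ):
- #         """Greet a name"""
- #         print("Hello, %s" % params[0])
- #     hello.usage = "<name>"
-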
- def _checkParsed( self ):
- if not parsed:
- print("SHELL: This command needs to parse bbfiles...")
- self.parse( None )
-
- def _findProvider( self, item ):
- self._checkParsed()
- # Need to use taskData for this information
- preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
- if not preferred: preferred = item
- try:
- lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
- except KeyError:
- if item in cooker.status.providers:
- pf = cooker.status.providers[item][0]
- else:
- pf = None
- return pf
-
- def alias( self, params ):
- """Register a new name for a command"""
- new, old = params
- if not old in cmds:
- print("ERROR: Command '%s' not known" % old)
- else:
- cmds[new] = cmds[old]
- print("OK")
- alias.usage = "<alias> <command>"
-
- def buffer( self, params ):
- """Dump specified output buffer"""
- index = params[0]
- print(self._shell.myout.buffer( int( index ) ))
- buffer.usage = "<index>"
-
- def buffers( self, params ):
- """Show the available output buffers"""
- commands = self._shell.myout.bufferedCommands()
- if not commands:
- print("SHELL: No buffered commands available yet. Start doing something.")
- else:
- print("="*35, "Available Output Buffers", "="*27)
- for index, cmd in enumerate( commands ):
- print("| %s %s" % ( str( index ).ljust( 3 ), cmd ))
- print("="*88)
-
- def build( self, params, cmd = "build" ):
- """Build a providee"""
- global last_exception
- globexpr = params[0]
- self._checkParsed()
- names = globfilter( cooker.status.pkg_pn, globexpr )
- if len( names ) == 0: names = [ globexpr ]
- print("SHELL: Building %s" % ' '.join( names ))
-
- td = taskdata.TaskData(cooker.configuration.abort)
- localdata = data.createCopy(cooker.configuration.data)
- data.update_data(localdata)
- data.expandKeys(localdata)
-
- try:
- tasks = []
- for name in names:
- td.add_provider(localdata, cooker.status, name)
- providers = td.get_provider(name)
-
- if len(providers) == 0:
- raise Providers.NoProvider
-
- tasks.append([name, "do_%s" % cmd])
-
- td.add_unresolved(localdata, cooker.status)
-
- rq = runqueue.RunQueue(cooker, localdata, cooker.status, td, tasks)
- rq.prepare_runqueue()
- rq.execute_runqueue()
-
- except Providers.NoProvider:
- print("ERROR: No Provider")
- last_exception = Providers.NoProvider
-
- except runqueue.TaskFailure as fnids:
- last_exception = fnids
-
- except build.FuncFailed as e:
- print("ERROR: Couldn't build '%s'" % names)
- last_exception = e
-
-
- build.usage = "<providee>"
-
- def clean( self, params ):
- """Clean a providee"""
- self.build( params, "clean" )
- clean.usage = "<providee>"
-
- def compile( self, params ):
- """Execute 'compile' on a providee"""
- self.build( params, "compile" )
- compile.usage = "<providee>"
-
- def configure( self, params ):
- """Execute 'configure' on a providee"""
- self.build( params, "configure" )
- configure.usage = "<providee>"
-
- def install( self, params ):
- """Execute 'install' on a providee"""
- self.build( params, "install" )
- install.usage = "<providee>"
-
- def edit( self, params ):
- """Call $EDITOR on a providee"""
- name = params[0]
- bbfile = self._findProvider( name )
- if bbfile is not None:
- os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) )
- else:
- print("ERROR: Nothing provides '%s'" % name)
- edit.usage = "<providee>"
-
- def environment( self, params ):
- """Dump out the outer BitBake environment"""
- cooker.showEnvironment()
-
- def exit_( self, params ):
- """Leave the BitBake Shell"""
- debugOut( "setting leave_mainloop to true" )
- global leave_mainloop
- leave_mainloop = True
-
- def fetch( self, params ):
- """Fetch a providee"""
- self.build( params, "fetch" )
- fetch.usage = "<providee>"
-
- def fileBuild( self, params, cmd = "build" ):
- """Parse and build a .bb file"""
- global last_exception
- name = params[0]
- bf = completeFilePath( name )
- print("SHELL: Calling '%s' on '%s'" % ( cmd, bf ))
-
- try:
- cooker.buildFile(bf, cmd)
- except parse.ParseError:
- print("ERROR: Unable to open or parse '%s'" % bf)
- except build.FuncFailed as e:
- print("ERROR: Couldn't build '%s'" % name)
- last_exception = e
-
- fileBuild.usage = "<bbfile>"
-
- def fileClean( self, params ):
- """Clean a .bb file"""
- self.fileBuild( params, "clean" )
- fileClean.usage = "<bbfile>"
-
- def fileEdit( self, params ):
- """Call $EDITOR on a .bb file"""
- name = params[0]
- os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), completeFilePath( name ) ) )
- fileEdit.usage = "<bbfile>"
-
- def fileRebuild( self, params ):
- """Rebuild (clean & build) a .bb file"""
- self.fileBuild( params, "rebuild" )
- fileRebuild.usage = "<bbfile>"
-
- def fileReparse( self, params ):
- """(re)Parse a bb file"""
- bbfile = params[0]
- print("SHELL: Parsing '%s'" % bbfile)
- parse.update_mtime( bbfile )
- cooker.parser.reparse(bbfile)
- if False: #fromCache:
- print("SHELL: File has not been updated, not reparsing")
- else:
- print("SHELL: Parsed")
- fileReparse.usage = "<bbfile>"
-
- def abort( self, params ):
- """Toggle abort task execution flag (see bitbake -k)"""
- cooker.configuration.abort = not cooker.configuration.abort
- print("SHELL: Abort Flag is now '%s'" % repr( cooker.configuration.abort ))
-
- def force( self, params ):
- """Toggle force task execution flag (see bitbake -f)"""
- cooker.configuration.force = not cooker.configuration.force
- print("SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force ))
-
- def help( self, params ):
- """Show a comprehensive list of commands and their purpose"""
- print("="*30, "Available Commands", "="*30)
- for cmd in sorted(cmds):
- function, numparams, usage, helptext = cmds[cmd]
- print("| %s | %s" % (usage.ljust(30), helptext))
- print("="*78)
-
- def lastError( self, params ):
- """Show the reason or log that was produced by the last BitBake event exception"""
- if last_exception is None:
- print("SHELL: No Errors yet (Phew)...")
- else:
- reason, event = last_exception.args
- print("SHELL: Reason for the last error: '%s'" % reason)
- if ':' in reason:
- msg, filename = reason.split( ':', 1 )
- filename = filename.strip()
- print("SHELL: Dumping log file for last error:")
- try:
- print(open( filename ).read())
- except IOError:
- print("ERROR: Couldn't open '%s'" % filename)
-
- def match( self, params ):
- """Dump all files or providers matching a glob expression"""
- what, globexpr = params
- if what == "files":
- self._checkParsed()
- for key in globfilter( cooker.status.pkg_fn, globexpr ): print(key)
- elif what == "providers":
- self._checkParsed()
- for key in globfilter( cooker.status.pkg_pn, globexpr ): print(key)
- else:
- print("Usage: match %s" % self.print_.usage)
- match.usage = "<files|providers> <glob>"
-
- def new( self, params ):
- """Create a new .bb file and open the editor"""
- dirname, filename = params
- packages = '/'.join( data.getVar( "BBFILES", cooker.configuration.data, 1 ).split('/')[:-2] )
- fulldirname = "%s/%s" % ( packages, dirname )
-
- if not os.path.exists( fulldirname ):
- print("SHELL: Creating '%s'" % fulldirname)
- os.mkdir( fulldirname )
- if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ):
- if os.path.exists( "%s/%s" % ( fulldirname, filename ) ):
- print("SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename ))
- return False
- print("SHELL: Creating '%s/%s'" % ( fulldirname, filename ))
- newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" )
- print("""DESCRIPTION = ""
-SECTION = ""
-AUTHOR = ""
-HOMEPAGE = ""
-MAINTAINER = ""
-LICENSE = "GPL"
-PR = "r0"
-
-SRC_URI = ""
-
-#inherit base
-
-#do_configure() {
-#
-#}
-
-#do_compile() {
-#
-#}
-
-#do_stage() {
-#
-#}
-
-#do_install() {
-#
-#}
-""", file=newpackage)
- newpackage.close()
- os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) )
- new.usage = "<directory> <filename>"
-
- def package( self, params ):
- """Execute 'package' on a providee"""
- self.build( params, "package" )
- package.usage = "<providee>"
-
- def pasteBin( self, params ):
- """Send a command + output buffer to the pastebin at http://rafb.net/paste"""
- index = params[0]
- contents = self._shell.myout.buffer( int( index ) )
- sendToPastebin( "output of " + params[0], contents )
- pasteBin.usage = "<index>"
-
- def pasteLog( self, params ):
- """Send the last event exception error log (if there is one) to http://rafb.net/paste"""
- if last_exception is None:
- print("SHELL: No Errors yet (Phew)...")
- else:
- reason, event = last_exception.args
- print("SHELL: Reason for the last error: '%s'" % reason)
- if ':' in reason:
- msg, filename = reason.split( ':', 1 )
- filename = filename.strip()
- print("SHELL: Pasting log file to pastebin...")
-
- file = open( filename ).read()
- sendToPastebin( "contents of " + filename, file )
-
- def patch( self, params ):
- """Execute 'patch' command on a providee"""
- self.build( params, "patch" )
- patch.usage = "<providee>"
-
- def parse( self, params ):
- """(Re-)parse .bb files and calculate the dependency graph"""
- cooker.status = cache.CacheData()
- ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or ""
- cooker.status.ignored_dependencies = set( ignore.split() )
- cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) )
-
- (filelist, masked) = cooker.collect_bbfiles()
- cooker.parse_bbfiles(filelist, masked, cooker.myProgressCallback)
- cooker.buildDepgraph()
- global parsed
- parsed = True
- print()
-
- def reparse( self, params ):
- """(re)Parse a providee's bb file"""
- bbfile = self._findProvider( params[0] )
- if bbfile is not None:
- print("SHELL: Found bbfile '%s' for '%s'" % ( bbfile, params[0] ))
- self.fileReparse( [ bbfile ] )
- else:
- print("ERROR: Nothing provides '%s'" % params[0])
- reparse.usage = "<providee>"
-
- def getvar( self, params ):
- """Dump the contents of an outer BitBake environment variable"""
- var = params[0]
- value = data.getVar( var, cooker.configuration.data, 1 )
- print(value)
- getvar.usage = "<variable>"
-
- def peek( self, params ):
- """Dump contents of variable defined in providee's metadata"""
- name, var = params
- bbfile = self._findProvider( name )
- if bbfile is not None:
- the_data = cache.Cache.loadDataFull(bbfile, cooker.get_file_appends(bbfile), cooker.configuration.data)
- value = the_data.getVar( var, 1 )
- print(value)
- else:
- print("ERROR: Nothing provides '%s'" % name)
- peek.usage = "<providee> <variable>"
-
- def poke( self, params ):
- """Set contents of variable defined in providee's metadata"""
- name, var, value = params
- bbfile = self._findProvider( name )
- if bbfile is not None:
- print("ERROR: Sorry, this functionality is currently broken")
- #d = cooker.pkgdata[bbfile]
- #data.setVar( var, value, d )
-
- # mark the change semi-persistent
- #cooker.pkgdata.setDirty(bbfile, d)
- #print "OK"
- else:
- print("ERROR: Nothing provides '%s'" % name)
- poke.usage = "<providee> <variable> <value>"
-
- def print_( self, params ):
- """Dump all files or providers"""
- what = params[0]
- if what == "files":
- self._checkParsed()
- for key in cooker.status.pkg_fn: print(key)
- elif what == "providers":
- self._checkParsed()
- for key in cooker.status.providers: print(key)
- else:
- print("Usage: print %s" % self.print_.usage)
- print_.usage = "<files|providers>"
-
- def python( self, params ):
- """Enter the expert mode - an interactive BitBake Python Interpreter"""
- sys.ps1 = "EXPERT BB>>> "
- sys.ps2 = "EXPERT BB... "
- import code
- interpreter = code.InteractiveConsole( dict( globals() ) )
- interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version )
-
- def showdata( self, params ):
- """Execute 'showdata' on a providee"""
- cooker.showEnvironment(None, params)
- showdata.usage = "<providee>"
-
- def setVar( self, params ):
- """Set an outer BitBake environment variable"""
- var, value = params
- data.setVar( var, value, cooker.configuration.data )
- print("OK")
- setVar.usage = "<variable> <value>"
-
- def rebuild( self, params ):
- """Clean and rebuild a .bb file or a providee"""
- self.build( params, "clean" )
- self.build( params, "build" )
- rebuild.usage = "<providee>"
-
- def shell( self, params ):
- """Execute a shell command and dump the output"""
- if params != "":
- print(commands.getoutput( " ".join( params ) ))
- shell.usage = "<...>"
-
- def stage( self, params ):
- """Execute 'stage' on a providee"""
- self.build( params, "populate_staging" )
- stage.usage = "<providee>"
-
- def status( self, params ):
- """<just for testing>"""
- print("-" * 78)
- print("building list = '%s'" % cooker.building_list)
- print("build path = '%s'" % cooker.build_path)
- print("consider_msgs_cache = '%s'" % cooker.consider_msgs_cache)
- print("build stats = '%s'" % cooker.stats)
- if last_exception is not None: print("last_exception = '%s'" % repr( last_exception.args ))
- print("memory output contents = '%s'" % self._shell.myout._buffer)
-
- def test( self, params ):
- """<just for testing>"""
- print("testCommand called with '%s'" % params)
-
- def unpack( self, params ):
- """Execute 'unpack' on a providee"""
- self.build( params, "unpack" )
- unpack.usage = "<providee>"
-
- def which( self, params ):
- """Computes the providers for a given providee"""
- # Need to use taskData for this information
- item = params[0]
-
- self._checkParsed()
-
- preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 )
- if not preferred: preferred = item
-
- try:
- lv, lf, pv, pf = Providers.findBestProvider(preferred, cooker.configuration.data, cooker.status)
- except KeyError:
- lv, lf, pv, pf = (None,)*4
-
- try:
- providers = cooker.status.providers[item]
- except KeyError:
- print("SHELL: ERROR: Nothing provides", preferred)
- else:
- for provider in providers:
- if provider == pf: provider = " (***) %s" % provider
- else: provider = " %s" % provider
- print(provider)
- which.usage = "<providee>"
-
-##########################################################################
-# Common helper functions
-##########################################################################
-
-def completeFilePath( bbfile ):
- """Get the complete bbfile path"""
- if not cooker.status: return bbfile
- if not cooker.status.pkg_fn: return bbfile
- for key in cooker.status.pkg_fn:
- if key.endswith( bbfile ):
- return key
- return bbfile
-
-def sendToPastebin( desc, content ):
- """Send content to http://oe.pastebin.com"""
- mydata = {}
- mydata["lang"] = "Plain Text"
- mydata["desc"] = desc
- mydata["cvt_tabs"] = "No"
- mydata["nick"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" )
- mydata["text"] = content
- params = urllib.urlencode( mydata )
- headers = {"Content-type": "application/x-www-form-urlencoded", "Accept": "text/plain"}
-
- host = "rafb.net"
- conn = httplib.HTTPConnection( "%s:80" % host )
- conn.request("POST", "/paste/paste.php", params, headers )
-
- response = conn.getresponse()
- conn.close()
-
- if response.status == 302:
- location = response.getheader( "location" ) or "unknown"
- print("SHELL: Pasted to http://%s%s" % ( host, location ))
- else:
- print("ERROR: %s %s" % ( response.status, response.reason ))
-
-def completer( text, state ):
- """Return a possible readline completion"""
- debugOut( "completer called with text='%s', state='%d'" % ( text, state ) )
-
- if state == 0:
- line = readline.get_line_buffer()
- if " " in line:
- line = line.split()
- # we are in second (or more) argument
- if line[0] in cmds and hasattr( cmds[line[0]][0], "usage" ): # known command and usage
- u = getattr( cmds[line[0]][0], "usage" ).split()[0]
- if u == "<variable>":
- allmatches = cooker.configuration.data.keys()
- elif u == "<bbfile>":
- if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
- else: allmatches = [ x.split("/")[-1] for x in cooker.status.pkg_fn ]
- elif u == "<providee>":
- if cooker.status.pkg_fn is None: allmatches = [ "(No Matches Available. Parsed yet?)" ]
- else: allmatches = cooker.status.providers.iterkeys()
- else: allmatches = [ "(No tab completion available for this command)" ]
- else: allmatches = [ "(No tab completion available for this command)" ]
- else:
- # we are in first argument
- allmatches = cmds.iterkeys()
-
- completer.matches = [ x for x in allmatches if x[:len(text)] == text ]
- #print "completer.matches = '%s'" % completer.matches
- if len( completer.matches ) > state:
- return completer.matches[state]
- else:
- return None
-
-def debugOut( text ):
- if debug:
- sys.stderr.write( "( %s )\n" % text )
-
-def columnize( alist, width = 80 ):
- """
- A word-wrap function that preserves existing line breaks
- and most spaces in the text. Expects that existing line
- breaks are posix newlines (\n).
- """
- return reduce(lambda line, word, width=width: '%s%s%s' %
- (line,
- ' \n'[(len(line[line.rfind('\n')+1:])
- + len(word.split('\n', 1)[0]
- ) >= width)],
- word),
- alist
- )
-
-def globfilter( names, pattern ):
- return fnmatch.filter( names, pattern )
-
-##########################################################################
-# Class MemoryOutput
-##########################################################################
-
-class MemoryOutput:
- """File-like output class buffering the output of the last 10 commands"""
- def __init__( self, delegate ):
- self.delegate = delegate
- self._buffer = []
- self.text = []
- self._command = None
-
- def startCommand( self, command ):
- self._command = command
- self.text = []
- def endCommand( self ):
- if self._command is not None:
- if len( self._buffer ) == 10: del self._buffer[0]
- self._buffer.append( ( self._command, self.text ) )
- def removeLast( self ):
- if self._buffer:
- del self._buffer[ len( self._buffer ) - 1 ]
- self.text = []
- self._command = None
- def lastBuffer( self ):
- if self._buffer:
- return self._buffer[ len( self._buffer ) -1 ][1]
- def bufferedCommands( self ):
- return [ cmd for cmd, output in self._buffer ]
- def buffer( self, i ):
- if i < len( self._buffer ):
- return "BB>> %s\n%s" % ( self._buffer[i][0], "".join( self._buffer[i][1] ) )
- else: return "ERROR: Invalid buffer number. Buffer needs to be in (0, %d)" % ( len( self._buffer ) - 1 )
- def write( self, text ):
- if self._command is not None and text != "BB>> ": self.text.append( text )
- if self.delegate is not None: self.delegate.write( text )
- def flush( self ):
- return self.delegate.flush()
- def fileno( self ):
- return self.delegate.fileno()
- def isatty( self ):
- return self.delegate.isatty()
-
-##########################################################################
-# Class BitBakeShell
-##########################################################################
-
-class BitBakeShell:
-
- def __init__( self ):
- """Register commands and set up readline"""
- self.commandQ = Queue.Queue()
- self.commands = BitBakeShellCommands( self )
- self.myout = MemoryOutput( sys.stdout )
- self.historyfilename = os.path.expanduser( "~/.bbsh_history" )
- self.startupfilename = os.path.expanduser( "~/.bbsh_startup" )
-
- readline.set_completer( completer )
- readline.set_completer_delims( " " )
- readline.parse_and_bind("tab: complete")
-
- try:
- readline.read_history_file( self.historyfilename )
- except IOError:
- pass # It doesn't exist yet.
-
- print(__credits__)
-
- def cleanup( self ):
- """Write readline history and clean up resources"""
- debugOut( "writing command history" )
- try:
- readline.write_history_file( self.historyfilename )
- except:
- print("SHELL: Unable to save command history")
-
- def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ):
- """Register a command"""
- if usage == "": usage = command
- if helptext == "": helptext = function.__doc__ or "<not yet documented>"
- cmds[command] = ( function, numparams, usage, helptext )
-
- def processCommand( self, command, params ):
- """Process a command. Check number of params and print a usage string, if appropriate"""
- debugOut( "processing command '%s'..." % command )
- try:
- function, numparams, usage, helptext = cmds[command]
- except KeyError:
- print("SHELL: ERROR: '%s' command is not a valid command." % command)
- self.myout.removeLast()
- else:
- if (numparams != -1) and (not len( params ) == numparams):
- print("Usage: '%s'" % usage)
- return
-
- result = function( self.commands, params )
- debugOut( "result was '%s'" % result )
-
- def processStartupFile( self ):
- """Read and execute all commands found in $HOME/.bbsh_startup"""
- if os.path.exists( self.startupfilename ):
- startupfile = open( self.startupfilename, "r" )
- for cmdline in startupfile:
- debugOut( "processing startup line '%s'" % cmdline )
- if not cmdline:
- continue
- if "|" in cmdline:
- print("ERROR: '|' in startup file is not allowed. Ignoring line")
- continue
- self.commandQ.put( cmdline.strip() )
-
- def main( self ):
- """The main command loop"""
- while not leave_mainloop:
- try:
- if self.commandQ.empty():
- sys.stdout = self.myout.delegate
- cmdline = raw_input( "BB>> " )
- sys.stdout = self.myout
- else:
- cmdline = self.commandQ.get()
- if cmdline:
- allCommands = cmdline.split( ';' )
- for command in allCommands:
- pipecmd = None
- #
- # special case for expert mode
- if command == 'python':
- sys.stdout = self.myout.delegate
- self.processCommand( command, "" )
- sys.stdout = self.myout
- else:
- self.myout.startCommand( command )
- if '|' in command: # disable output
- command, pipecmd = command.split( '|' )
- delegate = self.myout.delegate
- self.myout.delegate = None
- tokens = shlex.split( command, True )
- self.processCommand( tokens[0], tokens[1:] or "" )
- self.myout.endCommand()
- if pipecmd is not None: # restore output
- self.myout.delegate = delegate
-
- pipe = popen2.Popen4( pipecmd )
- pipe.tochild.write( "\n".join( self.myout.lastBuffer() ) )
- pipe.tochild.close()
- sys.stdout.write( pipe.fromchild.read() )
- #
- except EOFError:
- print()
- return
- except KeyboardInterrupt:
- print()
-
-##########################################################################
-# Start function - called from the BitBake command line utility
-##########################################################################
-
-def start( aCooker ):
- global cooker
- cooker = aCooker
- bbshell = BitBakeShell()
- bbshell.processStartupFile()
- bbshell.main()
- bbshell.cleanup()
-
-if __name__ == "__main__":
- print("SHELL: Sorry, this program should only be called by BitBake.")
diff --git a/bitbake/lib/bb/siggen.py b/bitbake/lib/bb/siggen.py
deleted file mode 100644
index e804d611b9..0000000000
--- a/bitbake/lib/bb/siggen.py
+++ /dev/null
@@ -1,298 +0,0 @@
-import hashlib
-import logging
-import os
-import re
-import bb.data
-
-logger = logging.getLogger('BitBake.SigGen')
-
-try:
- import cPickle as pickle
-except ImportError:
- import pickle
- logger.info('Importing cPickle failed. Falling back to a very slow implementation.')
-
-def init(d):
- siggens = [obj for obj in globals().itervalues()
- if type(obj) is type and issubclass(obj, SignatureGenerator)]
-
- desired = bb.data.getVar("BB_SIGNATURE_HANDLER", d, True) or "noop"
- for sg in siggens:
- if desired == sg.name:
- return sg(d)
- else:
- logger.error("Invalid signature generator '%s', using default 'noop'\n"
- "Available generators: %s",
- desired, ', '.join(obj.name for obj in siggens))
- return SignatureGenerator(d)
-
-class SignatureGenerator(object):
- """
- """
- name = "noop"
-
- def __init__(self, data):
- return
-
- def finalise(self, fn, d, variant):
- return
-
- def get_taskhash(self, fn, task, deps, dataCache):
- return 0
-
- def set_taskdata(self, hashes, deps):
- return
-
- def stampfile(self, stampbase, file_name, taskname, extrainfo):
- return ("%s.%s.%s" % (stampbase, taskname, extrainfo)).rstrip('.')
-
-class SignatureGeneratorBasic(SignatureGenerator):
-    """
-    Basic signature generator: hashes each task together with the values
-    of the variables the task depends on.
-    """
-    name = "basic"
-
- def __init__(self, data):
- self.basehash = {}
- self.taskhash = {}
- self.taskdeps = {}
- self.runtaskdeps = {}
- self.gendeps = {}
- self.lookupcache = {}
- self.basewhitelist = set((data.getVar("BB_HASHBASE_WHITELIST", True) or "").split())
- self.taskwhitelist = data.getVar("BB_HASHTASK_WHITELIST", True) or None
-
- if self.taskwhitelist:
- self.twl = re.compile(self.taskwhitelist)
- else:
- self.twl = None
-
- def _build_data(self, fn, d):
-
- tasklist, gendeps = bb.data.generate_dependencies(d)
-
- taskdeps = {}
- basehash = {}
- lookupcache = {}
-
- for task in tasklist:
-            data = d.getVar(task, False)
-            lookupcache[task] = data
-            if data is None:
-                bb.error("Task %s from %s seems to be empty?!" % (task, fn))
-                data = ''
-
- newdeps = gendeps[task]
- seen = set()
- while newdeps:
- nextdeps = newdeps
- seen |= nextdeps
- newdeps = set()
- for dep in nextdeps:
- if dep in self.basewhitelist:
- continue
- newdeps |= gendeps[dep]
- newdeps -= seen
-
- alldeps = seen - self.basewhitelist
-
- for dep in sorted(alldeps):
- if dep in lookupcache:
- var = lookupcache[dep]
- else:
- var = d.getVar(dep, False)
- lookupcache[dep] = var
- if var:
- data = data + var
- self.basehash[fn + "." + task] = hashlib.md5(data).hexdigest()
- taskdeps[task] = sorted(alldeps)
-
- self.taskdeps[fn] = taskdeps
- self.gendeps[fn] = gendeps
- self.lookupcache[fn] = lookupcache
-
- return taskdeps
-
- def finalise(self, fn, d, variant):
-
- if variant:
- fn = "virtual:" + variant + ":" + fn
-
- taskdeps = self._build_data(fn, d)
-
- #Slow but can be useful for debugging mismatched basehashes
- #for task in self.taskdeps[fn]:
- # self.dump_sigtask(fn, task, d.getVar("STAMP", True), False)
-
- for task in taskdeps:
- d.setVar("BB_BASEHASH_task-%s" % task, self.basehash[fn + "." + task])
-
- def get_taskhash(self, fn, task, deps, dataCache):
- k = fn + "." + task
- data = dataCache.basetaskhash[k]
- self.runtaskdeps[k] = []
- for dep in sorted(deps):
- # We only manipulate the dependencies for packages not in the whitelist
- if self.twl and not self.twl.search(dataCache.pkg_fn[fn]):
- # then process the actual dependencies
-                dep_fn = re.search(r"(?P<fn>.*)\..*", dep).group('fn')
- if self.twl.search(dataCache.pkg_fn[dep_fn]):
- continue
- if dep not in self.taskhash:
- bb.fatal("%s is not in taskhash, caller isn't calling in dependency order?", dep)
- data = data + self.taskhash[dep]
- self.runtaskdeps[k].append(dep)
- h = hashlib.md5(data).hexdigest()
- self.taskhash[k] = h
- #d.setVar("BB_TASKHASH_task-%s" % task, taskhash[task])
- return h
-
- def set_taskdata(self, hashes, deps):
- self.runtaskdeps = deps
- self.taskhash = hashes
-
- def dump_sigtask(self, fn, task, stampbase, runtime):
- k = fn + "." + task
- if runtime == "customfile":
- sigfile = stampbase
- elif runtime:
- sigfile = stampbase + "." + task + ".sigdata" + "." + self.taskhash[k]
- else:
- sigfile = stampbase + "." + task + ".sigbasedata" + "." + self.basehash[k]
-
- bb.utils.mkdirhier(os.path.dirname(sigfile))
-
- data = {}
- data['basewhitelist'] = self.basewhitelist
- data['taskwhitelist'] = self.taskwhitelist
- data['taskdeps'] = self.taskdeps[fn][task]
- data['basehash'] = self.basehash[k]
- data['gendeps'] = {}
- data['varvals'] = {}
- data['varvals'][task] = self.lookupcache[fn][task]
- for dep in self.taskdeps[fn][task]:
- if dep in self.basewhitelist:
- continue
- data['gendeps'][dep] = self.gendeps[fn][dep]
- data['varvals'][dep] = self.lookupcache[fn][dep]
-
- if runtime:
- data['runtaskdeps'] = self.runtaskdeps[k]
- data['runtaskhashes'] = {}
- for dep in data['runtaskdeps']:
- data['runtaskhashes'][dep] = self.taskhash[dep]
-
-        p = pickle.Pickler(open(sigfile, "wb"), -1)
- p.dump(data)
-
- def dump_sigs(self, dataCache):
- for fn in self.taskdeps:
- for task in self.taskdeps[fn]:
- k = fn + "." + task
- if k not in self.taskhash:
- continue
- if dataCache.basetaskhash[k] != self.basehash[k]:
- bb.error("Bitbake's cached basehash does not match the one we just generated (%s)!" % k)
- bb.error("The mismatched hashes were %s and %s" % (dataCache.basetaskhash[k], self.basehash[k]))
- self.dump_sigtask(fn, task, dataCache.stamp[fn], True)
-
-class SignatureGeneratorBasicHash(SignatureGeneratorBasic):
- name = "basichash"
-
- def stampfile(self, stampbase, fn, taskname, extrainfo):
- if taskname != "do_setscene" and taskname.endswith("_setscene"):
- k = fn + "." + taskname[:-9]
- else:
- k = fn + "." + taskname
- h = self.taskhash[k]
- return ("%s.%s.%s.%s" % (stampbase, taskname, h, extrainfo)).rstrip('.')
-
-def dump_this_task(outfile, d):
- import bb.parse
- fn = d.getVar("BB_FILENAME", True)
- task = "do_" + d.getVar("BB_CURRENTTASK", True)
- bb.parse.siggen.dump_sigtask(fn, task, outfile, "customfile")
-
-def compare_sigfiles(a, b):
-    p1 = pickle.Unpickler(open(a, "rb"))
-    a_data = p1.load()
-    p2 = pickle.Unpickler(open(b, "rb"))
-    b_data = p2.load()
-
-    def dict_diff(a, b):
-        sa = set(a.keys())
-        sb = set(b.keys())
-        common = sa & sb
-        changed = set()
-        for i in common:
-            if a[i] != b[i]:
-                changed.add(i)
-        added = sa - sb
-        removed = sb - sa
-        return changed, added, removed
-
-    if 'basewhitelist' in a_data and a_data['basewhitelist'] != b_data['basewhitelist']:
-        print("basewhitelist changed from %s to %s" % (a_data['basewhitelist'], b_data['basewhitelist']))
-
-    if 'taskwhitelist' in a_data and a_data['taskwhitelist'] != b_data['taskwhitelist']:
-        print("taskwhitelist changed from %s to %s" % (a_data['taskwhitelist'], b_data['taskwhitelist']))
-
-    if a_data['taskdeps'] != b_data['taskdeps']:
-        print("Task dependencies changed from %s to %s" % (sorted(a_data['taskdeps']), sorted(b_data['taskdeps'])))
-
-    if a_data['basehash'] != b_data['basehash']:
-        print("basehash changed from %s to %s" % (a_data['basehash'], b_data['basehash']))
-
-    changed, added, removed = dict_diff(a_data['gendeps'], b_data['gendeps'])
-    if changed:
-        for dep in changed:
-            print("List of dependencies for variable %s changed from %s to %s" % (dep, a_data['gendeps'][dep], b_data['gendeps'][dep]))
-    if added:
-        for dep in added:
-            print("Dependency on variable %s was added" % (dep))
-    if removed:
-        for dep in removed:
-            print("Dependency on variable %s was removed" % (dep))
-
-    changed, added, removed = dict_diff(a_data['varvals'], b_data['varvals'])
-    if changed:
-        for dep in changed:
-            print("Variable %s value changed from %s to %s" % (dep, a_data['varvals'][dep], b_data['varvals'][dep]))
-
-    if 'runtaskhashes' in a_data and 'runtaskhashes' in b_data:
-        changed, added, removed = dict_diff(a_data['runtaskhashes'], b_data['runtaskhashes'])
-        if added:
-            for dep in added:
-                print("Dependency on task %s was added" % (dep))
-        if removed:
-            for dep in removed:
-                print("Dependency on task %s was removed" % (dep))
-        if changed:
-            for dep in changed:
-                print("Hash for dependent task %s changed from %s to %s" % (dep, a_data['runtaskhashes'][dep], b_data['runtaskhashes'][dep]))
-    elif 'runtaskdeps' in a_data and 'runtaskdeps' in b_data and sorted(a_data['runtaskdeps']) != sorted(b_data['runtaskdeps']):
-        print("Tasks this task depends on changed from %s to %s" % (sorted(a_data['runtaskdeps']), sorted(b_data['runtaskdeps'])))
-
-def dump_sigfile(a):
-    p1 = pickle.Unpickler(open(a, "rb"))
-    a_data = p1.load()
-
-    print("basewhitelist: %s" % (a_data['basewhitelist']))
-
-    print("taskwhitelist: %s" % (a_data['taskwhitelist']))
-
-    print("Task dependencies: %s" % (sorted(a_data['taskdeps'])))
-
-    print("basehash: %s" % (a_data['basehash']))
-
-    for dep in a_data['gendeps']:
-        print("List of dependencies for variable %s is %s" % (dep, a_data['gendeps'][dep]))
-
-    for dep in a_data['varvals']:
-        print("Variable %s value is %s" % (dep, a_data['varvals'][dep]))
-
-    if 'runtaskdeps' in a_data:
-        print("Tasks this task depends on: %s" % (a_data['runtaskdeps']))
-
-    if 'runtaskhashes' in a_data:
-        for dep in a_data['runtaskhashes']:
-            print("Hash for dependent task %s is %s" % (dep, a_data['runtaskhashes'][dep]))
diff --git a/bitbake/lib/bb/taskdata.py b/bitbake/lib/bb/taskdata.py
deleted file mode 100644
index 81a42b7b53..0000000000
--- a/bitbake/lib/bb/taskdata.py
+++ /dev/null
@@ -1,586 +0,0 @@
-#!/usr/bin/env python
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake 'TaskData' implementation
-
-Task data collection and handling
-
-"""
-
-# Copyright (C) 2006 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import logging
-import re
-import bb
-
-logger = logging.getLogger("BitBake.TaskData")
-
-def re_match_strings(target, strings):
- """
- Whether or not the string 'target' matches
- any one string of the strings which can be regular expression string
- """
- return any(name == target or re.match(name, target)
- for name in strings)
-
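-# Illustrative only (not part of the original file): entries in 'strings'
-# are treated both as literals and as regular expressions anchored at the
-# start of 'target', so:
-def _example_re_match_strings():
-    assert re_match_strings("virtual/kernel", ["virtual/.*"])
-    assert re_match_strings("gcc", ["gcc", "binutils"])
-    assert not re_match_strings("gcc-cross", ["binutils"])
-    # re.match() only anchors at the start, so the literal "gcc" also
-    # matches the longer target "gcc-cross" as a regex prefix
-    assert re_match_strings("gcc-cross", ["gcc"])
-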
-class TaskData:
- """
- BitBake Task Data implementation
- """
- def __init__(self, abort = True, tryaltconfigs = False):
- self.build_names_index = []
- self.run_names_index = []
- self.fn_index = []
-
- self.build_targets = {}
- self.run_targets = {}
-
- self.external_targets = []
-
- self.tasks_fnid = []
- self.tasks_name = []
- self.tasks_tdepends = []
- self.tasks_idepends = []
- # Cache to speed up task ID lookups
- self.tasks_lookup = {}
-
- self.depids = {}
- self.rdepids = {}
-
- self.consider_msgs_cache = []
-
- self.failed_deps = []
- self.failed_rdeps = []
- self.failed_fnids = []
-
- self.abort = abort
- self.tryaltconfigs = tryaltconfigs
-
- def getbuild_id(self, name):
- """
- Return an ID number for the build target name.
- If it doesn't exist, create one.
- """
- if not name in self.build_names_index:
- self.build_names_index.append(name)
- return len(self.build_names_index) - 1
-
- return self.build_names_index.index(name)
-
- def getrun_id(self, name):
- """
- Return an ID number for the run target name.
- If it doesn't exist, create one.
- """
- if not name in self.run_names_index:
- self.run_names_index.append(name)
- return len(self.run_names_index) - 1
-
- return self.run_names_index.index(name)
-
- def getfn_id(self, name):
- """
- Return an ID number for the filename.
- If it doesn't exist, create one.
- """
- if not name in self.fn_index:
- self.fn_index.append(name)
- return len(self.fn_index) - 1
-
- return self.fn_index.index(name)
-
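-    # Illustrative sketch (not part of the original file): the get*_id()
-    # helpers above all implement the same interning scheme, so ids are
-    # stable, dense and reversible via the matching *_index list.
-    def _example_id_roundtrip(self):
-        fnid = self.getfn_id("/path/to/foo_1.0.bb")    # hypothetical recipe
-        assert self.fn_index[fnid] == "/path/to/foo_1.0.bb"
-        assert self.getfn_id("/path/to/foo_1.0.bb") == fnid    # idempotent
-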
- def gettask_ids(self, fnid):
- """
- Return an array of the ID numbers matching a given fnid.
- """
- ids = []
- if fnid in self.tasks_lookup:
- for task in self.tasks_lookup[fnid]:
- ids.append(self.tasks_lookup[fnid][task])
- return ids
-
- def gettask_id(self, fn, task, create = True):
- """
- Return an ID number for the task matching fn and task.
- If it doesn't exist, create one by default.
- Optionally return None instead.
- """
- fnid = self.getfn_id(fn)
-
- if fnid in self.tasks_lookup:
- if task in self.tasks_lookup[fnid]:
- return self.tasks_lookup[fnid][task]
-
- if not create:
- return None
-
- self.tasks_name.append(task)
- self.tasks_fnid.append(fnid)
- self.tasks_tdepends.append([])
- self.tasks_idepends.append([])
-
- listid = len(self.tasks_name) - 1
-
- if fnid not in self.tasks_lookup:
- self.tasks_lookup[fnid] = {}
- self.tasks_lookup[fnid][task] = listid
-
- return listid
-
- def add_tasks(self, fn, dataCache):
- """
- Add tasks for a given fn to the database
- """
-
- task_deps = dataCache.task_deps[fn]
-
- fnid = self.getfn_id(fn)
-
- if fnid in self.failed_fnids:
- bb.msg.fatal(bb.msg.domain.TaskData, "Trying to re-add a failed file? Something is broken...")
-
- # Check if we've already seen this fn
- if fnid in self.tasks_fnid:
- return
-
- for task in task_deps['tasks']:
-
- # Work out task dependencies
- parentids = []
- for dep in task_deps['parents'][task]:
- parentid = self.gettask_id(fn, dep)
- parentids.append(parentid)
- taskid = self.gettask_id(fn, task)
- self.tasks_tdepends[taskid].extend(parentids)
-
- # Touch all intertask dependencies
- if 'depends' in task_deps and task in task_deps['depends']:
- ids = []
- for dep in task_deps['depends'][task].split():
- if dep:
- if ":" not in dep:
-                        bb.msg.fatal(bb.msg.domain.TaskData, "Error for %s, dependency %s does not contain ':' character. Task 'depends' should be specified in the form 'packagename:task'" % (fn, dep))
- ids.append(((self.getbuild_id(dep.split(":")[0])), dep.split(":")[1]))
- self.tasks_idepends[taskid].extend(ids)
-
- # Work out build dependencies
- if not fnid in self.depids:
- dependids = {}
- for depend in dataCache.deps[fn]:
- logger.debug(2, "Added dependency %s for %s", depend, fn)
- dependids[self.getbuild_id(depend)] = None
- self.depids[fnid] = dependids.keys()
-
- # Work out runtime dependencies
- if not fnid in self.rdepids:
- rdependids = {}
- rdepends = dataCache.rundeps[fn]
- rrecs = dataCache.runrecs[fn]
- for package in rdepends:
- for rdepend in rdepends[package]:
- logger.debug(2, "Added runtime dependency %s for %s", rdepend, fn)
- rdependids[self.getrun_id(rdepend)] = None
- for package in rrecs:
- for rdepend in rrecs[package]:
- logger.debug(2, "Added runtime recommendation %s for %s", rdepend, fn)
- rdependids[self.getrun_id(rdepend)] = None
- self.rdepids[fnid] = rdependids.keys()
-
- for dep in self.depids[fnid]:
- if dep in self.failed_deps:
- self.fail_fnid(fnid)
- return
- for dep in self.rdepids[fnid]:
- if dep in self.failed_rdeps:
- self.fail_fnid(fnid)
- return
-
- def have_build_target(self, target):
- """
- Have we a build target matching this name?
- """
- targetid = self.getbuild_id(target)
-
- if targetid in self.build_targets:
- return True
- return False
-
- def have_runtime_target(self, target):
- """
- Have we a runtime target matching this name?
- """
- targetid = self.getrun_id(target)
-
- if targetid in self.run_targets:
- return True
- return False
-
- def add_build_target(self, fn, item):
- """
- Add a build target.
- If already present, append the provider fn to the list
- """
- targetid = self.getbuild_id(item)
- fnid = self.getfn_id(fn)
-
- if targetid in self.build_targets:
- if fnid in self.build_targets[targetid]:
- return
- self.build_targets[targetid].append(fnid)
- return
- self.build_targets[targetid] = [fnid]
-
- def add_runtime_target(self, fn, item):
- """
- Add a runtime target.
- If already present, append the provider fn to the list
- """
- targetid = self.getrun_id(item)
- fnid = self.getfn_id(fn)
-
- if targetid in self.run_targets:
- if fnid in self.run_targets[targetid]:
- return
- self.run_targets[targetid].append(fnid)
- return
- self.run_targets[targetid] = [fnid]
-
- def mark_external_target(self, item):
- """
- Mark a build target as being externally requested
- """
- targetid = self.getbuild_id(item)
-
- if targetid not in self.external_targets:
- self.external_targets.append(targetid)
-
- def get_unresolved_build_targets(self, dataCache):
- """
-        Return a list of build targets whose providers
-        are unknown.
- """
- unresolved = []
- for target in self.build_names_index:
- if re_match_strings(target, dataCache.ignored_dependencies):
- continue
- if self.build_names_index.index(target) in self.failed_deps:
- continue
- if not self.have_build_target(target):
- unresolved.append(target)
- return unresolved
-
- def get_unresolved_run_targets(self, dataCache):
- """
-        Return a list of runtime targets whose providers
-        are unknown.
- """
- unresolved = []
- for target in self.run_names_index:
- if re_match_strings(target, dataCache.ignored_dependencies):
- continue
- if self.run_names_index.index(target) in self.failed_rdeps:
- continue
- if not self.have_runtime_target(target):
- unresolved.append(target)
- return unresolved
-
- def get_provider(self, item):
- """
- Return a list of providers of item
- """
- targetid = self.getbuild_id(item)
-
- return self.build_targets[targetid]
-
- def get_dependees(self, itemid):
- """
- Return a list of targets which depend on item
- """
- dependees = []
- for fnid in self.depids:
- if itemid in self.depids[fnid]:
- dependees.append(fnid)
- return dependees
-
- def get_dependees_str(self, item):
- """
- Return a list of targets which depend on item as a user readable string
- """
- itemid = self.getbuild_id(item)
- dependees = []
- for fnid in self.depids:
- if itemid in self.depids[fnid]:
- dependees.append(self.fn_index[fnid])
- return dependees
-
- def get_rdependees(self, itemid):
- """
- Return a list of targets which depend on runtime item
- """
- dependees = []
- for fnid in self.rdepids:
- if itemid in self.rdepids[fnid]:
- dependees.append(fnid)
- return dependees
-
- def get_rdependees_str(self, item):
- """
- Return a list of targets which depend on runtime item as a user readable string
- """
- itemid = self.getrun_id(item)
- dependees = []
- for fnid in self.rdepids:
- if itemid in self.rdepids[fnid]:
- dependees.append(self.fn_index[fnid])
- return dependees
-
- def add_provider(self, cfgData, dataCache, item):
- try:
- self.add_provider_internal(cfgData, dataCache, item)
- except bb.providers.NoProvider:
- if self.abort:
- raise
- self.remove_buildtarget(self.getbuild_id(item))
-
- self.mark_external_target(item)
-
- def add_provider_internal(self, cfgData, dataCache, item):
- """
- Add the providers of item to the task data
-        Mark entries that were specifically added externally, as distinct
-        from dependencies added internally during dependency resolution
- """
-
- if re_match_strings(item, dataCache.ignored_dependencies):
- return
-
- if not item in dataCache.providers:
-            bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item)), cfgData)
- raise bb.providers.NoProvider(item)
-
- if self.have_build_target(item):
- return
-
- all_p = dataCache.providers[item]
-
- eligible, foundUnique = bb.providers.filterProviders(all_p, item, cfgData, dataCache)
- eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
-
- if not eligible:
- bb.event.fire(bb.event.NoProvider(item, dependees=self.get_dependees_str(item)), cfgData)
- raise bb.providers.NoProvider(item)
-
-        if len(eligible) > 1 and not foundUnique:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(dataCache.pkg_fn[fn])
- bb.event.fire(bb.event.MultipleProviders(item, providers_list), cfgData)
- self.consider_msgs_cache.append(item)
-
- for fn in eligible:
- fnid = self.getfn_id(fn)
- if fnid in self.failed_fnids:
- continue
- logger.debug(2, "adding %s to satisfy %s", fn, item)
- self.add_build_target(fn, item)
- self.add_tasks(fn, dataCache)
-
-
- #item = dataCache.pkg_fn[fn]
-
- def add_rprovider(self, cfgData, dataCache, item):
- """
- Add the runtime providers of item to the task data
- (takes item names from RDEPENDS/PACKAGES namespace)
- """
-
- if re_match_strings(item, dataCache.ignored_dependencies):
- return
-
- if self.have_runtime_target(item):
- return
-
- all_p = bb.providers.getRuntimeProviders(dataCache, item)
-
- if not all_p:
- bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item)), cfgData)
- raise bb.providers.NoRProvider(item)
-
- eligible, numberPreferred = bb.providers.filterProvidersRunTime(all_p, item, cfgData, dataCache)
- eligible = [p for p in eligible if not self.getfn_id(p) in self.failed_fnids]
-
- if not eligible:
- bb.event.fire(bb.event.NoProvider(item, runtime=True, dependees=self.get_rdependees_str(item)), cfgData)
- raise bb.providers.NoRProvider(item)
-
- if len(eligible) > 1 and numberPreferred == 0:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(dataCache.pkg_fn[fn])
- bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
- self.consider_msgs_cache.append(item)
-
- if numberPreferred > 1:
- if item not in self.consider_msgs_cache:
- providers_list = []
- for fn in eligible:
- providers_list.append(dataCache.pkg_fn[fn])
- bb.event.fire(bb.event.MultipleProviders(item, providers_list, runtime=True), cfgData)
- self.consider_msgs_cache.append(item)
-
- # run through the list until we find one that we can build
- for fn in eligible:
- fnid = self.getfn_id(fn)
- if fnid in self.failed_fnids:
- continue
- logger.debug(2, "adding '%s' to satisfy runtime '%s'", fn, item)
- self.add_runtime_target(fn, item)
- self.add_tasks(fn, dataCache)
-
- def fail_fnid(self, fnid, missing_list = []):
- """
- Mark a file as failed (unbuildable)
- Remove any references from build and runtime provider lists
-
-        missing_list: a list of missing requirements for this target
- """
- if fnid in self.failed_fnids:
- return
- logger.debug(1, "File '%s' is unbuildable, removing...", self.fn_index[fnid])
- self.failed_fnids.append(fnid)
- for target in self.build_targets:
- if fnid in self.build_targets[target]:
- self.build_targets[target].remove(fnid)
- if len(self.build_targets[target]) == 0:
- self.remove_buildtarget(target, missing_list)
- for target in self.run_targets:
- if fnid in self.run_targets[target]:
- self.run_targets[target].remove(fnid)
- if len(self.run_targets[target]) == 0:
- self.remove_runtarget(target, missing_list)
-
- def remove_buildtarget(self, targetid, missing_list = []):
- """
- Mark a build target as failed (unbuildable)
- Trigger removal of any files that have this as a dependency
- """
- if not missing_list:
- missing_list = [self.build_names_index[targetid]]
- else:
- missing_list = [self.build_names_index[targetid]] + missing_list
- logger.verbose("Target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.build_names_index[targetid], missing_list)
- self.failed_deps.append(targetid)
- dependees = self.get_dependees(targetid)
- for fnid in dependees:
- self.fail_fnid(fnid, missing_list)
- for taskid in xrange(len(self.tasks_idepends)):
- idepends = self.tasks_idepends[taskid]
- for (idependid, idependtask) in idepends:
- if idependid == targetid:
- self.fail_fnid(self.tasks_fnid[taskid], missing_list)
-
- if self.abort and targetid in self.external_targets:
- target = self.build_names_index[targetid]
- logger.error("Required build target '%s' has no buildable providers.\nMissing or unbuildable dependency chain was: %s", target, missing_list)
- raise bb.providers.NoProvider(target)
-
- def remove_runtarget(self, targetid, missing_list = []):
- """
- Mark a run target as failed (unbuildable)
- Trigger removal of any files that have this as a dependency
- """
- if not missing_list:
- missing_list = [self.run_names_index[targetid]]
- else:
- missing_list = [self.run_names_index[targetid]] + missing_list
-
- logger.info("Runtime target '%s' is unbuildable, removing...\nMissing or unbuildable dependency chain was: %s", self.run_names_index[targetid], missing_list)
- self.failed_rdeps.append(targetid)
- dependees = self.get_rdependees(targetid)
- for fnid in dependees:
- self.fail_fnid(fnid, missing_list)
-
- def add_unresolved(self, cfgData, dataCache):
- """
- Resolve all unresolved build and runtime targets
- """
- logger.info("Resolving any missing task queue dependencies")
- while True:
- added = 0
- for target in self.get_unresolved_build_targets(dataCache):
- try:
- self.add_provider_internal(cfgData, dataCache, target)
- added = added + 1
- except bb.providers.NoProvider:
- targetid = self.getbuild_id(target)
- if self.abort and targetid in self.external_targets:
- raise
- self.remove_buildtarget(targetid)
- for target in self.get_unresolved_run_targets(dataCache):
- try:
- self.add_rprovider(cfgData, dataCache, target)
- added = added + 1
- except bb.providers.NoRProvider:
- self.remove_runtarget(self.getrun_id(target))
- logger.debug(1, "Resolved " + str(added) + " extra dependencies")
- if added == 0:
- break
- # self.dump_data()
-
- def dump_data(self):
- """
- Dump some debug information on the internal data structures
- """
- logger.debug(3, "build_names:")
- logger.debug(3, ", ".join(self.build_names_index))
-
- logger.debug(3, "run_names:")
- logger.debug(3, ", ".join(self.run_names_index))
-
- logger.debug(3, "build_targets:")
- for buildid in xrange(len(self.build_names_index)):
- target = self.build_names_index[buildid]
- targets = "None"
- if buildid in self.build_targets:
- targets = self.build_targets[buildid]
- logger.debug(3, " (%s)%s: %s", buildid, target, targets)
-
- logger.debug(3, "run_targets:")
- for runid in xrange(len(self.run_names_index)):
- target = self.run_names_index[runid]
- targets = "None"
- if runid in self.run_targets:
- targets = self.run_targets[runid]
- logger.debug(3, " (%s)%s: %s", runid, target, targets)
-
- logger.debug(3, "tasks:")
- for task in xrange(len(self.tasks_name)):
- logger.debug(3, " (%s)%s - %s: %s",
- task,
- self.fn_index[self.tasks_fnid[task]],
- self.tasks_name[task],
- self.tasks_tdepends[task])
-
- logger.debug(3, "dependency ids (per fn):")
- for fnid in self.depids:
- logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.depids[fnid])
-
- logger.debug(3, "runtime dependency ids (per fn):")
- for fnid in self.rdepids:
- logger.debug(3, " %s %s: %s", fnid, self.fn_index[fnid], self.rdepids[fnid])
diff --git a/bitbake/lib/bb/ui/__init__.py b/bitbake/lib/bb/ui/__init__.py
deleted file mode 100644
index a4805ed028..0000000000
--- a/bitbake/lib/bb/ui/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# BitBake UI Implementation
-#
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/bitbake/lib/bb/ui/crumbs/__init__.py b/bitbake/lib/bb/ui/crumbs/__init__.py
deleted file mode 100644
index a4805ed028..0000000000
--- a/bitbake/lib/bb/ui/crumbs/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#
-# BitBake UI Implementation
-#
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
diff --git a/bitbake/lib/bb/ui/crumbs/buildmanager.py b/bitbake/lib/bb/ui/crumbs/buildmanager.py
deleted file mode 100644
index e858d75e4c..0000000000
--- a/bitbake/lib/bb/ui/crumbs/buildmanager.py
+++ /dev/null
@@ -1,455 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import threading
-import os
-import datetime
-import time
-
-class BuildConfiguration:
- """ Represents a potential *or* historic *or* concrete build. It
- encompasses all the things that we need to tell bitbake to do to make it
- build what we want it to build.
-
-    It also stores the metadata URL and the set of possible machines (and the
-    distros / images / uris for these). Apart from the metadata URL these are
- not serialised to file (since they may be transient). In some ways this
- functionality might be shifted to the loader class."""
-
- def __init__ (self):
- self.metadata_url = None
-
- # Tuple of (distros, image, urls)
- self.machine_options = {}
-
- self.machine = None
- self.distro = None
- self.image = None
- self.urls = []
- self.extra_urls = []
- self.extra_pkgs = []
-
- def get_machines_model (self):
- model = gtk.ListStore (gobject.TYPE_STRING)
- for machine in self.machine_options.keys():
- model.append ([machine])
-
- return model
-
- def get_distro_and_images_models (self, machine):
- distro_model = gtk.ListStore (gobject.TYPE_STRING)
-
- for distro in self.machine_options[machine][0]:
- distro_model.append ([distro])
-
- image_model = gtk.ListStore (gobject.TYPE_STRING)
-
- for image in self.machine_options[machine][1]:
- image_model.append ([image])
-
- return (distro_model, image_model)
-
- def get_repos (self):
- self.urls = self.machine_options[self.machine][2]
- return self.urls
-
-    # It might be a lot better if we stored these in bitbake conf
-    # file format.
- @staticmethod
- def load_from_file (filename):
-
- conf = BuildConfiguration()
- with open(filename, "r") as f:
-            for line in f:
-                if ";" not in line:
-                    continue
-                data = line.split (";")[1]
- if (line.startswith ("metadata-url;")):
- conf.metadata_url = data.strip()
- continue
- if (line.startswith ("url;")):
- conf.urls += [data.strip()]
- continue
- if (line.startswith ("extra-url;")):
- conf.extra_urls += [data.strip()]
- continue
- if (line.startswith ("machine;")):
- conf.machine = data.strip()
- continue
- if (line.startswith ("distribution;")):
- conf.distro = data.strip()
- continue
- if (line.startswith ("image;")):
- conf.image = data.strip()
- continue
-
- return conf
-
- # Serialise to a file. This is part of the build process and we use this
- # to be able to repeat a given build (using the same set of parameters)
- # but also so that we can include the details of the image / machine /
- # distro in the build manager tree view.
- def write_to_file (self, filename):
- f = open (filename, "w")
-
- lines = []
-
- if (self.metadata_url):
- lines += ["metadata-url;%s\n" % (self.metadata_url)]
-
- for url in self.urls:
- lines += ["url;%s\n" % (url)]
-
- for url in self.extra_urls:
- lines += ["extra-url;%s\n" % (url)]
-
- if (self.machine):
- lines += ["machine;%s\n" % (self.machine)]
-
- if (self.distro):
- lines += ["distribution;%s\n" % (self.distro)]
-
- if (self.image):
- lines += ["image;%s\n" % (self.image)]
-
- f.writelines (lines)
- f.close ()
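-
-    # Illustrative only (not part of the original file): the on-disk format
-    # produced above is one "key;value" pair per line, with hypothetical
-    # values such as:
-    #
-    #     metadata-url;git://example.org/meta.git
-    #     url;http://example.org/feeds/armv5te
-    #     machine;qemuarm
-    #     distribution;poky
-    #     image;poky-image-minimal
-    #
-    # load_from_file() above parses exactly these keys back.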
-
-class BuildResult(gobject.GObject):
- """ Represents an historic build. Perhaps not successful. But it includes
- things such as the files that are in the directory (the output from the
- build) as well as a deserialised BuildConfiguration file that is stored in
- ".conf" in the directory for the build.
-
- This is GObject so that it can be included in the TreeStore."""
-
- (STATE_COMPLETE, STATE_FAILED, STATE_ONGOING) = \
- (0, 1, 2)
-
- def __init__ (self, parent, identifier):
- gobject.GObject.__init__ (self)
- self.date = None
-
- self.files = []
- self.status = None
- self.identifier = identifier
- self.path = os.path.join (parent, identifier)
-
- # Extract the date, since the directory name is of the
- # format build-<year><month><day>-<ordinal> we can easily
- # pull it out.
- # TODO: Better to stat a file?
- (_, date, revision) = identifier.split ("-")
-
- year = int (date[0:4])
- month = int (date[4:6])
- day = int (date[6:8])
-
- self.date = datetime.date (year, month, day)
-
- self.conf = None
-
- # By default builds are STATE_FAILED unless we find a "complete" file
- # in which case they are STATE_COMPLETE
- self.state = BuildResult.STATE_FAILED
- for file in os.listdir (self.path):
- if (file.startswith (".conf")):
- conffile = os.path.join (self.path, file)
- self.conf = BuildConfiguration.load_from_file (conffile)
- elif (file.startswith ("complete")):
- self.state = BuildResult.STATE_COMPLETE
- else:
- self.add_file (file)
-
- def add_file (self, file):
- # Just add the file for now. Don't care about the type.
- self.files += [(file, None)]
-
-class BuildManagerModel (gtk.TreeStore):
- """ Model for the BuildManagerTreeView. This derives from gtk.TreeStore
- but it abstracts nicely what the columns mean and the setup of the columns
- in the model. """
-
- (COL_IDENT, COL_DESC, COL_MACHINE, COL_DISTRO, COL_BUILD_RESULT, COL_DATE, COL_STATE) = \
- (0, 1, 2, 3, 4, 5, 6)
-
- def __init__ (self):
- gtk.TreeStore.__init__ (self,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_OBJECT,
- gobject.TYPE_INT64,
- gobject.TYPE_INT)
-
-class BuildManager (gobject.GObject):
- """ This class manages the historic builds that have been found in the
- "results" directory but is also used for starting a new build."""
-
- __gsignals__ = {
- 'population-finished' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'populate-error' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ())
- }
-
- def update_build_result (self, result, iter):
- # Convert the date into something we can sort by.
- date = long (time.mktime (result.date.timetuple()))
-
- # Add a top level entry for the build
-
- self.model.set (iter,
- BuildManagerModel.COL_IDENT, result.identifier,
- BuildManagerModel.COL_DESC, result.conf.image,
- BuildManagerModel.COL_MACHINE, result.conf.machine,
- BuildManagerModel.COL_DISTRO, result.conf.distro,
- BuildManagerModel.COL_BUILD_RESULT, result,
- BuildManagerModel.COL_DATE, date,
- BuildManagerModel.COL_STATE, result.state)
-
- # And then we use the files in the directory as the children for the
- # top level iter.
- for file in result.files:
- self.model.append (iter, (None, file[0], None, None, None, date, -1))
-
- # This function is called as an idle by the BuildManagerPopulaterThread
- def add_build_result (self, result):
- gtk.gdk.threads_enter()
- self.known_builds += [result]
-
- self.update_build_result (result, self.model.append (None))
-
- gtk.gdk.threads_leave()
-
- def notify_build_finished (self):
-        # This is a bit of a hack. If we have a build running then we will
-        # have a row in the model in STATE_ONGOING. Find it and update it so
-        # that it looks like a proper historic build (well, it is completed now...)
-
- # We need to use the iters here rather than the Python iterator
- # interface to the model since we need to pass it into
- # update_build_result
-
- iter = self.model.get_iter_first()
-
- while (iter):
- (ident, state) = self.model.get(iter,
- BuildManagerModel.COL_IDENT,
- BuildManagerModel.COL_STATE)
-
- if state == BuildResult.STATE_ONGOING:
- result = BuildResult (self.results_directory, ident)
- self.update_build_result (result, iter)
- iter = self.model.iter_next(iter)
-
- def notify_build_succeeded (self):
-        # Write the "complete" file so that the BuildResult object we later
-        # create and put into the model is marked STATE_COMPLETE
-
- complete_file_path = os.path.join (self.cur_build_directory, "complete")
-        f = open (complete_file_path, "w")
- f.close()
- self.notify_build_finished()
-
- def notify_build_failed (self):
- # Without a "complete" file then this will mark the build as failed:
- self.notify_build_finished()
-
- # This function is called as an idle
- def emit_population_finished_signal (self):
- gtk.gdk.threads_enter()
- self.emit ("population-finished")
- gtk.gdk.threads_leave()
-
- class BuildManagerPopulaterThread (threading.Thread):
- def __init__ (self, manager, directory):
- threading.Thread.__init__ (self)
- self.manager = manager
- self.directory = directory
-
- def run (self):
- # For each of the "build-<...>" directories ..
-
- if os.path.exists (self.directory):
- for directory in os.listdir (self.directory):
-
- if not directory.startswith ("build-"):
- continue
-
- build_result = BuildResult (self.directory, directory)
- self.manager.add_build_result (build_result)
-
- gobject.idle_add (BuildManager.emit_population_finished_signal,
- self.manager)
-
- def __init__ (self, server, results_directory):
- gobject.GObject.__init__ (self)
-
- # The builds that we've found from walking the result directory
- self.known_builds = []
-
- # Save out the bitbake server, we need this for issuing commands to
- # the cooker:
- self.server = server
-
- # The TreeStore that we use
- self.model = BuildManagerModel ()
-
- # The results directory is where we create (and look for) the
- # build-<xyz>-<n> directories. We need to populate ourselves from
- # directory
- self.results_directory = results_directory
- self.populate_from_directory (self.results_directory)
-
- def populate_from_directory (self, directory):
- thread = BuildManager.BuildManagerPopulaterThread (self, directory)
- thread.start()
-
- # Come up with the name for the next build ident by combining "build-"
- # with the date formatted as yyyymmdd and then an ordinal. We do this by
- # an optimistic algorithm incrementing the ordinal if we find that it
- # already exists.
- def get_next_build_ident (self):
- today = datetime.date.today ()
-        datestr = today.strftime ("%Y%m%d")
-
- revision = 0
- test_name = "build-%s-%d" % (datestr, revision)
- test_path = os.path.join (self.results_directory, test_name)
-
- while (os.path.exists (test_path)):
- revision += 1
- test_name = "build-%s-%d" % (datestr, revision)
- test_path = os.path.join (self.results_directory, test_name)
-
- return test_name
-
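-    # Illustrative only (not part of the original file): with today's date
-    # being 2008-11-10 and directories build-20081110-0 and build-20081110-1
-    # already present, get_next_build_ident() above returns "build-20081110-2".
-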
-    # Take a BuildConfiguration and then try to build it based on the
-    # parameters of that configuration.
- def do_build (self, conf):
- server = self.server
-
- # Work out the build directory. Note we actually create the
- # directories here since we need to write the ".conf" file. Otherwise
- # we could have relied on bitbake's builder thread to actually make
- # the directories as it proceeds with the build.
- ident = self.get_next_build_ident ()
- build_directory = os.path.join (self.results_directory,
- ident)
- self.cur_build_directory = build_directory
- os.makedirs (build_directory)
-
- conffile = os.path.join (build_directory, ".conf")
- conf.write_to_file (conffile)
-
-        # Add a row to the model representing this ongoing build. It's kind
-        # of a placeholder entry. If this build completes or fails then it
-        # gets updated with the real data, like the historic builds
- date = long (time.time())
- self.model.append (None, (ident, conf.image, conf.machine, conf.distro,
- None, date, BuildResult.STATE_ONGOING))
- try:
- server.runCommand(["setVariable", "BUILD_IMAGES_FROM_FEEDS", 1])
- server.runCommand(["setVariable", "MACHINE", conf.machine])
- server.runCommand(["setVariable", "DISTRO", conf.distro])
- server.runCommand(["setVariable", "PACKAGE_CLASSES", "package_ipk"])
- server.runCommand(["setVariable", "BBFILES", \
- """${OEROOT}/meta/packages/*/*.bb ${OEROOT}/meta-moblin/packages/*/*.bb"""])
- server.runCommand(["setVariable", "TMPDIR", "${OEROOT}/build/tmp"])
- server.runCommand(["setVariable", "IPK_FEED_URIS", \
- " ".join(conf.get_repos())])
- server.runCommand(["setVariable", "DEPLOY_DIR_IMAGE",
- build_directory])
- server.runCommand(["buildTargets", [conf.image], "rootfs"])
-
- except Exception as e:
- print(e)
-
-class BuildManagerTreeView (gtk.TreeView):
- """ The tree view for the build manager. This shows the historic builds
- and so forth. """
-
- # We use this function to control what goes in the cell since we store
- # the date in the model as seconds since the epoch (for sorting) and so we
- # need to make it human readable.
- def date_format_custom_cell_data_func (self, col, cell, model, iter):
- date = model.get (iter, BuildManagerModel.COL_DATE)[0]
- datestr = time.strftime("%A %d %B %Y", time.localtime(date))
- cell.set_property ("text", datestr)
-
- # This format function controls what goes in the cell. We use this to map
- # the integer state to a string and also to colourise the text
-    def state_format_custom_cell_data_func (self, col, cell, model, iter):
- state = model.get (iter, BuildManagerModel.COL_STATE)[0]
-
- if (state == BuildResult.STATE_ONGOING):
- cell.set_property ("text", "Active")
- cell.set_property ("foreground", "#000000")
- elif (state == BuildResult.STATE_FAILED):
- cell.set_property ("text", "Failed")
- cell.set_property ("foreground", "#ff0000")
- elif (state == BuildResult.STATE_COMPLETE):
- cell.set_property ("text", "Complete")
- cell.set_property ("foreground", "#00ff00")
- else:
- cell.set_property ("text", "")
-
- def __init__ (self):
- gtk.TreeView.__init__(self)
-
-        # Description column (shows the image name)
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn (None, renderer,
- text=BuildManagerModel.COL_DESC)
- self.append_column (col)
-
- # Machine
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn ("Machine", renderer,
- text=BuildManagerModel.COL_MACHINE)
- self.append_column (col)
-
- # distro
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn ("Distribution", renderer,
- text=BuildManagerModel.COL_DISTRO)
- self.append_column (col)
-
- # date (using a custom function for formatting the cell contents it
- # takes epoch -> human readable string)
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn ("Date", renderer,
- text=BuildManagerModel.COL_DATE)
- self.append_column (col)
- col.set_cell_data_func (renderer,
- self.date_format_custom_cell_data_func)
-
- # For status.
- renderer = gtk.CellRendererText ()
- col = gtk.TreeViewColumn ("Status", renderer,
- text = BuildManagerModel.COL_STATE)
- self.append_column (col)
- col.set_cell_data_func (renderer,
-                                self.state_format_custom_cell_data_func)
diff --git a/bitbake/lib/bb/ui/crumbs/progress.py b/bitbake/lib/bb/ui/crumbs/progress.py
deleted file mode 100644
index 36eca38294..0000000000
--- a/bitbake/lib/bb/ui/crumbs/progress.py
+++ /dev/null
@@ -1,17 +0,0 @@
-import gtk
-
-class ProgressBar(gtk.Dialog):
- def __init__(self, parent):
-
- gtk.Dialog.__init__(self)
- self.set_title("Parsing metadata, please wait...")
- self.set_default_size(500, 0)
- self.set_transient_for(parent)
- self.set_destroy_with_parent(True)
- self.progress = gtk.ProgressBar()
- self.vbox.pack_start(self.progress)
- self.show_all()
-
- def update(self, x, y):
- self.progress.set_fraction(float(x)/float(y))
- self.progress.set_text("%2d %%" % (x*100/y))
diff --git a/bitbake/lib/bb/ui/crumbs/puccho.glade b/bitbake/lib/bb/ui/crumbs/puccho.glade
deleted file mode 100644
index d7553a6e14..0000000000
--- a/bitbake/lib/bb/ui/crumbs/puccho.glade
+++ /dev/null
@@ -1,606 +0,0 @@
-<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE glade-interface SYSTEM "glade-2.0.dtd">
-<!--Generated with glade3 3.4.5 on Mon Nov 10 12:24:12 2008 -->
-<glade-interface>
- <widget class="GtkDialog" id="build_dialog">
- <property name="title" translatable="yes">Start a build</property>
- <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
- <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
- <property name="has_separator">False</property>
- <child internal-child="vbox">
- <widget class="GtkVBox" id="dialog-vbox1">
- <property name="visible">True</property>
- <property name="spacing">2</property>
- <child>
- <widget class="GtkTable" id="build_table">
- <property name="visible">True</property>
- <property name="border_width">6</property>
- <property name="n_rows">7</property>
- <property name="n_columns">3</property>
- <property name="column_spacing">5</property>
- <property name="row_spacing">6</property>
- <child>
- <widget class="GtkAlignment" id="status_alignment">
- <property name="visible">True</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkHBox" id="status_hbox">
- <property name="spacing">6</property>
- <child>
- <widget class="GtkImage" id="status_image">
- <property name="visible">True</property>
- <property name="no_show_all">True</property>
- <property name="xalign">0</property>
- <property name="stock">gtk-dialog-error</property>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="fill">False</property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="status_label">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">If you see this text something is wrong...</property>
- <property name="use_markup">True</property>
- <property name="use_underline">True</property>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">2</property>
- <property name="bottom_attach">3</property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label2">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Build configuration&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="image_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="image_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Image:</property>
- </widget>
- <packing>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="distribution_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="distribution_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Distribution:</property>
- </widget>
- <packing>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkComboBox" id="machine_combo">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="machine_label">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Machine:</property>
- </widget>
- <packing>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkButton" id="refresh_button">
- <property name="visible">True</property>
- <property name="sensitive">False</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-refresh</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="location_entry">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="width_chars">32</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">2</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label3">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Location:</property>
- </widget>
- <packing>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label1">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Repository&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment1">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment2">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment3">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="left_attach">2</property>
- <property name="right_attach">3</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child internal-child="action_area">
- <widget class="GtkHButtonBox" id="dialog-action_area1">
- <property name="visible">True</property>
- <property name="layout_style">GTK_BUTTONBOX_END</property>
- <child>
- <placeholder/>
- </child>
- <child>
- <placeholder/>
- </child>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="pack_type">GTK_PACK_END</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <widget class="GtkDialog" id="dialog2">
- <property name="window_position">GTK_WIN_POS_CENTER_ON_PARENT</property>
- <property name="type_hint">GDK_WINDOW_TYPE_HINT_DIALOG</property>
- <property name="has_separator">False</property>
- <child internal-child="vbox">
- <widget class="GtkVBox" id="dialog-vbox2">
- <property name="visible">True</property>
- <property name="spacing">2</property>
- <child>
- <widget class="GtkTable" id="table2">
- <property name="visible">True</property>
- <property name="border_width">6</property>
- <property name="n_rows">7</property>
- <property name="n_columns">3</property>
- <property name="column_spacing">6</property>
- <property name="row_spacing">6</property>
- <child>
- <widget class="GtkLabel" id="label7">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Repositories&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment4">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkScrolledWindow" id="scrolledwindow1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <widget class="GtkTreeView" id="treeview1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="headers_clickable">True</property>
- </widget>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">2</property>
- <property name="bottom_attach">3</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="entry1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label9">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="label" translatable="yes">&lt;b&gt;Additional packages&lt;/b&gt;</property>
- <property name="use_markup">True</property>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">4</property>
- <property name="bottom_attach">5</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment6">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="xscale">0</property>
- <child>
- <widget class="GtkLabel" id="label8">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="yalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Location: </property>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="top_attach">1</property>
- <property name="bottom_attach">2</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment7">
- <property name="visible">True</property>
- <property name="xalign">1</property>
- <property name="xscale">0</property>
- <child>
- <widget class="GtkHButtonBox" id="hbuttonbox1">
- <property name="visible">True</property>
- <property name="spacing">5</property>
- <child>
- <widget class="GtkButton" id="button7">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-remove</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- </child>
- <child>
- <widget class="GtkButton" id="button6">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-edit</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child>
- <widget class="GtkButton" id="button5">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-add</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- <packing>
- <property name="position">2</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment5">
- <property name="visible">True</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="top_attach">3</property>
- <property name="bottom_attach">4</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkLabel" id="label10">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="yalign">0</property>
- <property name="xpad">12</property>
- <property name="label" translatable="yes">Search:</property>
- </widget>
- <packing>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkEntry" id="entry2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- </widget>
- <packing>
- <property name="left_attach">1</property>
- <property name="right_attach">3</property>
- <property name="top_attach">5</property>
- <property name="bottom_attach">6</property>
- <property name="y_options"></property>
- </packing>
- </child>
- <child>
- <widget class="GtkAlignment" id="alignment8">
- <property name="visible">True</property>
- <property name="xalign">0</property>
- <property name="left_padding">12</property>
- <child>
- <widget class="GtkScrolledWindow" id="scrolledwindow2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <widget class="GtkTreeView" id="treeview2">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="headers_clickable">True</property>
- </widget>
- </child>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="right_attach">3</property>
- <property name="top_attach">6</property>
- <property name="bottom_attach">7</property>
- <property name="y_options"></property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- <child internal-child="action_area">
- <widget class="GtkHButtonBox" id="dialog-action_area2">
- <property name="visible">True</property>
- <property name="layout_style">GTK_BUTTONBOX_END</property>
- <child>
- <widget class="GtkButton" id="button4">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="receives_default">True</property>
- <property name="label" translatable="yes">gtk-close</property>
- <property name="use_stock">True</property>
- <property name="response_id">0</property>
- </widget>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- <property name="pack_type">GTK_PACK_END</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
- <widget class="GtkWindow" id="main_window">
- <child>
- <widget class="GtkVBox" id="main_window_vbox">
- <property name="visible">True</property>
- <child>
- <widget class="GtkToolbar" id="main_toolbar">
- <property name="visible">True</property>
- <child>
- <widget class="GtkToolButton" id="main_toolbutton_build">
- <property name="visible">True</property>
- <property name="label" translatable="yes">Build</property>
- <property name="stock_id">gtk-execute</property>
- </widget>
- <packing>
- <property name="expand">False</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="expand">False</property>
- </packing>
- </child>
- <child>
- <widget class="GtkVPaned" id="vpaned1">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <child>
- <widget class="GtkScrolledWindow" id="results_scrolledwindow">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="resize">False</property>
- <property name="shrink">True</property>
- </packing>
- </child>
- <child>
- <widget class="GtkScrolledWindow" id="progress_scrolledwindow">
- <property name="visible">True</property>
- <property name="can_focus">True</property>
- <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
- <child>
- <placeholder/>
- </child>
- </widget>
- <packing>
- <property name="resize">True</property>
- <property name="shrink">True</property>
- </packing>
- </child>
- </widget>
- <packing>
- <property name="position">1</property>
- </packing>
- </child>
- </widget>
- </child>
- </widget>
-</glade-interface>
diff --git a/bitbake/lib/bb/ui/crumbs/runningbuild.py b/bitbake/lib/bb/ui/crumbs/runningbuild.py
deleted file mode 100644
index 4703e6d844..0000000000
--- a/bitbake/lib/bb/ui/crumbs/runningbuild.py
+++ /dev/null
@@ -1,311 +0,0 @@
-
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import logging
-import time
-import urllib
-import urllib2
-import os        # needed by the TaskFailed logfile handling below
-import bb.build  # event classes referenced in handle_event()
-import bb.event
-
-class Colors(object):
- OK = "#ffffff"
- RUNNING = "#aaffaa"
-    WARNING = "#f88017"
- ERROR = "#ffaaaa"
-
-class RunningBuildModel (gtk.TreeStore):
- (COL_LOG, COL_PACKAGE, COL_TASK, COL_MESSAGE, COL_ICON, COL_COLOR, COL_NUM_ACTIVE) = range(7)
-
- def __init__ (self):
- gtk.TreeStore.__init__ (self,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_STRING,
- gobject.TYPE_INT)
-
-class RunningBuild (gobject.GObject):
- __gsignals__ = {
- 'build-succeeded' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'build-failed' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ())
- }
- pids_to_task = {}
- tasks_to_iter = {}
-
- def __init__ (self):
- gobject.GObject.__init__ (self)
- self.model = RunningBuildModel()
-
- def handle_event (self, event, pbar=None):
-        # Handle an event from the event queue; this may result in updating
-        # the model and thus the UI, or it may tell us that the build has
-        # finished (successfully or not, as the case may be).
-
- parent = None
- pid = 0
- package = None
- task = None
-
- # If we have a pid attached to this message/event try and get the
- # (package, task) pair for it. If we get that then get the parent iter
- # for the message.
- if hasattr(event, 'pid'):
- pid = event.pid
- if hasattr(event, 'process'):
- pid = event.process
-
- if pid and pid in self.pids_to_task:
- (package, task) = self.pids_to_task[pid]
- parent = self.tasks_to_iter[(package, task)]
-
-        if isinstance(event, logging.LogRecord):
-            if event.msg.startswith("Running task"):
- return # don't add these to the list
-
- if event.levelno >= logging.ERROR:
- icon = "dialog-error"
- color = Colors.ERROR
- elif event.levelno >= logging.WARNING:
- icon = "dialog-warning"
- color = Colors.WARNING
- else:
- icon = None
- color = Colors.OK
-
- # if we know which package we belong to, we'll append onto its list.
- # otherwise, we'll jump to the top of the master list
- if parent:
- tree_add = self.model.append
- else:
- tree_add = self.model.prepend
- tree_add(parent,
- (None,
- package,
- task,
- event.getMessage(),
- icon,
- color,
- 0))
-
- elif isinstance(event, bb.build.TaskStarted):
- (package, task) = (event._package, event._task)
-
- # Save out this PID.
- self.pids_to_task[pid] = (package, task)
-
- # Check if we already have this package in our model. If so then
- # that can be the parent for the task. Otherwise we create a new
- # top level for the package.
- if ((package, None) in self.tasks_to_iter):
- parent = self.tasks_to_iter[(package, None)]
- else:
- parent = self.model.prepend(None, (None,
- package,
- None,
- "Package: %s" % (package),
- None,
- Colors.OK,
- 0))
- self.tasks_to_iter[(package, None)] = parent
-
- # Because this parent package now has an active child mark it as
- # such.
- # @todo if parent is already in error, don't mark it green
- self.model.set(parent, self.model.COL_ICON, "gtk-execute",
- self.model.COL_COLOR, Colors.RUNNING)
-
- # Add an entry in the model for this task
- i = self.model.append (parent, (None,
- package,
- task,
- "Task: %s" % (task),
- "gtk-execute",
- Colors.RUNNING,
- 0))
-
- # update the parent's active task count
- num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] + 1
- self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
-
- # Save out the iter so that we can find it when we have a message
- # that we need to attach to a task.
- self.tasks_to_iter[(package, task)] = i
-
- elif isinstance(event, bb.build.TaskBase):
- current = self.tasks_to_iter[(package, task)]
- parent = self.tasks_to_iter[(package, None)]
-
- # remove this task from the parent's active count
- num_active = self.model.get(parent, self.model.COL_NUM_ACTIVE)[0] - 1
- self.model.set(parent, self.model.COL_NUM_ACTIVE, num_active)
-
- if isinstance(event, bb.build.TaskFailed):
- # Mark the task and parent as failed
- icon = "dialog-error"
- color = Colors.ERROR
-
- logfile = event.logfile
- if logfile and os.path.exists(logfile):
- with open(logfile) as f:
- logdata = f.read()
- self.model.append(current, ('pastebin', None, None, logdata, 'gtk-error', Colors.OK, 0))
-
- for i in (current, parent):
- self.model.set(i, self.model.COL_ICON, icon,
- self.model.COL_COLOR, color)
- else:
- icon = None
- color = Colors.OK
-
- # Mark the task as inactive
- self.model.set(current, self.model.COL_ICON, icon,
- self.model.COL_COLOR, color)
-
- # Mark the parent package as inactive, but make sure to
- # preserve error and active states
- i = self.tasks_to_iter[(package, None)]
- if self.model.get(parent, self.model.COL_ICON) != 'dialog-error':
- self.model.set(parent, self.model.COL_ICON, icon)
- if num_active == 0:
- self.model.set(parent, self.model.COL_COLOR, Colors.OK)
-
- # Clear the iters and the pids since when the task goes away the
- # pid will no longer be used for messages
- del self.tasks_to_iter[(package, task)]
- del self.pids_to_task[pid]
-
- elif isinstance(event, bb.event.BuildStarted):
-
- self.model.prepend(None, (None,
- None,
- None,
- "Build Started (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
- None,
- Colors.OK,
- 0))
- elif isinstance(event, bb.event.BuildCompleted):
- failures = int (event._failures)
- self.model.prepend(None, (None,
- None,
- None,
- "Build Completed (%s)" % time.strftime('%m/%d/%Y %H:%M:%S'),
- None,
- Colors.OK,
- 0))
-
- # Emit the appropriate signal depending on the number of failures
- if (failures >= 1):
- self.emit ("build-failed")
- else:
- self.emit ("build-succeeded")
-
- elif isinstance(event, bb.event.CacheLoadStarted) and pbar:
- pbar.set_title("Loading cache")
- self.progress_total = event.total
- pbar.update(0, self.progress_total)
- elif isinstance(event, bb.event.CacheLoadProgress) and pbar:
- pbar.update(event.current, self.progress_total)
- elif isinstance(event, bb.event.CacheLoadCompleted) and pbar:
- pbar.update(self.progress_total, self.progress_total)
-
- elif isinstance(event, bb.event.ParseStarted) and pbar:
- pbar.set_title("Processing recipes")
- self.progress_total = event.total
- pbar.update(0, self.progress_total)
- elif isinstance(event, bb.event.ParseProgress) and pbar:
- pbar.update(event.current, self.progress_total)
- elif isinstance(event, bb.event.ParseCompleted) and pbar:
- pbar.hide()
-
- return
-
-
-def do_pastebin(text):
- url = 'http://pastebin.com/api_public.php'
- params = {'paste_code': text, 'paste_format': 'text'}
-
- req = urllib2.Request(url, urllib.urlencode(params))
- response = urllib2.urlopen(req)
- paste_url = response.read()
-
- return paste_url
-
-
-class RunningBuildTreeView (gtk.TreeView):
- __gsignals__ = {
- "button_press_event" : "override"
- }
- def __init__ (self):
- gtk.TreeView.__init__ (self)
-
- # The icon that indicates whether we're building or failed.
- renderer = gtk.CellRendererPixbuf ()
- col = gtk.TreeViewColumn ("Status", renderer)
- col.add_attribute (renderer, "icon-name", 4)
- self.append_column (col)
-
- # The message of the build.
- self.message_renderer = gtk.CellRendererText ()
- self.message_column = gtk.TreeViewColumn ("Message", self.message_renderer, text=3)
- self.message_column.add_attribute(self.message_renderer, 'background', 5)
-        # Any non-zero value is truthy here, so this simply makes the cell editable.
-        self.message_renderer.set_property('editable', 5)
- self.append_column (self.message_column)
-
- def do_button_press_event(self, event):
- gtk.TreeView.do_button_press_event(self, event)
-
- if event.button == 3:
- selection = super(RunningBuildTreeView, self).get_selection()
- (model, iter) = selection.get_selected()
- if iter is not None:
- can_paste = model.get(iter, model.COL_LOG)[0]
- if can_paste == 'pastebin':
- # build a simple menu with a pastebin option
- menu = gtk.Menu()
- menuitem = gtk.MenuItem("Send log to pastebin")
- menu.append(menuitem)
- menuitem.connect("activate", self.pastebin_handler, (model, iter))
- menuitem.show()
- menu.show()
- menu.popup(None, None, None, event.button, event.time)
-
- def pastebin_handler(self, widget, data):
- """
- Send the log data to pastebin, then add the new paste url to the
- clipboard.
- """
- (model, iter) = data
- paste_url = do_pastebin(model.get(iter, model.COL_MESSAGE)[0])
-
- # @todo Provide visual feedback to the user that it is done and that
- # it worked.
- print paste_url
-
- clipboard = gtk.clipboard_get()
- clipboard.set_text(paste_url)
-        clipboard.store()
\ No newline at end of file
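-
-# A minimal usage sketch, assuming PyGTK (with a display) and the bitbake
-# libraries are importable; the synthetic LogRecord is illustrative only.
-# With no pid registered in pids_to_task, the message lands at the top of
-# the tree.
-if __name__ == "__main__":
-    build = RunningBuild()
-    view = RunningBuildTreeView()
-    view.set_model(build.model)
-    record = logging.LogRecord("BitBake", logging.INFO, __file__, 0,
-                               "example message", None, None)
-    build.handle_event(record)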
diff --git a/bitbake/lib/bb/ui/depexp.py b/bitbake/lib/bb/ui/depexp.py
deleted file mode 100644
index 3dbd5e0eca..0000000000
--- a/bitbake/lib/bb/ui/depexp.py
+++ /dev/null
@@ -1,307 +0,0 @@
-#
-# BitBake Graphical GTK based Dependency Explorer
-#
-# Copyright (C) 2007 Ross Burton
-# Copyright (C) 2007 - 2008 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gobject
-import gtk
-import Queue
-import threading
-import xmlrpclib
-import bb
-import bb.event
-from bb.ui.crumbs.progress import ProgressBar
-
-# Package Model
-COL_PKG_NAME = 0
-
-# Dependency Model
-(TYPE_DEP, TYPE_RDEP) = (0, 1)
-(COL_DEP_TYPE, COL_DEP_PARENT, COL_DEP_PACKAGE) = (0, 1, 2)
-
-
-class PackageDepView(gtk.TreeView):
- def __init__(self, model, dep_type, label):
- gtk.TreeView.__init__(self)
- self.current = None
- self.dep_type = dep_type
- self.filter_model = model.filter_new()
- self.filter_model.set_visible_func(self._filter)
- self.set_model(self.filter_model)
- #self.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PACKAGE))
-
- def _filter(self, model, iter):
- (this_type, package) = model.get(iter, COL_DEP_TYPE, COL_DEP_PARENT)
- if this_type != self.dep_type: return False
- return package == self.current
-
- def set_current_package(self, package):
- self.current = package
- self.filter_model.refilter()
-
-
-class PackageReverseDepView(gtk.TreeView):
- def __init__(self, model, label):
- gtk.TreeView.__init__(self)
- self.current = None
- self.filter_model = model.filter_new()
- self.filter_model.set_visible_func(self._filter)
- self.set_model(self.filter_model)
- self.append_column(gtk.TreeViewColumn(label, gtk.CellRendererText(), text=COL_DEP_PARENT))
-
- def _filter(self, model, iter):
- package = model.get_value(iter, COL_DEP_PACKAGE)
- return package == self.current
-
- def set_current_package(self, package):
- self.current = package
- self.filter_model.refilter()
-
-
-class DepExplorer(gtk.Window):
- def __init__(self):
- gtk.Window.__init__(self)
- self.set_title("Dependency Explorer")
- self.set_default_size(500, 500)
- self.connect("delete-event", gtk.main_quit)
-
- # Create the data models
- self.pkg_model = gtk.ListStore(gobject.TYPE_STRING)
- self.pkg_model.set_sort_column_id(COL_PKG_NAME, gtk.SORT_ASCENDING)
- self.depends_model = gtk.ListStore(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)
- self.depends_model.set_sort_column_id(COL_DEP_PACKAGE, gtk.SORT_ASCENDING)
-
- pane = gtk.HPaned()
- pane.set_position(250)
- self.add(pane)
-
- # The master list of packages
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
-
- self.pkg_treeview = gtk.TreeView(self.pkg_model)
- self.pkg_treeview.get_selection().connect("changed", self.on_cursor_changed)
- column = gtk.TreeViewColumn("Package", gtk.CellRendererText(), text=COL_PKG_NAME)
- self.pkg_treeview.append_column(column)
- pane.add1(scrolled)
- scrolled.add(self.pkg_treeview)
-
- box = gtk.VBox(homogeneous=True, spacing=4)
-
- # Runtime Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
- self.rdep_treeview = PackageDepView(self.depends_model, TYPE_RDEP, "Runtime Depends")
- self.rdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- scrolled.add(self.rdep_treeview)
- box.add(scrolled)
-
- # Build Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
- self.dep_treeview = PackageDepView(self.depends_model, TYPE_DEP, "Build Depends")
- self.dep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PACKAGE)
- scrolled.add(self.dep_treeview)
- box.add(scrolled)
- pane.add2(box)
-
- # Reverse Depends
- scrolled = gtk.ScrolledWindow()
- scrolled.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
- scrolled.set_shadow_type(gtk.SHADOW_IN)
- self.revdep_treeview = PackageReverseDepView(self.depends_model, "Reverse Depends")
- self.revdep_treeview.connect("row-activated", self.on_package_activated, COL_DEP_PARENT)
- scrolled.add(self.revdep_treeview)
- box.add(scrolled)
-        pane.add2(box)    # redundant: box was already added as the pane's second child above
-
- self.show_all()
-
- def on_package_activated(self, treeview, path, column, data_col):
- model = treeview.get_model()
- package = model.get_value(model.get_iter(path), data_col)
-
- pkg_path = []
- def finder(model, path, iter, needle):
- package = model.get_value(iter, COL_PKG_NAME)
- if package == needle:
- pkg_path.append(path)
- return True
- else:
- return False
- self.pkg_model.foreach(finder, package)
- if pkg_path:
- self.pkg_treeview.get_selection().select_path(pkg_path[0])
- self.pkg_treeview.scroll_to_cell(pkg_path[0])
-
- def on_cursor_changed(self, selection):
- (model, it) = selection.get_selected()
-        if it is None:
- current_package = None
- else:
- current_package = model.get_value(it, COL_PKG_NAME)
- self.rdep_treeview.set_current_package(current_package)
- self.dep_treeview.set_current_package(current_package)
- self.revdep_treeview.set_current_package(current_package)
-
-
-def parse(depgraph, pkg_model, depends_model):
- for package in depgraph["pn"]:
- pkg_model.set(pkg_model.append(), COL_PKG_NAME, package)
-
- for package in depgraph["depends"]:
- for depend in depgraph["depends"][package]:
- depends_model.set (depends_model.append(),
- COL_DEP_TYPE, TYPE_DEP,
- COL_DEP_PARENT, package,
- COL_DEP_PACKAGE, depend)
-
- for package in depgraph["rdepends-pn"]:
- for rdepend in depgraph["rdepends-pn"][package]:
- depends_model.set (depends_model.append(),
- COL_DEP_TYPE, TYPE_RDEP,
- COL_DEP_PARENT, package,
- COL_DEP_PACKAGE, rdepend)
-
-
-class gtkthread(threading.Thread):
- quit = threading.Event()
- def __init__(self, shutdown):
- threading.Thread.__init__(self)
- self.setDaemon(True)
- self.shutdown = shutdown
-
- def run(self):
- gobject.threads_init()
- gtk.gdk.threads_init()
- gtk.main()
- gtkthread.quit.set()
-
-
-def main(server, eventHandler):
- try:
- cmdline = server.runCommand(["getCmdLineAction"])
- if not cmdline or cmdline[0] != "generateDotGraph":
- print("This UI is only compatible with the -g option")
- return
- ret = server.runCommand(["generateDepTreeEvent", cmdline[1], cmdline[2]])
- if ret != True:
- print("Couldn't run command! %s" % ret)
- return
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return
-
- shutdown = 0
-
- gtkgui = gtkthread(shutdown)
- gtkgui.start()
-
- gtk.gdk.threads_enter()
- dep = DepExplorer()
- pbar = ProgressBar(dep)
- pbar.connect("delete-event", gtk.main_quit)
- gtk.gdk.threads_leave()
-
- progress_total = 0
- while True:
- try:
- event = eventHandler.waitEvent(0.25)
- if gtkthread.quit.isSet():
- server.runCommand(["stateStop"])
- break
-
- if event is None:
- continue
-
- if isinstance(event, bb.event.CacheLoadStarted):
- progress_total = event.total
- gtk.gdk.threads_enter()
- pbar.set_title("Loading Cache")
- pbar.update(0, progress_total)
- gtk.gdk.threads_leave()
-
- if isinstance(event, bb.event.CacheLoadProgress):
- x = event.current
- gtk.gdk.threads_enter()
- pbar.update(x, progress_total)
- gtk.gdk.threads_leave()
- continue
-
- if isinstance(event, bb.event.CacheLoadCompleted):
- gtk.gdk.threads_enter()
- pbar.update(progress_total, progress_total)
- gtk.gdk.threads_leave()
- continue
-
- if isinstance(event, bb.event.ParseStarted):
- progress_total = event.total
- gtk.gdk.threads_enter()
- pbar.set_title("Processing recipes")
- pbar.update(0, progress_total)
- gtk.gdk.threads_leave()
-
- if isinstance(event, bb.event.ParseProgress):
- x = event.current
- gtk.gdk.threads_enter()
- pbar.update(x, progress_total)
- gtk.gdk.threads_leave()
- continue
-
- if isinstance(event, bb.event.ParseCompleted):
- pbar.hide()
- continue
-
- if isinstance(event, bb.event.DepTreeGenerated):
- gtk.gdk.threads_enter()
- parse(event._depgraph, dep.pkg_model, dep.depends_model)
- gtk.gdk.threads_leave()
-
- if isinstance(event, bb.command.CommandCompleted):
- continue
-
- if isinstance(event, bb.command.CommandFailed):
- print("Command execution failed: %s" % event.error)
- return event.exitcode
-
- if isinstance(event, bb.command.CommandExit):
- return event.exitcode
-
- if isinstance(event, bb.cooker.CookerExit):
- break
-
- continue
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- except KeyboardInterrupt:
- if shutdown == 2:
- print("\nThird Keyboard Interrupt, exit.\n")
- break
- if shutdown == 1:
- print("\nSecond Keyboard Interrupt, stopping...\n")
- server.runCommand(["stateStop"])
- if shutdown == 0:
- print("\nKeyboard Interrupt, closing down...\n")
- server.runCommand(["stateShutdown"])
- shutdown = shutdown + 1
- pass
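-
-# A minimal sketch of the depgraph shape that parse() consumes; the package
-# names are made up, and real data arrives via bb.event.DepTreeGenerated:
-#
-#     depgraph = {
-#         "pn": {"busybox": {}, "ncurses": {}},
-#         "depends": {"busybox": ["ncurses"]},
-#         "rdepends-pn": {"busybox": ["ncurses"]},
-#     }
-#     parse(depgraph, dep.pkg_model, dep.depends_model)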
diff --git a/bitbake/lib/bb/ui/goggle.py b/bitbake/lib/bb/ui/goggle.py
deleted file mode 100644
index ec5a38dd4d..0000000000
--- a/bitbake/lib/bb/ui/goggle.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gobject
-import gtk
-import xmlrpclib
-from bb.ui.crumbs.runningbuild import RunningBuildTreeView, RunningBuild
-from bb.ui.crumbs.progress import ProgressBar
-
-import Queue
-
-
-def event_handle_idle_func (eventHandler, build, pbar):
-
- # Consume as many messages as we can in the time available to us
- event = eventHandler.getEvent()
- while event:
- build.handle_event (event, pbar)
- event = eventHandler.getEvent()
-
- return True
-
-def scroll_tv_cb (model, path, iter, view):
- view.scroll_to_cell (path)
-
-
-# @todo hook these into the GUI so the user has feedback...
-def running_build_failed_cb (running_build):
- pass
-
-
-def running_build_succeeded_cb (running_build):
- pass
-
-
-class MainWindow (gtk.Window):
- def __init__ (self):
- gtk.Window.__init__ (self, gtk.WINDOW_TOPLEVEL)
-
- # Setup tree view and the scrolled window
- scrolled_window = gtk.ScrolledWindow ()
- self.add (scrolled_window)
- self.cur_build_tv = RunningBuildTreeView()
- self.connect("delete-event", gtk.main_quit)
- self.set_default_size(640, 480)
- scrolled_window.add (self.cur_build_tv)
-
-
-def main (server, eventHandler):
- gobject.threads_init()
- gtk.gdk.threads_init()
-
- window = MainWindow ()
- window.show_all ()
- pbar = ProgressBar(window)
- pbar.connect("delete-event", gtk.main_quit)
-
- # Create the object for the current build
- running_build = RunningBuild ()
- window.cur_build_tv.set_model (running_build.model)
- running_build.model.connect("row-inserted", scroll_tv_cb, window.cur_build_tv)
- running_build.connect ("build-succeeded", running_build_succeeded_cb)
- running_build.connect ("build-failed", running_build_failed_cb)
-
- try:
- cmdline = server.runCommand(["getCmdLineAction"])
- if not cmdline:
- return 1
- ret = server.runCommand(cmdline)
- if ret != True:
- print("Couldn't get default commandline! %s" % ret)
- return 1
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return 1
-
- # Use a timeout function for probing the event queue to find out if we
- # have a message waiting for us.
- gobject.timeout_add (100,
- event_handle_idle_func,
- eventHandler,
- running_build,
- pbar)
-
- try:
- gtk.main()
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- finally:
- server.runCommand(["stateStop"])
-
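-# The polling pattern above in isolation: gobject.timeout_add() keeps
-# re-invoking its callback for as long as the callback returns True, which is
-# why event_handle_idle_func() returns True after draining the queue. A
-# minimal sketch with a stub event source (the stub is illustrative only):
-#
-#     class StubEvents(object):
-#         def __init__(self, events):
-#             self.events = list(events)
-#         def getEvent(self):
-#             return self.events.pop(0) if self.events else None
-#
-#     gobject.timeout_add(100, event_handle_idle_func,
-#                         StubEvents([]), running_build, pbar)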
diff --git a/bitbake/lib/bb/ui/knotty.py b/bitbake/lib/bb/ui/knotty.py
deleted file mode 100644
index 042dbe902c..0000000000
--- a/bitbake/lib/bb/ui/knotty.py
+++ /dev/null
@@ -1,248 +0,0 @@
-#
-# BitBake (No)TTY UI Implementation
-#
-# Handling output to TTYs or files (no TTY)
-#
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-from __future__ import division
-
-import os
-import sys
-import xmlrpclib
-import logging
-import progressbar
-import bb.msg
-from bb.ui import uihelper
-
-logger = logging.getLogger("BitBake")
-interactive = sys.stdout.isatty()
-
-class BBProgress(progressbar.ProgressBar):
- def __init__(self, msg, maxval):
- self.msg = msg
- widgets = [progressbar.Percentage(), ' ', progressbar.Bar(), ' ',
- progressbar.ETA()]
-
- progressbar.ProgressBar.__init__(self, maxval, [self.msg + ": "] + widgets)
-
-class NonInteractiveProgress(object):
- fobj = sys.stdout
-
- def __init__(self, msg, maxval):
- self.msg = msg
- self.maxval = maxval
-
- def start(self):
- self.fobj.write("%s..." % self.msg)
- self.fobj.flush()
- return self
-
- def update(self, value):
- pass
-
- def finish(self):
- self.fobj.write("done.\n")
- self.fobj.flush()
-
-def new_progress(msg, maxval):
- if interactive:
- return BBProgress(msg, maxval)
- else:
- return NonInteractiveProgress(msg, maxval)
-
-def main(server, eventHandler):
-
- # Get values of variables which control our output
- includelogs = server.runCommand(["getVariable", "BBINCLUDELOGS"])
- loglines = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
-
- helper = uihelper.BBUIHelper()
-
- console = logging.StreamHandler(sys.stdout)
- format = bb.msg.BBLogFormatter("%(levelname)s: %(message)s")
- console.setFormatter(format)
- logger.addHandler(console)
-
- try:
- cmdline = server.runCommand(["getCmdLineAction"])
- if not cmdline:
- return 1
- ret = server.runCommand(cmdline)
- if ret != True:
- print("Couldn't get default commandline! %s" % ret)
- return 1
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return 1
-
-
- parseprogress = None
- cacheprogress = None
- shutdown = 0
- return_value = 0
- while True:
- try:
- event = eventHandler.waitEvent(0.25)
- if event is None:
- continue
- helper.eventHandler(event)
- if isinstance(event, bb.runqueue.runQueueExitWait):
- if not shutdown:
- shutdown = 1
- if shutdown and helper.needUpdate:
- activetasks, failedtasks = helper.getTasks()
- if activetasks:
- print("Waiting for %s active tasks to finish:" % len(activetasks))
- for tasknum, task in enumerate(activetasks):
- print("%s: %s (pid %s)" % (tasknum, activetasks[task]["title"], task))
-
- if isinstance(event, logging.LogRecord):
- if event.levelno >= format.ERROR:
- return_value = 1
- # For "normal" logging conditions, don't show note logs from tasks
- # but do show them if the user has changed the default log level to
- # include verbose/debug messages
- if logger.getEffectiveLevel() > format.VERBOSE:
- if event.taskpid != 0 and event.levelno <= format.NOTE:
- continue
- logger.handle(event)
- continue
-
- if isinstance(event, bb.build.TaskFailed):
- return_value = 1
- logfile = event.logfile
- if logfile and os.path.exists(logfile):
- print("ERROR: Logfile of failure stored in: %s" % logfile)
-                    if 1 or includelogs:    # "1 or" forces the log dump even when BBINCLUDELOGS is unset
- print("Log data follows:")
- f = open(logfile, "r")
- lines = []
- while True:
- l = f.readline()
- if l == '':
- break
- l = l.rstrip()
- if loglines:
- lines.append(' | %s' % l)
- if len(lines) > int(loglines):
- lines.pop(0)
- else:
- print('| %s' % l)
- f.close()
- if lines:
- for line in lines:
- print(line)
- if isinstance(event, bb.build.TaskBase):
- logger.info(event._message)
- continue
- if isinstance(event, bb.event.ParseStarted):
- parseprogress = new_progress("Parsing recipes", event.total).start()
- continue
- if isinstance(event, bb.event.ParseProgress):
- parseprogress.update(event.current)
- continue
- if isinstance(event, bb.event.ParseCompleted):
- parseprogress.finish()
- print(("Parsing of %d .bb files complete (%d cached, %d parsed). %d targets, %d skipped, %d masked, %d errors."
- % ( event.total, event.cached, event.parsed, event.virtuals, event.skipped, event.masked, event.errors)))
- continue
-
- if isinstance(event, bb.event.CacheLoadStarted):
- cacheprogress = new_progress("Loading cache", event.total).start()
- continue
- if isinstance(event, bb.event.CacheLoadProgress):
- cacheprogress.update(event.current)
- continue
- if isinstance(event, bb.event.CacheLoadCompleted):
- cacheprogress.finish()
- print("Loaded %d entries from dependency cache." % event.num_entries)
- continue
-
- if isinstance(event, bb.command.CommandCompleted):
- break
- if isinstance(event, bb.command.CommandFailed):
- return_value = event.exitcode
- logger.error("Command execution failed: %s", event.error)
- break
- if isinstance(event, bb.command.CommandExit):
- return_value = event.exitcode
- continue
- if isinstance(event, bb.cooker.CookerExit):
- break
- if isinstance(event, bb.event.MultipleProviders):
- logger.info("multiple providers are available for %s%s (%s)", event._is_runtime and "runtime " or "",
- event._item,
- ", ".join(event._candidates))
- logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
- continue
- if isinstance(event, bb.event.NoProvider):
- if event._runtime:
- r = "R"
- else:
- r = ""
-
- if event._dependees:
- logger.error("Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)", r, event._item, ", ".join(event._dependees), r)
- else:
- logger.error("Nothing %sPROVIDES '%s'", r, event._item)
- continue
-
- if isinstance(event, bb.runqueue.runQueueTaskStarted):
- if event.noexec:
- tasktype = 'noexec task'
- else:
- tasktype = 'task'
- logger.info("Running %s %s of %s (ID: %s, %s)",
- tasktype,
- event.stats.completed + event.stats.active +
- event.stats.failed + 1,
- event.stats.total, event.taskid, event.taskstring)
- continue
-
- if isinstance(event, bb.runqueue.runQueueTaskFailed):
- logger.error("Task %s (%s) failed with exit code '%s'",
- event.taskid, event.taskstring, event.exitcode)
- continue
-
- # ignore
- if isinstance(event, (bb.event.BuildBase,
- bb.event.StampUpdate,
- bb.event.ConfigParsed,
- bb.event.RecipeParsed,
- bb.runqueue.runQueueEvent,
- bb.runqueue.runQueueExitWait)):
- continue
-
- logger.error("Unknown event: %s", event)
-
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
- except KeyboardInterrupt:
- if shutdown == 2:
- print("\nThird Keyboard Interrupt, exit.\n")
- break
- if shutdown == 1:
- print("\nSecond Keyboard Interrupt, stopping...\n")
- server.runCommand(["stateStop"])
- if shutdown == 0:
- print("\nKeyboard Interrupt, closing down...\n")
- server.runCommand(["stateShutdown"])
- shutdown = shutdown + 1
- pass
- return return_value
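-
-# new_progress() on its own: a real progressbar.ProgressBar on a TTY, the
-# plain "msg... done." writer otherwise. A minimal sketch, assuming the
-# progressbar module and the bitbake libraries are importable:
-if __name__ == "__main__":
-    p = new_progress("Parsing recipes", 100).start()
-    for i in range(100):
-        p.update(i + 1)
-    p.finish()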
diff --git a/bitbake/lib/bb/ui/ncurses.py b/bitbake/lib/bb/ui/ncurses.py
deleted file mode 100644
index 469f1b7309..0000000000
--- a/bitbake/lib/bb/ui/ncurses.py
+++ /dev/null
@@ -1,352 +0,0 @@
-#
-# BitBake Curses UI Implementation
-#
-# Implements an ncurses frontend for the BitBake utility.
-#
-# Copyright (C) 2006 Michael 'Mickey' Lauer
-# Copyright (C) 2006-2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-"""
- We have the following windows:
-
- 1.) Main Window: Shows what we are ultimately building and how far we are. Includes status bar
- 2.) Thread Activity Window: Shows one status line for every concurrent bitbake thread.
-        3.) Command Line Window: Contains an interactive command line where you can interact with BitBake.
-
-    The basic window layout looks like this:
-
- |---------------------------------------------------------|
- | <Main Window> | <Thread Activity Window> |
- | | 0: foo do_compile complete|
- | Building Gtk+-2.6.10 | 1: bar do_patch complete |
- | Status: 60% | ... |
- | | ... |
- | | ... |
- |---------------------------------------------------------|
- |<Command Line Window> |
- |>>> which virtual/kernel |
- |openzaurus-kernel |
- |>>> _ |
- |---------------------------------------------------------|
-
-"""
-
-
-from __future__ import division
-import logging
-import os, sys, curses, itertools, time
-import bb
-import xmlrpclib
-from bb import ui
-from bb.ui import uihelper
-
-parsespin = itertools.cycle( r'|/-\\' )
-
-X = 0
-Y = 1
-WIDTH = 2
-HEIGHT = 3
-
-MAXSTATUSLENGTH = 32
-
-class NCursesUI:
- """
- NCurses UI Class
- """
- class Window:
- """Base Window Class"""
- def __init__( self, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
- self.win = curses.newwin( height, width, y, x )
- self.dimensions = ( x, y, width, height )
- """
- if curses.has_colors():
- color = 1
- curses.init_pair( color, fg, bg )
- self.win.bkgdset( ord(' '), curses.color_pair(color) )
- else:
- self.win.bkgdset( ord(' '), curses.A_BOLD )
- """
- self.erase()
- self.setScrolling()
- self.win.noutrefresh()
-
- def erase( self ):
- self.win.erase()
-
- def setScrolling( self, b = True ):
- self.win.scrollok( b )
- self.win.idlok( b )
-
- def setBoxed( self ):
- self.boxed = True
- self.win.box()
- self.win.noutrefresh()
-
- def setText( self, x, y, text, *args ):
- self.win.addstr( y, x, text, *args )
- self.win.noutrefresh()
-
- def appendText( self, text, *args ):
- self.win.addstr( text, *args )
- self.win.noutrefresh()
-
- def drawHline( self, y ):
- self.win.hline( y, 0, curses.ACS_HLINE, self.dimensions[WIDTH] )
- self.win.noutrefresh()
-
- class DecoratedWindow( Window ):
- """Base class for windows with a box and a title bar"""
- def __init__( self, title, x, y, width, height, fg=curses.COLOR_BLACK, bg=curses.COLOR_WHITE ):
- NCursesUI.Window.__init__( self, x+1, y+3, width-2, height-4, fg, bg )
- self.decoration = NCursesUI.Window( x, y, width, height, fg, bg )
- self.decoration.setBoxed()
- self.decoration.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
- self.setTitle( title )
-
- def setTitle( self, title ):
- self.decoration.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
-
- #-------------------------------------------------------------------------#
-# class TitleWindow( Window ):
- #-------------------------------------------------------------------------#
-# """Title Window"""
-# def __init__( self, x, y, width, height ):
-# NCursesUI.Window.__init__( self, x, y, width, height )
-# version = bb.__version__
-# title = "BitBake %s" % version
-# credit = "(C) 2003-2007 Team BitBake"
-# #self.win.hline( 2, 1, curses.ACS_HLINE, width-2 )
-# self.win.border()
-# self.setText( 1, 1, title.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
-# self.setText( 1, 2, credit.center( self.dimensions[WIDTH]-2 ), curses.A_BOLD )
-
- #-------------------------------------------------------------------------#
- class ThreadActivityWindow( DecoratedWindow ):
- #-------------------------------------------------------------------------#
- """Thread Activity Window"""
- def __init__( self, x, y, width, height ):
- NCursesUI.DecoratedWindow.__init__( self, "Thread Activity", x, y, width, height )
-
- def setStatus( self, thread, text ):
- line = "%02d: %s" % ( thread, text )
- width = self.dimensions[WIDTH]
- if ( len(line) > width ):
- line = line[:width-3] + "..."
- else:
- line = line.ljust( width )
- self.setText( 0, thread, line )
-
- #-------------------------------------------------------------------------#
- class MainWindow( DecoratedWindow ):
- #-------------------------------------------------------------------------#
- """Main Window"""
- def __init__( self, x, y, width, height ):
- self.StatusPosition = width - MAXSTATUSLENGTH
- NCursesUI.DecoratedWindow.__init__( self, None, x, y, width, height )
- curses.nl()
-
- def setTitle( self, title ):
- title = "BitBake %s" % bb.__version__
- self.decoration.setText( 2, 1, title, curses.A_BOLD )
- self.decoration.setText( self.StatusPosition - 8, 1, "Status:", curses.A_BOLD )
-
- def setStatus(self, status):
- while len(status) < MAXSTATUSLENGTH:
- status = status + " "
- self.decoration.setText( self.StatusPosition, 1, status, curses.A_BOLD )
-
-
- #-------------------------------------------------------------------------#
- class ShellOutputWindow( DecoratedWindow ):
- #-------------------------------------------------------------------------#
- """Interactive Command Line Output"""
- def __init__( self, x, y, width, height ):
- NCursesUI.DecoratedWindow.__init__( self, "Command Line Window", x, y, width, height )
-
- #-------------------------------------------------------------------------#
- class ShellInputWindow( Window ):
- #-------------------------------------------------------------------------#
- """Interactive Command Line Input"""
- def __init__( self, x, y, width, height ):
- NCursesUI.Window.__init__( self, x, y, width, height )
-
-# TODO: move this back to the top of the file: from curses.textpad import Textbox
-# self.textbox = Textbox( self.win )
-# t = threading.Thread()
-# t.run = self.textbox.edit
-# t.start()
-
- #-------------------------------------------------------------------------#
- def main(self, stdscr, server, eventHandler):
- #-------------------------------------------------------------------------#
- height, width = stdscr.getmaxyx()
-
- # for now split it like that:
- # MAIN_y + THREAD_y = 2/3 screen at the top
-        # MAIN_x = 2/3 left, THREAD_x = 1/3 right
- # CLI_y = 1/3 of screen at the bottom
- # CLI_x = full
-
- main_left = 0
- main_top = 0
- main_height = ( height // 3 * 2 )
- main_width = ( width // 3 ) * 2
- clo_left = main_left
- clo_top = main_top + main_height
- clo_height = height - main_height - main_top - 1
- clo_width = width
- cli_left = main_left
- cli_top = clo_top + clo_height
- cli_height = 1
- cli_width = width
- thread_left = main_left + main_width
- thread_top = main_top
- thread_height = main_height
- thread_width = width - main_width
-
- #tw = self.TitleWindow( 0, 0, width, main_top )
- mw = self.MainWindow( main_left, main_top, main_width, main_height )
- taw = self.ThreadActivityWindow( thread_left, thread_top, thread_width, thread_height )
- clo = self.ShellOutputWindow( clo_left, clo_top, clo_width, clo_height )
- cli = self.ShellInputWindow( cli_left, cli_top, cli_width, cli_height )
- cli.setText( 0, 0, "BB>" )
-
- mw.setStatus("Idle")
-
- helper = uihelper.BBUIHelper()
- shutdown = 0
-
- try:
- cmdline = server.runCommand(["getCmdLineAction"])
- if not cmdline:
- return
- ret = server.runCommand(cmdline)
- if ret != True:
- print("Couldn't get default commandlind! %s" % ret)
- return
- except xmlrpclib.Fault as x:
- print("XMLRPC Fault getting commandline:\n %s" % x)
- return
-
- exitflag = False
- while not exitflag:
- try:
- event = eventHandler.waitEvent(0.25)
- if not event:
- continue
-
- helper.eventHandler(event)
- if isinstance(event, bb.build.TaskBase):
- mw.appendText("NOTE: %s\n" % event._message)
- if isinstance(event, logging.LogRecord):
- mw.appendText(logging.getLevelName(event.levelno) + ': ' + event.getMessage() + '\n')
-
- if isinstance(event, bb.event.CacheLoadStarted):
- self.parse_total = event.total
- if isinstance(event, bb.event.CacheLoadProgress):
- x = event.current
- y = self.parse_total
- mw.setStatus("Loading Cache: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
- if isinstance(event, bb.event.CacheLoadCompleted):
- mw.setStatus("Idle")
- mw.appendText("Loaded %d entries from dependency cache.\n"
- % ( event.num_entries))
-
- if isinstance(event, bb.event.ParseStarted):
- self.parse_total = event.total
- if isinstance(event, bb.event.ParseProgress):
- x = event.current
- y = self.parse_total
- mw.setStatus("Parsing Recipes: %s [%2d %%]" % ( next(parsespin), x*100/y ) )
- if isinstance(event, bb.event.ParseCompleted):
- mw.setStatus("Idle")
- mw.appendText("Parsing finished. %d cached, %d parsed, %d skipped, %d masked.\n"
- % ( event.cached, event.parsed, event.skipped, event.masked ))
-
-# if isinstance(event, bb.build.TaskFailed):
-# if event.logfile:
-# if data.getVar("BBINCLUDELOGS", d):
-# bb.msg.error(bb.msg.domain.Build, "log data follows (%s)" % logfile)
-# number_of_lines = data.getVar("BBINCLUDELOGS_LINES", d)
-# if number_of_lines:
-# os.system('tail -n%s %s' % (number_of_lines, logfile))
-# else:
-# f = open(logfile, "r")
-# while True:
-# l = f.readline()
-# if l == '':
-# break
-# l = l.rstrip()
-# print '| %s' % l
-# f.close()
-# else:
-# bb.msg.error(bb.msg.domain.Build, "see log in %s" % logfile)
-
- if isinstance(event, bb.command.CommandCompleted):
- # stop so the user can see the result of the build, but
- # also allow them to now exit with a single ^C
- shutdown = 2
- if isinstance(event, bb.command.CommandFailed):
- mw.appendText("Command execution failed: %s" % event.error)
- time.sleep(2)
- exitflag = True
- if isinstance(event, bb.command.CommandExit):
- exitflag = True
- if isinstance(event, bb.cooker.CookerExit):
- exitflag = True
-
- if helper.needUpdate:
- activetasks, failedtasks = helper.getTasks()
- taw.erase()
- taw.setText(0, 0, "")
- if activetasks:
- taw.appendText("Active Tasks:\n")
- for task in activetasks.itervalues():
- taw.appendText(task["title"] + '\n')
- if failedtasks:
- taw.appendText("Failed Tasks:\n")
- for task in failedtasks:
- taw.appendText(task["title"] + '\n')
-
- curses.doupdate()
- except EnvironmentError as ioerror:
- # ignore interrupted io
- if ioerror.args[0] == 4:
- pass
-
- except KeyboardInterrupt:
- if shutdown == 2:
- mw.appendText("Third Keyboard Interrupt, exit.\n")
- exitflag = True
- if shutdown == 1:
- mw.appendText("Second Keyboard Interrupt, stopping...\n")
- server.runCommand(["stateStop"])
- if shutdown == 0:
- mw.appendText("Keyboard Interrupt, closing down...\n")
- server.runCommand(["stateShutdown"])
- shutdown = shutdown + 1
- pass
-
-def main(server, eventHandler):
- if not os.isatty(sys.stdout.fileno()):
- print("FATAL: Unable to run 'ncurses' UI without a TTY.")
- return
- ui = NCursesUI()
- try:
- curses.wrapper(ui.main, server, eventHandler)
- except:
- import traceback
- traceback.print_exc()
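-
-# curses.wrapper() handles terminal setup and teardown and passes the screen
-# object as the first argument, which is why NCursesUI.main() takes stdscr
-# ahead of server and eventHandler. The same pattern in miniature:
-#
-#     def demo(stdscr):
-#         stdscr.addstr(0, 0, "hello from curses")
-#         stdscr.getch()
-#     curses.wrapper(demo)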
diff --git a/bitbake/lib/bb/ui/puccho.py b/bitbake/lib/bb/ui/puccho.py
deleted file mode 100644
index 3ce4590c16..0000000000
--- a/bitbake/lib/bb/ui/puccho.py
+++ /dev/null
@@ -1,425 +0,0 @@
-#
-# BitBake Graphical GTK User Interface
-#
-# Copyright (C) 2008 Intel Corporation
-#
-# Authored by Rob Bradford <rob@linux.intel.com>
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import gtk
-import gobject
-import gtk.glade
-import threading
-import urllib2
-import os
-import contextlib
-
-from bb.ui.crumbs.buildmanager import BuildManager, BuildConfiguration
-from bb.ui.crumbs.buildmanager import BuildManagerTreeView
-
-from bb.ui.crumbs.runningbuild import RunningBuild, RunningBuildTreeView
-
-# The metadata loader is used by the BuildSetupDialog to download the
-# available options to populate the dialog
-class MetaDataLoader(gobject.GObject):
- """ This class provides the mechanism for loading the metadata (the
- fetching and parsing) from a given URL. The metadata encompasses details
- on what machines are available. The distribution and images available for
- the machine and the the uris to use for building the given machine."""
- __gsignals__ = {
- 'success' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- ()),
- 'error' : (gobject.SIGNAL_RUN_LAST,
- gobject.TYPE_NONE,
- (gobject.TYPE_STRING,))
- }
-
- # We use these little helper functions to ensure that we take the gdk lock
- # when emitting the signal. These functions are called as idles (so that
-    # they happen in the gtk / main thread's main loop).
- def emit_error_signal (self, remark):
- gtk.gdk.threads_enter()
- self.emit ("error", remark)
- gtk.gdk.threads_leave()
-
- def emit_success_signal (self):
- gtk.gdk.threads_enter()
- self.emit ("success")
- gtk.gdk.threads_leave()
-
- def __init__ (self):
- gobject.GObject.__init__ (self)
-
- class LoaderThread(threading.Thread):
- """ This class provides an asynchronous loader for the metadata (by
- using threads and signals). This is useful since the metadata may be
- at a remote URL."""
- class LoaderImportException (Exception):
- pass
-
- def __init__(self, loader, url):
- threading.Thread.__init__ (self)
- self.url = url
- self.loader = loader
-
- def run (self):
- result = {}
- try:
- with contextlib.closing (urllib2.urlopen (self.url)) as f:
-                    # Parse the metadata format, which is:
-                    # <machine>;<default distro>|<distro>...;<default image>|<image>...;<type##url>|...
- for line in f:
- components = line.split(";")
- if (len (components) < 4):
- raise MetaDataLoader.LoaderThread.LoaderImportException
- machine = components[0]
- distros = components[1].split("|")
- images = components[2].split("|")
- urls = components[3].split("|")
-
- result[machine] = (distros, images, urls)
-
- # Create an object representing this *potential*
- # configuration. It can become concrete if the machine, distro
- # and image are all chosen in the UI
- configuration = BuildConfiguration()
- configuration.metadata_url = self.url
- configuration.machine_options = result
- self.loader.configuration = configuration
-
- # Emit that we've actually got a configuration
- gobject.idle_add (MetaDataLoader.emit_success_signal,
- self.loader)
-
- except MetaDataLoader.LoaderThread.LoaderImportException as e:
- gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
- "Repository metadata corrupt")
- except Exception as e:
- gobject.idle_add (MetaDataLoader.emit_error_signal, self.loader,
- "Unable to download repository metadata")
- print(e)
-
- def try_fetch_from_url (self, url):
- # Try and download the metadata. Firing a signal if successful
- thread = MetaDataLoader.LoaderThread(self, url)
- thread.start()
-
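-# A made-up metadata line in the format documented in LoaderThread.run()
-# (purely illustrative, not a real repository):
-#
-#     qemuarm;poky|poky-tiny;core-image-minimal;git##git://example.com/meta
-#
-# which LoaderThread.run() would parse into:
-#
-#     result["qemuarm"] = (["poky", "poky-tiny"],
-#                          ["core-image-minimal"],
-#                          ["git##git://example.com/meta"])
-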
-class BuildSetupDialog (gtk.Dialog):
- RESPONSE_BUILD = 1
-
- # A little helper method that just sets the states on the widgets based on
- # whether we've got good metadata or not.
- def set_configurable (self, configurable):
- if (self.configurable == configurable):
- return
-
- self.configurable = configurable
- for widget in self.conf_widgets:
- widget.set_sensitive (configurable)
-
- if not configurable:
- self.machine_combo.set_active (-1)
- self.distribution_combo.set_active (-1)
- self.image_combo.set_active (-1)
-
- # GTK widget callbacks
- def refresh_button_clicked (self, button):
- # Refresh button clicked.
-
- url = self.location_entry.get_chars (0, -1)
- self.loader.try_fetch_from_url(url)
-
- def repository_entry_editable_changed (self, entry):
- if (len (entry.get_chars (0, -1)) > 0):
- self.refresh_button.set_sensitive (True)
- else:
- self.refresh_button.set_sensitive (False)
- self.clear_status_message()
-
- # If we were previously configurable we are no longer since the
- # location entry has been changed
- self.set_configurable (False)
-
- def machine_combo_changed (self, combobox):
- active_iter = combobox.get_active_iter()
-
- if not active_iter:
- return
-
- model = combobox.get_model()
-
- if model:
- chosen_machine = model.get (active_iter, 0)[0]
-
- (distros_model, images_model) = \
- self.loader.configuration.get_distro_and_images_models (chosen_machine)
-
- self.distribution_combo.set_model (distros_model)
- self.image_combo.set_model (images_model)
-
- # Callbacks from the loader
- def loader_success_cb (self, loader):
- self.status_image.set_from_icon_name ("info",
- gtk.ICON_SIZE_BUTTON)
- self.status_image.show()
- self.status_label.set_label ("Repository metadata successfully downloaded")
-
- # Set the models on the combo boxes based on the models generated from
- # the configuration that the loader has created
-
- # We just need to set the machine here, that then determines the
- # distro and image options. Cunning huh? :-)
-
- self.configuration = self.loader.configuration
- model = self.configuration.get_machines_model ()
- self.machine_combo.set_model (model)
-
- self.set_configurable (True)
-
- def loader_error_cb (self, loader, message):
- self.status_image.set_from_icon_name ("error",
- gtk.ICON_SIZE_BUTTON)
- self.status_image.show()
- self.status_label.set_text ("Error downloading repository metadata")
- for widget in self.conf_widgets:
- widget.set_sensitive (False)
-
- def clear_status_message (self):
- self.status_image.hide()
- self.status_label.set_label (
- """<i>Enter the repository location and press _Refresh</i>""")
-
- def __init__ (self):
- gtk.Dialog.__init__ (self)
-
- # Cancel
- self.add_button (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL)
-
- # Build
- button = gtk.Button ("_Build", None, True)
- image = gtk.Image ()
- image.set_from_stock (gtk.STOCK_EXECUTE, gtk.ICON_SIZE_BUTTON)
- button.set_image (image)
- self.add_action_widget (button, BuildSetupDialog.RESPONSE_BUILD)
- button.show_all ()
-
- # Pull in *just* the table from the Glade XML data.
- gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
- root = "build_table")
- table = gxml.get_widget ("build_table")
- self.vbox.pack_start (table, True, False, 0)
-
- # Grab all the widgets that we need to turn on/off when we refresh...
- self.conf_widgets = []
- self.conf_widgets += [gxml.get_widget ("machine_label")]
- self.conf_widgets += [gxml.get_widget ("distribution_label")]
- self.conf_widgets += [gxml.get_widget ("image_label")]
- self.conf_widgets += [gxml.get_widget ("machine_combo")]
- self.conf_widgets += [gxml.get_widget ("distribution_combo")]
- self.conf_widgets += [gxml.get_widget ("image_combo")]
-
- # Grab the status widgets
- self.status_image = gxml.get_widget ("status_image")
- self.status_label = gxml.get_widget ("status_label")
-
- # Grab the refresh button and connect to the clicked signal
- self.refresh_button = gxml.get_widget ("refresh_button")
- self.refresh_button.connect ("clicked", self.refresh_button_clicked)
-
- # Grab the location entry and connect to editable::changed
- self.location_entry = gxml.get_widget ("location_entry")
- self.location_entry.connect ("changed",
- self.repository_entry_editable_changed)
-
- # Grab the machine combo and hook onto the changed signal. This then
- # allows us to populate the distro and image combos
- self.machine_combo = gxml.get_widget ("machine_combo")
- self.machine_combo.connect ("changed", self.machine_combo_changed)
-
- # Setup the combo
- cell = gtk.CellRendererText()
- self.machine_combo.pack_start(cell, True)
- self.machine_combo.add_attribute(cell, 'text', 0)
-
- # Grab the distro and image combos. We need these to populate with
- # models once the machine is chosen
- self.distribution_combo = gxml.get_widget ("distribution_combo")
- cell = gtk.CellRendererText()
- self.distribution_combo.pack_start(cell, True)
- self.distribution_combo.add_attribute(cell, 'text', 0)
-
- self.image_combo = gxml.get_widget ("image_combo")
- cell = gtk.CellRendererText()
- self.image_combo.pack_start(cell, True)
- self.image_combo.add_attribute(cell, 'text', 0)
-
- # Put the default descriptive text in the status box
- self.clear_status_message()
-
-        # Mark as non-configurable; this just greys out the widgets the
-        # user can't yet use
- self.configurable = False
- self.set_configurable(False)
-
- # Show the table
- table.show_all ()
-
- # The loader and some signals connected to it to update the status
- # area
- self.loader = MetaDataLoader()
- self.loader.connect ("success", self.loader_success_cb)
- self.loader.connect ("error", self.loader_error_cb)
-
- def update_configuration (self):
-        """ A poorly named function, but it updates the internal configuration
-        from the widgets. This makes the configuration concrete so that it
-        can then be used for building """
- # Extract the chosen machine from the combo
- model = self.machine_combo.get_model()
- active_iter = self.machine_combo.get_active_iter()
- if (active_iter):
- self.configuration.machine = model.get(active_iter, 0)[0]
-
- # Extract the chosen distro from the combo
- model = self.distribution_combo.get_model()
- active_iter = self.distribution_combo.get_active_iter()
- if (active_iter):
- self.configuration.distro = model.get(active_iter, 0)[0]
-
- # Extract the chosen image from the combo
- model = self.image_combo.get_model()
- active_iter = self.image_combo.get_active_iter()
- if (active_iter):
- self.configuration.image = model.get(active_iter, 0)[0]
-
-# This function pulls events out from the event queue and then pushes them
-# into the RunningBuild (which in turn updates the progress tree view.)
-#
-# TODO: Should be a method on the RunningBuild class
-def event_handle_timeout (eventHandler, build):
- # Consume as many messages as we can ...
- event = eventHandler.getEvent()
- while event:
- build.handle_event (event)
- event = eventHandler.getEvent()
- return True
-
-class MainWindow (gtk.Window):
-
- # Callback that gets fired when the user hits a button in the
- # BuildSetupDialog.
- def build_dialog_box_response_cb (self, dialog, response_id):
- conf = None
- if (response_id == BuildSetupDialog.RESPONSE_BUILD):
- dialog.update_configuration()
- print(dialog.configuration.machine, dialog.configuration.distro, \
- dialog.configuration.image)
- conf = dialog.configuration
-
- dialog.destroy()
-
- if conf:
- self.manager.do_build (conf)
-
- def build_button_clicked_cb (self, button):
- dialog = BuildSetupDialog ()
-
- # For some unknown reason Dialog.run causes nice little deadlocks ... :-(
- dialog.connect ("response", self.build_dialog_box_response_cb)
- dialog.show()
-
- def __init__ (self):
- gtk.Window.__init__ (self)
-
- # Pull in *just* the main vbox from the Glade XML data and then pack
- # that inside the window
- gxml = gtk.glade.XML (os.path.dirname(__file__) + "/crumbs/puccho.glade",
- root = "main_window_vbox")
- vbox = gxml.get_widget ("main_window_vbox")
- self.add (vbox)
-
- # Create the tree views for the build manager view and the progress view
- self.build_manager_view = BuildManagerTreeView()
- self.running_build_view = RunningBuildTreeView()
-
- # Grab the scrolled windows that we put the tree views into
- self.results_scrolledwindow = gxml.get_widget ("results_scrolledwindow")
- self.progress_scrolledwindow = gxml.get_widget ("progress_scrolledwindow")
-
- # Put the tree views inside ...
- self.results_scrolledwindow.add (self.build_manager_view)
- self.progress_scrolledwindow.add (self.running_build_view)
-
- # Hook up the build button...
- self.build_button = gxml.get_widget ("main_toolbutton_build")
- self.build_button.connect ("clicked", self.build_button_clicked_cb)
-
-# I'm not very happy about the current ownership of the RunningBuild. I have
-# my suspicions that this object should be held by the BuildManager since we
-# care about the signals in the manager
-
-def running_build_succeeded_cb (running_build, manager):
- # Notify the manager that a build has succeeded. This is necessary as part
- # of the 'hack' that we use for making the row in the model / view
- # representing the ongoing build change into a row representing the
-    # completed build. Since we know only one build can be running at a time,
-    # we can handle this.
-
- # FIXME: Refactor all this so that the RunningBuild is owned by the
- # BuildManager. It can then hook onto the signals directly and drive
- # interesting things it cares about.
- manager.notify_build_succeeded ()
- print("build succeeded")
-
-def running_build_failed_cb (running_build, manager):
- # As above
- print("build failed")
- manager.notify_build_failed ()
-
-def main (server, eventHandler):
- # Initialise threading...
- gobject.threads_init()
- gtk.gdk.threads_init()
-
- main_window = MainWindow ()
- main_window.show_all ()
-
- # Set up the build manager stuff in general
- builds_dir = os.path.join (os.getcwd(), "results")
- manager = BuildManager (server, builds_dir)
- main_window.build_manager_view.set_model (manager.model)
-
- # Do the running build setup
- running_build = RunningBuild ()
- main_window.running_build_view.set_model (running_build.model)
- running_build.connect ("build-succeeded", running_build_succeeded_cb,
- manager)
- running_build.connect ("build-failed", running_build_failed_cb, manager)
-
- # We need to save the manager into the MainWindow so that the toolbar
- # button can use it.
- # FIXME: Refactor ?
- main_window.manager = manager
-
- # Use a timeout function for probing the event queue to find out if we
- # have a message waiting for us.
- gobject.timeout_add (200,
- event_handle_timeout,
- eventHandler,
- running_build)
-
- gtk.main()
diff --git a/bitbake/lib/bb/ui/uievent.py b/bitbake/lib/bb/ui/uievent.py
deleted file mode 100644
index 2fef4e4659..0000000000
--- a/bitbake/lib/bb/ui/uievent.py
+++ /dev/null
@@ -1,127 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-
-"""
-Use this class to fork off a thread to receive event callbacks from the bitbake
-server and queue them for the UI to process. This mechanism must be used to
-avoid client/server deadlocks.
-"""
-
-import socket, threading, pickle
-from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
-
-class BBUIEventQueue:
- def __init__(self, BBServer):
-
- self.eventQueue = []
- self.eventQueueLock = threading.Lock()
- self.eventQueueNotify = threading.Event()
-
- self.BBServer = BBServer
-
-        self.t = threading.Thread(target = self.startCallbackHandler)
-        self.t.setDaemon(True)
- self.t.start()
-
- def getEvent(self):
-
- self.eventQueueLock.acquire()
-
- if len(self.eventQueue) == 0:
- self.eventQueueLock.release()
- return None
-
- item = self.eventQueue.pop(0)
-
- if len(self.eventQueue) == 0:
- self.eventQueueNotify.clear()
-
- self.eventQueueLock.release()
- return item
-
- def waitEvent(self, delay):
- self.eventQueueNotify.wait(delay)
- return self.getEvent()
-
- def queue_event(self, event):
- self.eventQueueLock.acquire()
- self.eventQueue.append(event)
- self.eventQueueNotify.set()
- self.eventQueueLock.release()
-
- def send_event(self, event):
- self.queue_event(pickle.loads(event))
-
- def startCallbackHandler(self):
-
- server = UIXMLRPCServer()
- self.host, self.port = server.socket.getsockname()
-
- server.register_function( self.system_quit, "event.quit" )
- server.register_function( self.send_event, "event.send" )
- server.socket.settimeout(1)
-
- self.EventHandle = self.BBServer.registerEventHandler(self.host, self.port)
-
- self.server = server
- while not server.quit:
- server.handle_request()
- server.server_close()
-
- def system_quit( self ):
- """
- Shut down the callback thread
- """
- try:
- self.BBServer.unregisterEventHandler(self.EventHandle)
- except:
- pass
- self.server.quit = True
-
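-# Illustrative usage (a sketch; 'server' stands for a connected BB server
-# proxy and 'handle' for a hypothetical UI callback, neither is defined here):
-#
-#   >>> queue = BBUIEventQueue(server)
-#   >>> event = queue.waitEvent(0.25)   # block up to 250ms for an event
-#   >>> if event is not None:
-#   ...     handle(event)
-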
-class UIXMLRPCServer (SimpleXMLRPCServer):
-
- def __init__( self, interface = ("localhost", 0) ):
- self.quit = False
- SimpleXMLRPCServer.__init__( self,
- interface,
- requestHandler=SimpleXMLRPCRequestHandler,
- logRequests=False, allow_none=True)
-
- def get_request(self):
- while not self.quit:
- try:
- sock, addr = self.socket.accept()
- sock.settimeout(1)
- return (sock, addr)
- except socket.timeout:
- pass
- return (None, None)
-
- def close_request(self, request):
- if request is None:
- return
- SimpleXMLRPCServer.close_request(self, request)
-
- def process_request(self, request, client_address):
- if request is None:
- return
- SimpleXMLRPCServer.process_request(self, request, client_address)
-
diff --git a/bitbake/lib/bb/ui/uihelper.py b/bitbake/lib/bb/ui/uihelper.py
deleted file mode 100644
index 617d60db82..0000000000
--- a/bitbake/lib/bb/ui/uihelper.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-#
-# Copyright (C) 2006 - 2007 Michael 'Mickey' Lauer
-# Copyright (C) 2006 - 2007 Richard Purdie
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import bb.build
-
-class BBUIHelper:
- def __init__(self):
- self.needUpdate = False
- self.running_tasks = {}
- self.failed_tasks = []
-
- def eventHandler(self, event):
- if isinstance(event, bb.build.TaskStarted):
- self.running_tasks[event.pid] = { 'title' : "%s %s" % (event._package, event._task) }
- self.needUpdate = True
- if isinstance(event, bb.build.TaskSucceeded):
- del self.running_tasks[event.pid]
- self.needUpdate = True
- if isinstance(event, bb.build.TaskFailed):
- del self.running_tasks[event.pid]
- self.failed_tasks.append( { 'title' : "%s %s" % (event._package, event._task)})
- self.needUpdate = True
-
- def getTasks(self):
- self.needUpdate = False
- return (self.running_tasks, self.failed_tasks)
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py
deleted file mode 100644
index b2f8bb6f89..0000000000
--- a/bitbake/lib/bb/utils.py
+++ /dev/null
@@ -1,845 +0,0 @@
-# ex:ts=4:sw=4:sts=4:et
-# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*-
-"""
-BitBake Utility Functions
-"""
-
-# Copyright (C) 2004 Michael Lauer
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License version 2 as
-# published by the Free Software Foundation.
-#
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU General Public License for more details.
-#
-# You should have received a copy of the GNU General Public License along
-# with this program; if not, write to the Free Software Foundation, Inc.,
-# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
-
-import re, fcntl, os, string, stat, shutil, time
-import sys
-import errno
-import logging
-import bb
-import bb.msg
-from commands import getstatusoutput
-from contextlib import contextmanager
-
-logger = logging.getLogger("BitBake.Util")
-
-# Version comparison
-separators = ".-"
-
-# Context used in better_exec, eval
-_context = {
- "os": os,
- "bb": bb,
- "time": time,
-}
-
-def explode_version(s):
- r = []
- alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$')
- numeric_regexp = re.compile('^(\d+)(.*)$')
- while (s != ''):
- if s[0] in string.digits:
- m = numeric_regexp.match(s)
- r.append(int(m.group(1)))
- s = m.group(2)
- continue
- if s[0] in string.letters:
- m = alpha_regexp.match(s)
- r.append(m.group(1))
- s = m.group(2)
- continue
- r.append(s[0])
- s = s[1:]
- return r
-
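-# Illustrative examples (a sketch; outputs assume the implementation above):
-# a version string decomposes into a mixed list of ints, letter runs and
-# separator characters, e.g.:
-#
-#   >>> explode_version("1.0.2rc3")
-#   [1, '.', 0, '.', 2, 'rc', 3]
-#   >>> explode_version("2.6")
-#   [2, '.', 6]
-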
-def vercmp_part(a, b):
- va = explode_version(a)
- vb = explode_version(b)
- sa = False
- sb = False
- while True:
- if va == []:
- ca = None
- else:
- ca = va.pop(0)
- if vb == []:
- cb = None
- else:
- cb = vb.pop(0)
- if ca == None and cb == None:
- return 0
-
- if isinstance(ca, basestring):
- sa = ca in separators
- if isinstance(cb, basestring):
- sb = cb in separators
- if sa and not sb:
- return -1
- if not sa and sb:
- return 1
-
- if ca > cb:
- return 1
- if ca < cb:
- return -1
-
-def vercmp(ta, tb):
- (ea, va, ra) = ta
- (eb, vb, rb) = tb
-
- r = int(ea or 0) - int(eb or 0)
- if (r == 0):
- r = vercmp_part(va, vb)
- if (r == 0):
- r = vercmp_part(ra, rb)
- return r
-
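-# Illustrative comparison (a sketch; each argument is an (epoch, version,
-# revision) triple and the sign of the result gives the ordering):
-#
-#   >>> vercmp(("0", "1.0", "r1"), ("0", "1.1", "r0"))
-#   -1
-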
-_package_weights_ = {"pre":-2, "p":0, "alpha":-4, "beta":-3, "rc":-1} # dicts are unordered
-_package_ends_ = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ] # so we need ordered list
-
-def relparse(myver):
-    """Parses the last elements of a version number into a triplet that can
-    later be compared.
-    """
-
- number = 0
- p1 = 0
- p2 = 0
- mynewver = myver.split('_')
- if len(mynewver) == 2:
-        # the version ends with a suffix listed in _package_weights_
- number = float(mynewver[0])
- match = 0
- for x in _package_ends_:
- elen = len(x)
- if mynewver[1][:elen] == x:
- match = 1
- p1 = _package_weights_[x]
- try:
- p2 = float(mynewver[1][elen:])
- except:
- p2 = 0
- break
- if not match:
- # normal number or number with letter at end
- divider = len(myver)-1
- if myver[divider:] not in "1234567890":
- # letter at end
- p1 = ord(myver[divider:])
- number = float(myver[0:divider])
- else:
- number = float(myver)
- else:
- # normal number or number with letter at end
- divider = len(myver)-1
- if myver[divider:] not in "1234567890":
- #letter at end
- p1 = ord(myver[divider:])
- number = float(myver[0:divider])
- else:
- number = float(myver)
- return [number, p1, p2]
-
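-# Illustrative examples (a sketch; outputs assume the implementation above):
-#
-#   >>> relparse("1.0_rc2")   # 'rc' carries weight -1 from _package_weights_
-#   [1.0, -1, 2.0]
-#   >>> relparse("1.0a")      # a trailing letter becomes its ordinal value
-#   [1.0, 97, 0]
-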
-__vercmp_cache__ = {}
-
-def vercmp_string(val1, val2):
-    """Takes two version strings and returns an integer telling you whether
-    the versions are the same (0), val1 is newer (positive) or val2 is
-    newer (negative).
-    """
-
- # quick short-circuit
- if val1 == val2:
- return 0
- valkey = val1 + " " + val2
-
- # cache lookup
-    try:
-        return __vercmp_cache__[valkey]
-    except KeyError:
-        try:
-            return - __vercmp_cache__[val2 + " " + val1]
-        except KeyError:
-            pass
-
-    # consider 1_p2 vs 1.1:
-    # after expansion they would become (1_p2, 0) vs (1, 1), so 1_p2 would
-    # be compared with 1 before 0 is compared with 1. To solve this we
-    # convert to (1, 0_p2) by splitting off the _prepart and adding it
-    # back after expansion.
-
- val1_prepart = val2_prepart = ''
- if val1.count('_'):
- val1, val1_prepart = val1.split('_', 1)
- if val2.count('_'):
- val2, val2_prepart = val2.split('_', 1)
-
- # replace '-' by '.'
- # FIXME: Is it needed? can val1/2 contain '-'?
-
- val1 = val1.split("-")
- if len(val1) == 2:
- val1[0] = val1[0] + "." + val1[1]
- val2 = val2.split("-")
- if len(val2) == 2:
- val2[0] = val2[0] + "." + val2[1]
-
- val1 = val1[0].split('.')
- val2 = val2[0].split('.')
-
- # add back decimal point so that .03 does not become "3" !
- for x in xrange(1, len(val1)):
- if val1[x][0] == '0' :
- val1[x] = '.' + val1[x]
- for x in xrange(1, len(val2)):
- if val2[x][0] == '0' :
- val2[x] = '.' + val2[x]
-
-    # extend version numbers
- if len(val2) < len(val1):
- val2.extend(["0"]*(len(val1)-len(val2)))
- elif len(val1) < len(val2):
- val1.extend(["0"]*(len(val2)-len(val1)))
-
- # add back _prepart tails
- if val1_prepart:
- val1[-1] += '_' + val1_prepart
- if val2_prepart:
- val2[-1] += '_' + val2_prepart
- # The above code will extend version numbers out so they
- # have the same number of digits.
- for x in xrange(0, len(val1)):
- cmp1 = relparse(val1[x])
- cmp2 = relparse(val2[x])
- for y in xrange(0, 3):
- myret = cmp1[y] - cmp2[y]
- if myret != 0:
- __vercmp_cache__[valkey] = myret
- return myret
- __vercmp_cache__[valkey] = 0
- return 0
-
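-# Illustrative ordering checks (a sketch; outputs assume the code above and
-# are floats because relparse works on floats):
-#
-#   >>> vercmp_string("1.0", "1.1")   # negative: 1.0 is older
-#   -1.0
-#   >>> vercmp_string("1.1", "1.0")   # positive: 1.1 is newer
-#   1.0
-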
-def explode_deps(s):
- """
- Take an RDEPENDS style string of format:
- "DEPEND1 (optional version) DEPEND2 (optional version) ..."
- and return a list of dependencies.
- Version information is ignored.
- """
- r = []
- l = s.split()
- flag = False
- for i in l:
- if i[0] == '(':
- flag = True
- #j = []
- if not flag:
- r.append(i)
- #else:
- # j.append(i)
- if flag and i.endswith(')'):
- flag = False
- # Ignore version
- #r[-1] += ' ' + ' '.join(j)
- return r
-
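-# Illustrative example (a sketch; output assumes the implementation above):
-#
-#   >>> explode_deps("update-rc.d ncurses (>= 5.0)")
-#   ['update-rc.d', 'ncurses']
-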
-def explode_dep_versions(s):
- """
- Take an RDEPENDS style string of format:
- "DEPEND1 (optional version) DEPEND2 (optional version) ..."
- and return a dictionary of dependencies and versions.
- """
- r = {}
- l = s.replace(",", "").split()
- lastdep = None
- lastver = ""
- inversion = False
- for i in l:
- if i[0] == '(':
- inversion = True
- lastver = i[1:] or ""
- #j = []
- elif inversion and i.endswith(')'):
- inversion = False
- lastver = lastver + " " + (i[:-1] or "")
- r[lastdep] = lastver
- elif not inversion:
- r[i] = None
- lastdep = i
- lastver = ""
- elif inversion:
- lastver = lastver + " " + i
-
- return r
-
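-# Illustrative example (a sketch; output assumes the implementation above):
-#
-#   >>> explode_dep_versions("foo (>= 1.0) bar")
-#   {'foo': '>= 1.0', 'bar': None}
-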
-def join_deps(deps):
- """
- Take the result from explode_dep_versions and generate a dependency string
- """
- result = []
- for dep in deps:
- if deps[dep]:
- result.append(dep + " (" + deps[dep] + ")")
- else:
- result.append(dep)
- return ", ".join(result)
-
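-# Illustrative round trip (a sketch; output assumes the functions above):
-#
-#   >>> join_deps({'foo': '>= 1.0', 'bar': None})
-#   'foo (>= 1.0), bar'
-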
-def _print_trace(body, line):
- """
-    Print the lines of a text body surrounding the given line
- """
- # print the environment of the method
- min_line = max(1, line-4)
- max_line = min(line + 4, len(body))
- for i in xrange(min_line, max_line + 1):
- if line == i:
- logger.error(' *** %.4d:%s', i, body[i-1])
- else:
- logger.error(' %.4d:%s', i, body[i-1])
-
-def better_compile(text, file, realfile, mode = "exec"):
- """
- A better compile method. This method
- will print the offending lines.
- """
- try:
- return compile(text, file, mode)
- except Exception as e:
- # split the text into lines again
- body = text.split('\n')
- logger.error("Error in compiling python function in %s", realfile)
- logger.error(str(e))
- if e.lineno:
- logger.error("The lines leading to this error were:")
- logger.error("\t%d:%s:'%s'", e.lineno, e.__class__.__name__, body[e.lineno-1])
- _print_trace(body, e.lineno)
- else:
- logger.error("The function causing this error was:")
- for line in body:
- logger.error(line)
-
- raise
-
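-# Illustrative usage (a sketch; the snippet and file names are hypothetical):
-#
-#   code = better_compile("print('hi')\n", "<snippet>", "recipe.bb")
-#   better_exec(code, {}, "print('hi')\n", "recipe.bb")
-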
-def better_exec(code, context, text, realfile = "<code>"):
- """
-    Similar to better_compile, better_exec will
- print the lines that are responsible for the
- error.
- """
- import bb.parse
- if not hasattr(code, "co_filename"):
- code = better_compile(code, realfile, realfile)
- try:
- exec(code, _context, context)
- except Exception:
- (t, value, tb) = sys.exc_info()
-
- if t in [bb.parse.SkipPackage, bb.build.FuncFailed]:
- raise
-
- import traceback
- exception = traceback.format_exception_only(t, value)
- logger.error('Error executing a python function in %s:\n%s',
- realfile, ''.join(exception))
-
- # Strip 'us' from the stack (better_exec call)
- tb = tb.tb_next
-
- textarray = text.split('\n')
- linefailed = traceback.tb_lineno(tb)
-
- tbextract = traceback.extract_tb(tb)
- tbformat = "\n".join(traceback.format_list(tbextract))
- logger.error("The stack trace of python calls that resulted in this exception/failure was:")
- for line in tbformat.split('\n'):
- logger.error(line)
-
- logger.error("The code that was being executed was:")
- _print_trace(textarray, linefailed)
- logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[0][0], tbextract[0][1], tbextract[0][2])
-
- # See if this is a function we constructed and has calls back into other functions in
- # "text". If so, try and improve the context of the error by diving down the trace
- level = 0
- nexttb = tb.tb_next
- while nexttb is not None:
- if tbextract[level][0] == tbextract[level+1][0] and tbextract[level+1][2] == tbextract[level][0]:
- _print_trace(textarray, tbextract[level+1][1])
- logger.error("(file: '%s', lineno: %s, function: %s)", tbextract[level+1][0], tbextract[level+1][1], tbextract[level+1][2])
- else:
- break
- nexttb = tb.tb_next
- level = level + 1
-
- raise
-
-def simple_exec(code, context):
- exec(code, _context, context)
-
-def better_eval(source, locals):
- return eval(source, _context, locals)
-
-@contextmanager
-def fileslocked(files):
- """Context manager for locking and unlocking file locks."""
- locks = []
- if files:
- for lockfile in files:
- locks.append(bb.utils.lockfile(lockfile))
-
-    try:
-        yield
-    finally:
-        # make sure the locks are released even if the body raises
-        for lock in locks:
-            bb.utils.unlockfile(lock)
-
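-# Illustrative usage (a sketch; the paths are hypothetical):
-#
-#   with fileslocked(["/tmp/a.lock", "/tmp/b.lock"]):
-#       pass    # both locks are held for the duration of this block
-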
-def lockfile(name, shared=False):
- """
-    Use the file 'name' as a lock file, return when the lock has been acquired.
- Returns a variable to pass to unlockfile().
- """
- path = os.path.dirname(name)
- if not os.path.isdir(path):
- logger.error("Lockfile destination directory '%s' does not exist", path)
- sys.exit(1)
-
- if not os.access(path, os.W_OK):
- logger.error("Error, lockfile path is not writable!: %s" % path)
- sys.exit(1)
-
- op = fcntl.LOCK_EX
- if shared:
- op = fcntl.LOCK_SH
-
- while True:
- # If we leave the lockfiles lying around there is no problem
- # but we should clean up after ourselves. This gives potential
- # for races though. To work around this, when we acquire the lock
- # we check the file we locked was still the lock file on disk.
- # by comparing inode numbers. If they don't match or the lockfile
- # no longer exists, we start again.
-
- # This implementation is unfair since the last person to request the
- # lock is the most likely to win it.
-
- try:
- lf = open(name, 'a+')
- fileno = lf.fileno()
- fcntl.flock(fileno, op)
- statinfo = os.fstat(fileno)
- if os.path.exists(lf.name):
- statinfo2 = os.stat(lf.name)
- if statinfo.st_ino == statinfo2.st_ino:
- return lf
- lf.close()
- except Exception:
- continue
-
-def unlockfile(lf):
- """
- Unlock a file locked using lockfile()
- """
- try:
- # If we had a shared lock, we need to promote to exclusive before
- # removing the lockfile. Attempt this, ignore failures.
- fcntl.flock(lf.fileno(), fcntl.LOCK_EX|fcntl.LOCK_NB)
- os.unlink(lf.name)
- except (IOError, OSError):
- pass
- fcntl.flock(lf.fileno(), fcntl.LOCK_UN)
- lf.close()
-
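-# Illustrative usage (a sketch; the path is hypothetical):
-#
-#   lock = lockfile("/tmp/bb.lock")   # blocks until the lock is acquired
-#   try:
-#       pass                          # critical section
-#   finally:
-#       unlockfile(lock)
-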
-def md5_file(filename):
- """
- Return the hex string representation of the MD5 checksum of filename.
- """
- try:
- import hashlib
- m = hashlib.md5()
- except ImportError:
- import md5
- m = md5.new()
-
- for line in open(filename):
- m.update(line)
- return m.hexdigest()
-
-def sha256_file(filename):
- """
- Return the hex string representation of the 256-bit SHA checksum of
- filename. On Python 2.4 this will return None, so callers will need to
- handle that by either skipping SHA checks, or running a standalone sha256sum
- binary.
- """
- try:
- import hashlib
- except ImportError:
- return None
-
- s = hashlib.sha256()
- for line in open(filename):
- s.update(line)
- return s.hexdigest()
-
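-# Illustrative usage (a sketch; the path and digest are hypothetical):
-#
-#   digest = md5_file("/path/to/file")   # hex string, e.g. 'd41d8cd9...'
-#   sha = sha256_file("/path/to/file")   # None on Python 2.4 (no hashlib)
-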
-def preserved_envvars_exported():
- """Variables which are taken from the environment and placed in and exported
- from the metadata"""
- return [
- 'BB_TASKHASH',
- 'HOME',
- 'LOGNAME',
- 'PATH',
- 'PWD',
- 'SHELL',
- 'TERM',
- 'USER',
- 'USERNAME',
- ]
-
-def preserved_envvars_exported_interactive():
- """Variables which are taken from the environment and placed in and exported
- from the metadata, for interactive tasks"""
- return [
- 'COLORTERM',
- 'DBUS_SESSION_BUS_ADDRESS',
- 'DESKTOP_SESSION',
- 'DESKTOP_STARTUP_ID',
- 'DISPLAY',
- 'GNOME_KEYRING_PID',
- 'GNOME_KEYRING_SOCKET',
- 'GPG_AGENT_INFO',
- 'GTK_RC_FILES',
- 'SESSION_MANAGER',
- 'KRB5CCNAME',
- 'SSH_AUTH_SOCK',
- 'XAUTHORITY',
- 'XDG_DATA_DIRS',
- 'XDG_SESSION_COOKIE',
- ]
-
-def preserved_envvars():
- """Variables which are taken from the environment and placed in the metadata"""
- v = [
- 'BBPATH',
- 'BB_PRESERVE_ENV',
- 'BB_ENV_WHITELIST',
- 'BB_ENV_EXTRAWHITE',
- 'LANG',
- '_',
- ]
- return v + preserved_envvars_exported() + preserved_envvars_exported_interactive()
-
-def filter_environment(good_vars):
- """
- Create a pristine environment for bitbake. This will remove variables that
- are not known and may influence the build in a negative way.
- """
-
- removed_vars = []
- for key in os.environ.keys():
- if key in good_vars:
- continue
-
- removed_vars.append(key)
- os.unsetenv(key)
- del os.environ[key]
-
- if len(removed_vars):
- logger.debug(1, "Removed the following variables from the environment: %s", ", ".join(removed_vars))
-
- return removed_vars
-
-def create_interactive_env(d):
- for k in preserved_envvars_exported_interactive():
-        os.environ[k] = bb.data.getVar(k, d, True) or ''
-
-def clean_environment():
- """
-    Clean up any spurious environment variables. This will remove any
-    variables the user hasn't chosen to preserve.
- """
- if 'BB_PRESERVE_ENV' not in os.environ:
- if 'BB_ENV_WHITELIST' in os.environ:
- good_vars = os.environ['BB_ENV_WHITELIST'].split()
- else:
- good_vars = preserved_envvars()
- if 'BB_ENV_EXTRAWHITE' in os.environ:
- good_vars.extend(os.environ['BB_ENV_EXTRAWHITE'].split())
- filter_environment(good_vars)
-
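-# Illustrative effect (a sketch; the variable values are hypothetical):
-#
-#   os.environ["BB_ENV_EXTRAWHITE"] = "http_proxy https_proxy"
-#   clean_environment()   # keeps the whitelist plus the two proxy variables
-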
-def empty_environment():
- """
- Remove all variables from the environment.
- """
- for s in os.environ.keys():
- os.unsetenv(s)
- del os.environ[s]
-
-def build_environment(d):
- """
- Build an environment from all exported variables.
- """
- import bb.data
- for var in bb.data.keys(d):
- export = bb.data.getVarFlag(var, "export", d)
- if export:
- os.environ[var] = bb.data.getVar(var, d, True) or ""
-
-def remove(path, recurse=False):
- """Equivalent to rm -f or rm -rf"""
- if not path:
- return
- import os, errno, shutil, glob
- for name in glob.glob(path):
- try:
- os.unlink(name)
- except OSError as exc:
- if recurse and exc.errno == errno.EISDIR:
- shutil.rmtree(name)
- elif exc.errno != errno.ENOENT:
- raise
-
-def prunedir(topdir):
- # Delete everything reachable from the directory named in 'topdir'.
- # CAUTION: This is dangerous!
- for root, dirs, files in os.walk(topdir, topdown = False):
- for name in files:
- os.remove(os.path.join(root, name))
- for name in dirs:
- if os.path.islink(os.path.join(root, name)):
- os.remove(os.path.join(root, name))
- else:
- os.rmdir(os.path.join(root, name))
- os.rmdir(topdir)
-
-#
-# Could also use return re.compile("(%s)" % "|".join(map(re.escape, suffixes))).sub(lambda mo: "", var)
-# but that's possibly insane, and suffixes is probably going to be small
-#
-def prune_suffix(var, suffixes, d):
- # See if var ends with any of the suffixes listed and
- # remove it if found
- for suffix in suffixes:
- if var.endswith(suffix):
- return var.replace(suffix, "")
- return var
-
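-# Illustrative example (a sketch; output assumes the implementation above,
-# which never uses the datastore argument):
-#
-#   >>> prune_suffix("glibc-native", ["-native", "-cross"], None)
-#   'glibc'
-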
-def mkdirhier(directory):
-    """Create a directory hierarchy like 'mkdir -p', but do not complain
-    if the directory already exists, unlike os.makedirs
-    """
-
- try:
- os.makedirs(directory)
- except OSError as e:
- if e.errno != errno.EEXIST:
- raise e
-
-def movefile(src, dest, newmtime = None, sstat = None):
- """Moves a file from src to dest, preserving all permissions and
- attributes; mtime will be preserved even when moving across
-    filesystems. Returns the new mtime on success and None on failure.
-    The move is atomic.
- """
-
- #print "movefile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
- try:
- if not sstat:
- sstat = os.lstat(src)
- except Exception as e:
- print("movefile: Stating source file failed...", e)
- return None
-
- destexists = 1
- try:
- dstat = os.lstat(dest)
- except:
- dstat = os.lstat(os.path.dirname(dest))
- destexists = 0
-
- if destexists:
- if stat.S_ISLNK(dstat[stat.ST_MODE]):
- try:
- os.unlink(dest)
- destexists = 0
- except Exception as e:
- pass
-
- if stat.S_ISLNK(sstat[stat.ST_MODE]):
- try:
- target = os.readlink(src)
- if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
- os.unlink(dest)
- os.symlink(target, dest)
- #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
- os.unlink(src)
- return os.lstat(dest)
- except Exception as e:
- print("movefile: failed to properly create symlink:", dest, "->", target, e)
- return None
-
- renamefailed = 1
- if sstat[stat.ST_DEV] == dstat[stat.ST_DEV]:
- try:
- os.rename(src, dest)
- renamefailed = 0
- except Exception as e:
- if e[0] != errno.EXDEV:
- # Some random error.
- print("movefile: Failed to move", src, "to", dest, e)
- return None
-        # Invalid cross-device link: 'bind' mounted or actually cross-device
-
- if renamefailed:
- didcopy = 0
- if stat.S_ISREG(sstat[stat.ST_MODE]):
- try: # For safety copy then move it over.
- shutil.copyfile(src, dest + "#new")
- os.rename(dest + "#new", dest)
- didcopy = 1
- except Exception as e:
- print('movefile: copy', src, '->', dest, 'failed.', e)
- return None
- else:
-            #we don't yet handle special files, so we need to fall back to /bin/mv
- a = getstatusoutput("/bin/mv -f " + "'" + src + "' '" + dest + "'")
- if a[0] != 0:
- print("movefile: Failed to move special file:" + src + "' to '" + dest + "'", a)
- return None # failure
- try:
- if didcopy:
- os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
- os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
- os.unlink(src)
- except Exception as e:
- print("movefile: Failed to chown/chmod/unlink", dest, e)
- return None
-
- if newmtime:
- os.utime(dest, (newmtime, newmtime))
- else:
- os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
- newmtime = sstat[stat.ST_MTIME]
- return newmtime
-
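-# Illustrative usage (a sketch; the paths are hypothetical):
-#
-#   mtime = movefile("/tmp/src.txt", "/tmp/dest.txt")
-#   if mtime is None:
-#       pass    # the move failed; errors were printed above
-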
-def copyfile(src, dest, newmtime = None, sstat = None):
- """
- Copies a file from src to dest, preserving all permissions and
-    attributes; mtime will be preserved even when copying across
-    filesystems. Returns the new mtime on success and False on failure.
- """
- #print "copyfile(" + src + "," + dest + "," + str(newmtime) + "," + str(sstat) + ")"
- try:
- if not sstat:
- sstat = os.lstat(src)
- except Exception as e:
- print("copyfile: Stating source file failed...", e)
- return False
-
- destexists = 1
- try:
- dstat = os.lstat(dest)
- except:
- dstat = os.lstat(os.path.dirname(dest))
- destexists = 0
-
- if destexists:
- if stat.S_ISLNK(dstat[stat.ST_MODE]):
- try:
- os.unlink(dest)
- destexists = 0
- except Exception as e:
- pass
-
- if stat.S_ISLNK(sstat[stat.ST_MODE]):
- try:
- target = os.readlink(src)
- if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]):
- os.unlink(dest)
- os.symlink(target, dest)
- #os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID])
- return os.lstat(dest)
- except Exception as e:
- print("copyfile: failed to properly create symlink:", dest, "->", target, e)
- return False
-
- if stat.S_ISREG(sstat[stat.ST_MODE]):
- os.chmod(src, stat.S_IRUSR) # Make sure we can read it
- try: # For safety copy then move it over.
- shutil.copyfile(src, dest + "#new")
- os.rename(dest + "#new", dest)
- except Exception as e:
- print('copyfile: copy', src, '->', dest, 'failed.', e)
- return False
- finally:
- os.chmod(src, sstat[stat.ST_MODE])
- os.utime(src, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
-
- else:
-        #we don't yet handle special files, so we need to fall back to /bin/cp
- a = getstatusoutput("/bin/cp -f " + "'" + src + "' '" + dest + "'")
- if a[0] != 0:
- print("copyfile: Failed to copy special file:" + src + "' to '" + dest + "'", a)
- return False # failure
- try:
- os.lchown(dest, sstat[stat.ST_UID], sstat[stat.ST_GID])
- os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown
- except Exception as e:
- print("copyfile: Failed to chown/chmod/unlink", dest, e)
- return False
-
- if newmtime:
- os.utime(dest, (newmtime, newmtime))
- else:
- os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME]))
- newmtime = sstat[stat.ST_MTIME]
- return newmtime
-
-def which(path, item, direction = 0):
- """
- Locate a file in a PATH
- """
-
- paths = (path or "").split(':')
- if direction != 0:
- paths.reverse()
-
- for p in paths:
- next = os.path.join(p, item)
- if os.path.exists(next):
- return next
-
- return ""
-
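-# Illustrative usage (a sketch; the result depends on the host filesystem):
-#
-#   which("/usr/bin:/bin", "sh")      # first match, e.g. '/usr/bin/sh'
-#   which("/usr/bin:/bin", "sh", 1)   # search the path in reverse order
-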
-def init_logger(logger, verbose, debug, debug_domains):
- """
- Set verbosity and debug levels in the logger
- """
-
- if debug:
- bb.msg.set_debug_level(debug)
- elif verbose:
- bb.msg.set_verbose(True)
- else:
- bb.msg.set_debug_level(0)
-
- if debug_domains:
- bb.msg.set_debug_domains(debug_domains)
-
-def to_boolean(string, default=None):
- if not string:
- return default
-
- normalized = string.lower()
- if normalized in ("y", "yes", "1", "true"):
- return True
- elif normalized in ("n", "no", "0", "false"):
- return False
- else:
- raise ValueError("Invalid value for to_boolean: %s" % string)
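-
-# Illustrative examples (a sketch; outputs assume the implementation above):
-#
-#   >>> to_boolean("Yes")
-#   True
-#   >>> to_boolean(None, default=False)
-#   False
-#   >>> to_boolean("maybe")
-#   Traceback (most recent call last):
-#       ...
-#   ValueError: Invalid value for to_boolean: maybe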