From d91e57c44e923b6b65396515ff878199d43763a1 Mon Sep 17 00:00:00 2001 From: Chris Larson Date: Tue, 7 Dec 2004 20:55:41 +0000 Subject: Initial import. --- lib/bb/__init__.py | 1277 +++++++++++++++++++++++++++++++++++++++++++ lib/bb/build.py | 405 ++++++++++++++ lib/bb/data.py | 490 +++++++++++++++++ lib/bb/event.py | 204 +++++++ lib/bb/fetch.py | 632 +++++++++++++++++++++ lib/bb/make.py | 263 +++++++++ lib/bb/manifest.py | 144 +++++ lib/bb/parse/BBHandler.py | 376 +++++++++++++ lib/bb/parse/ConfHandler.py | 194 +++++++ lib/bb/parse/__init__.py | 76 +++ 10 files changed, 4061 insertions(+) create mode 100644 lib/bb/__init__.py create mode 100644 lib/bb/build.py create mode 100644 lib/bb/data.py create mode 100644 lib/bb/event.py create mode 100644 lib/bb/fetch.py create mode 100644 lib/bb/make.py create mode 100644 lib/bb/manifest.py create mode 100644 lib/bb/parse/BBHandler.py create mode 100644 lib/bb/parse/ConfHandler.py create mode 100644 lib/bb/parse/__init__.py (limited to 'lib') diff --git a/lib/bb/__init__.py b/lib/bb/__init__.py new file mode 100644 index 000000000..07a7f42b7 --- /dev/null +++ b/lib/bb/__init__.py @@ -0,0 +1,1277 @@ +#!/usr/bin/python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake Build System Python Library + +Copyright (C) 2003 Holger Schurig +Copyright (C) 2003, 2004 Chris Larson + +Based on Gentoo's portage.py. + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+ +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 59 Temple +Place, Suite 330, Boston, MA 02111-1307 USA. +""" + +__version__ = "1.1" + +__all__ = [ + + "debug", + "note", + "error", + "fatal", + + "mkdirhier", + "movefile", + + "tokenize", + "evaluate", + "flatten", + "relparse", + "ververify", + "isjustname", + "isspecific", + "pkgsplit", + "catpkgsplit", + "vercmp", + "pkgcmp", + "dep_parenreduce", + "dep_opconvert", + "digraph", + +# fetch + "decodeurl", + "encodeurl", + +# modules + "parse", + "data", + "event", + "build", + "fetch", + "make", + "manifest" + ] + +whitespace = '\t\n\x0b\x0c\r ' +lowercase = 'abcdefghijklmnopqrstuvwxyz' + +import sys, os, types, re + +# +# Check for the Python version. A lot of stuff needs Python 2.3 or later +# +if sys.version_info[:3] < (2, 3, 0): + print "BitBake needs Python 2.3 or later. Please upgrade." + sys.exit(-1) + +#projectdir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))) +projectdir = os.getcwd() +env = {} + +class VarExpandError(Exception): + pass + +class MalformedUrl(Exception): + """Exception raised when encountering an invalid url""" + + +####################################################################### +####################################################################### +# +# SECTION: Debug +# +# PURPOSE: little functions to make yourself known +# +####################################################################### +####################################################################### + +debug_prepend = '' + + +def debug(lvl, *args): + if 'BBDEBUG' in env and (env['BBDEBUG'] >= str(lvl)): + print debug_prepend + 'DEBUG:', ''.join(args) + +def note(*args): + print debug_prepend + 'NOTE:', ''.join(args) + +def error(*args): + print debug_prepend + 'ERROR:', ''.join(args) + +def fatal(*args): + print debug_prepend + 'ERROR:', ''.join(args) + sys.exit(1) + + 
+####################################################################### +####################################################################### +# +# SECTION: File +# +# PURPOSE: Basic file and directory tree related functions +# +####################################################################### +####################################################################### + +def mkdirhier(dir): + """Create a directory like 'mkdir -p', but does not complain if + directory already exists like os.makedirs + """ + + debug(3, "mkdirhier(%s)" % dir) + try: + os.makedirs(dir) + debug(2, "created " + dir) + except OSError, e: + if e.errno != 17: raise e + + +####################################################################### + +import stat + +def movefile(src,dest,newmtime=None,sstat=None): + """Moves a file from src to dest, preserving all permissions and + attributes; mtime will be preserved even when moving across + filesystems. Returns true on success and false on failure. Move is + atomic. + """ + + #print "movefile("+src+","+dest+","+str(newmtime)+","+str(sstat)+")" + try: + if not sstat: + sstat=os.lstat(src) + except Exception, e: + print "!!! Stating source file failed... movefile()" + print "!!!",e + return None + + destexists=1 + try: + dstat=os.lstat(dest) + except: + dstat=os.lstat(os.path.dirname(dest)) + destexists=0 + + if destexists: + if stat.S_ISLNK(dstat[stat.ST_MODE]): + try: + os.unlink(dest) + destexists=0 + except Exception, e: + pass + + if stat.S_ISLNK(sstat[stat.ST_MODE]): + try: + target=os.readlink(src) + if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]): + os.unlink(dest) + os.symlink(target,dest) +# os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID]) + os.unlink(src) + return os.lstat(dest) + except Exception, e: + print "!!! 
failed to properly create symlink:" + print "!!!",dest,"->",target + print "!!!",e + return None + + renamefailed=1 + if sstat[stat.ST_DEV]==dstat[stat.ST_DEV]: + try: + ret=os.rename(src,dest) + renamefailed=0 + except Exception, e: + import errno + if e[0]!=errno.EXDEV: + # Some random error. + print "!!! Failed to move",src,"to",dest + print "!!!",e + return None + # Invalid cross-device-link 'bind' mounted or actually Cross-Device + + if renamefailed: + didcopy=0 + if stat.S_ISREG(sstat[stat.ST_MODE]): + try: # For safety copy then move it over. + shutil.copyfile(src,dest+"#new") + os.rename(dest+"#new",dest) + didcopy=1 + except Exception, e: + print '!!! copy',src,'->',dest,'failed.' + print "!!!",e + return None + else: + #we don't yet handle special, so we need to fall back to /bin/mv + a=getstatusoutput("/bin/mv -f "+"'"+src+"' '"+dest+"'") + if a[0]!=0: + print "!!! Failed to move special file:" + print "!!! '"+src+"' to '"+dest+"'" + print "!!!",a + return None # failure + try: + if didcopy: + missingos.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID]) + os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown + os.unlink(src) + except Exception, e: + print "!!! 
Failed to chown/chmod/unlink in movefile()" + print "!!!",dest + print "!!!",e + return None + + if newmtime: + os.utime(dest,(newmtime,newmtime)) + else: + os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME])) + newmtime=sstat[stat.ST_MTIME] + return newmtime + + + +####################################################################### +####################################################################### +# +# SECTION: Download +# +# PURPOSE: Download via HTTP, FTP, CVS, BITKEEPER, handling of MD5-signatures +# and mirrors +# +####################################################################### +####################################################################### + +def decodeurl(url): + """Decodes an URL into the tokens (scheme, network location, path, + user, password, parameters). + + >>> decodeurl("http://www.google.com/index.html") + ('http', 'www.google.com', '/index.html', '', '', {}) + + CVS url with username, host and cvsroot. The cvs module to check out is in the + parameters: + + >>> decodeurl("cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg") + ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}) + + Dito, but this time the username has a password part. And we also request a special tag + to check out. 
+ + >>> decodeurl("cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81") + ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}) + """ + + m = re.compile('(?P[^:]*)://((?P.+)@)?(?P[^;]+)(;(?P.*))?').match(url) + if not m: + raise MalformedUrl(url) + + type = m.group('type') + location = m.group('location') + if not location: + raise MalformedUrl(url) + user = m.group('user') + parm = m.group('parm') + m = re.compile('(?P[^/;]+)(?P/[^;]+)').match(location) + if m: + host = m.group('host') + path = m.group('path') + else: + host = "" + path = location + if user: + m = re.compile('(?P[^:]+)(:?(?P.*))').match(user) + if m: + user = m.group('user') + pswd = m.group('pswd') + else: + user = '' + pswd = '' + #note("decodeurl: %s decoded to:" % url) + #note("decodeurl: type = '%s'" % type) + #note("decodeurl: host = '%s'" % host) + #note("decodeurl: path = '%s'" % path) + #note("decodeurl: parm = '%s'" % parm) + #note("decodeurl: user = '%s'" % user) + #note("decodeurl: pswd = '%s'" % pswd) + p = {} + if parm: + for s in parm.split(';'): + s1,s2 = s.split('=') + p[s1] = s2 + + return (type, host, path, user, pswd, p) + +####################################################################### + +def encodeurl(decoded): + """Encodes a URL from tokens (scheme, network location, path, + user, password, parameters). + + >>> encodeurl(['http', 'www.google.com', '/index.html', '', '', {}]) + + "http://www.google.com/index.html" + + CVS with username, host and cvsroot. The cvs module to check out is in the + parameters: + + >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}]) + + "cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg" + + Dito, but this time the username has a password part. And we also request a special tag + to check out. 
+ + >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}]) + + "cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81" + """ + + (type, host, path, user, pswd, p) = decoded + + if not type or not path: + fatal("invalid or missing parameters for url encoding") + url = '%s://' % type + if user: + url += "%s" % user + if pswd: + url += ":%s" % pswd + url += "@" + if host: + url += "%s" % host + url += "%s" % path + if p: + for parm in p.keys(): + url += ";%s=%s" % (parm, p[parm]) + + return url + +####################################################################### + +def which(path, item, direction = 0): + """Useful function for locating a file in a PATH""" + found = "" + for p in (path or "").split(':'): + if os.path.exists(os.path.join(p, item)): + found = os.path.join(p, item) + if direction == 0: + break + return found + +####################################################################### + + + + +####################################################################### +####################################################################### +# +# SECTION: Dependency +# +# PURPOSE: Compare build & run dependencies +# +####################################################################### +####################################################################### + +def tokenize(mystring): + """Breaks a string like 'foo? (bar) oni? (blah (blah))' into (possibly embedded) lists: + + >>> tokenize("x") + ['x'] + >>> tokenize("x y") + ['x', 'y'] + >>> tokenize("(x y)") + [['x', 'y']] + >>> tokenize("(x y) b c") + [['x', 'y'], 'b', 'c'] + >>> tokenize("foo? (bar) oni? (blah (blah))") + ['foo?', ['bar'], 'oni?', ['blah', ['blah']]] + >>> tokenize("sys-apps/linux-headers nls? 
(sys-devel/gettext)") + ['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']] + """ + + newtokens = [] + curlist = newtokens + prevlists = [] + level = 0 + accum = "" + for x in mystring: + if x=="(": + if accum: + curlist.append(accum) + accum="" + prevlists.append(curlist) + curlist=[] + level=level+1 + elif x==")": + if accum: + curlist.append(accum) + accum="" + if level==0: + print "!!! tokenizer: Unmatched left parenthesis in:\n'"+mystring+"'" + return None + newlist=curlist + curlist=prevlists.pop() + curlist.append(newlist) + level=level-1 + elif x in whitespace: + if accum: + curlist.append(accum) + accum="" + else: + accum=accum+x + if accum: + curlist.append(accum) + if (level!=0): + print "!!! tokenizer: Exiting with unterminated parenthesis in:\n'"+mystring+"'" + return None + return newtokens + + +####################################################################### + +def evaluate(tokens,mydefines,allon=0): + """Removes tokens based on whether conditional definitions exist or not. + Recognizes ! 
+ + >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {}) + ['sys-apps/linux-headers'] + + Negate the flag: + + >>> evaluate(['sys-apps/linux-headers', '!nls?', ['sys-devel/gettext']], {}) + ['sys-apps/linux-headers', ['sys-devel/gettext']] + + Define 'nls': + + >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {"nls":1}) + ['sys-apps/linux-headers', ['sys-devel/gettext']] + + Turn allon on: + + >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {}, True) + ['sys-apps/linux-headers', ['sys-devel/gettext']] + """ + + if tokens == None: + return None + mytokens = tokens + [] # this copies the list + pos = 0 + while pos < len(mytokens): + if type(mytokens[pos]) == types.ListType: + evaluate(mytokens[pos], mydefines) + if not len(mytokens[pos]): + del mytokens[pos] + continue + elif mytokens[pos][-1] == "?": + cur = mytokens[pos][:-1] + del mytokens[pos] + if allon: + if cur[0] == "!": + del mytokens[pos] + else: + if cur[0] == "!": + if (cur[1:] in mydefines) and (pos < len(mytokens)): + del mytokens[pos] + continue + elif (cur not in mydefines) and (pos < len(mytokens)): + del mytokens[pos] + continue + pos = pos + 1 + return mytokens + + +####################################################################### + +def flatten(mytokens): + """Converts nested arrays into a flat arrays: + + >>> flatten([1,[2,3]]) + [1, 2, 3] + >>> flatten(['sys-apps/linux-headers', ['sys-devel/gettext']]) + ['sys-apps/linux-headers', 'sys-devel/gettext'] + """ + + newlist=[] + for x in mytokens: + if type(x)==types.ListType: + newlist.extend(flatten(x)) + else: + newlist.append(x) + return newlist + + +####################################################################### + +_package_weights_ = {"pre":-2,"p":0,"alpha":-4,"beta":-3,"rc":-1} # dicts are unordered +_package_ends_ = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ] # so we need ordered list + +def relparse(myver): + """Parses the last elements of a 
version number into a triplet, that can + later be compared: + + >>> relparse('1.2_pre3') + [1.2, -2, 3.0] + >>> relparse('1.2b') + [1.2, 98, 0] + >>> relparse('1.2') + [1.2, 0, 0] + """ + + number = 0 + p1 = 0 + p2 = 0 + mynewver = myver.split('_') + if len(mynewver)==2: + # an _package_weights_ + number = float(mynewver[0]) + match = 0 + for x in _package_ends_: + elen = len(x) + if mynewver[1][:elen] == x: + match = 1 + p1 = _package_weights_[x] + try: + p2 = float(mynewver[1][elen:]) + except: + p2 = 0 + break + if not match: + # normal number or number with letter at end + divider = len(myver)-1 + if myver[divider:] not in "1234567890": + # letter at end + p1 = ord(myver[divider:]) + number = float(myver[0:divider]) + else: + number = float(myver) + else: + # normal number or number with letter at end + divider = len(myver)-1 + if myver[divider:] not in "1234567890": + #letter at end + p1 = ord(myver[divider:]) + number = float(myver[0:divider]) + else: + number = float(myver) + return [number,p1,p2] + + +####################################################################### + +__ververify_cache__ = {} + +def ververify(myorigval,silent=1): + """Returns 1 if given a valid version string, els 0. 
Valid versions are in the format + + ....[a-z,_{_package_weights_}[vy]] + + >>> ververify('2.4.20') + 1 + >>> ververify('2.4..20') # two dots + 0 + >>> ververify('2.x.20') # 'x' is not numeric + 0 + >>> ververify('2.4.20a') + 1 + >>> ververify('2.4.20cvs') # only one trailing letter + 0 + >>> ververify('1a') + 1 + >>> ververify('test_a') # no version at all + 0 + >>> ververify('2.4.20_beta1') + 1 + >>> ververify('2.4.20_beta') + 1 + >>> ververify('2.4.20_wrongext') # _wrongext is no valid trailer + 0 + """ + + # Lookup the cache first + try: + return __ververify_cache__[myorigval] + except KeyError: + pass + + if len(myorigval) == 0: + if not silent: + error("package version is empty") + __ververify_cache__[myorigval] = 0 + return 0 + myval = myorigval.split('.') + if len(myval)==0: + if not silent: + error("package name has empty version string") + __ververify_cache__[myorigval] = 0 + return 0 + # all but the last version must be a numeric + for x in myval[:-1]: + if not len(x): + if not silent: + error("package version has two points in a row") + __ververify_cache__[myorigval] = 0 + return 0 + try: + foo = int(x) + except: + if not silent: + error("package version contains non-numeric '"+x+"'") + __ververify_cache__[myorigval] = 0 + return 0 + if not len(myval[-1]): + if not silent: + error("package version has trailing dot") + __ververify_cache__[myorigval] = 0 + return 0 + try: + foo = int(myval[-1]) + __ververify_cache__[myorigval] = 1 + return 1 + except: + pass + + # ok, our last component is not a plain number or blank, let's continue + if myval[-1][-1] in lowercase: + try: + foo = int(myval[-1][:-1]) + return 1 + __ververify_cache__[myorigval] = 1 + # 1a, 2.0b, etc. 
+ except: + pass + # ok, maybe we have a 1_alpha or 1_beta2; let's see + ep=string.split(myval[-1],"_") + if len(ep)!= 2: + if not silent: + error("package version has more than one letter at then end") + __ververify_cache__[myorigval] = 0 + return 0 + try: + foo = string.atoi(ep[0]) + except: + # this needs to be numeric, i.e. the "1" in "1_alpha" + if not silent: + error("package version must have numeric part before the '_'") + __ververify_cache__[myorigval] = 0 + return 0 + + for mye in _package_ends_: + if ep[1][0:len(mye)] == mye: + if len(mye) == len(ep[1]): + # no trailing numeric is ok + __ververify_cache__[myorigval] = 1 + return 1 + else: + try: + foo = string.atoi(ep[1][len(mye):]) + __ververify_cache__[myorigval] = 1 + return 1 + except: + # if no _package_weights_ work, *then* we return 0 + pass + if not silent: + error("package version extension after '_' is invalid") + __ververify_cache__[myorigval] = 0 + return 0 + + +def isjustname(mypkg): + myparts = string.split(mypkg,'-') + for x in myparts: + if ververify(x): + return 0 + return 1 + + +_isspecific_cache_={} + +def isspecific(mypkg): + "now supports packages with no category" + try: + return __isspecific_cache__[mypkg] + except: + pass + + mysplit = string.split(mypkg,"/") + if not isjustname(mysplit[-1]): + __isspecific_cache__[mypkg] = 1 + return 1 + __isspecific_cache__[mypkg] = 0 + return 0 + + +####################################################################### + +__pkgsplit_cache__={} + +def pkgsplit(mypkg, silent=1): + + """This function can be used as a package verification function. If + it is a valid name, pkgsplit will return a list containing: + [pkgname, pkgversion(norev), pkgrev ]. 
+ + >>> pkgsplit('') + >>> pkgsplit('x') + >>> pkgsplit('x-') + >>> pkgsplit('-1') + >>> pkgsplit('glibc-1.2-8.9-r7') + >>> pkgsplit('glibc-2.2.5-r7') + ['glibc', '2.2.5', 'r7'] + >>> pkgsplit('foo-1.2-1') + >>> pkgsplit('Mesa-3.0') + ['Mesa', '3.0', 'r0'] + """ + + try: + return __pkgsplit_cache__[mypkg] + except KeyError: + pass + + myparts = string.split(mypkg,'-') + if len(myparts) < 2: + if not silent: + error("package name without name or version part") + __pkgsplit_cache__[mypkg] = None + return None + for x in myparts: + if len(x) == 0: + if not silent: + error("package name with empty name or version part") + __pkgsplit_cache__[mypkg] = None + return None + # verify rev + revok = 0 + myrev = myparts[-1] + ververify(myrev, 0) + if len(myrev) and myrev[0] == "r": + try: + string.atoi(myrev[1:]) + revok = 1 + except: + pass + if revok: + if ververify(myparts[-2]): + if len(myparts) == 2: + __pkgsplit_cache__[mypkg] = None + return None + else: + for x in myparts[:-2]: + if ververify(x): + __pkgsplit_cache__[mypkg]=None + return None + # names can't have versiony looking parts + myval=[string.join(myparts[:-2],"-"),myparts[-2],myparts[-1]] + __pkgsplit_cache__[mypkg]=myval + return myval + else: + __pkgsplit_cache__[mypkg] = None + return None + + elif ververify(myparts[-1],silent): + if len(myparts)==1: + if not silent: + print "!!! Name error in",mypkg+": missing name part." 
+ __pkgsplit_cache__[mypkg]=None + return None + else: + for x in myparts[:-1]: + if ververify(x): + if not silent: error("package name has multiple version parts") + __pkgsplit_cache__[mypkg] = None + return None + myval = [string.join(myparts[:-1],"-"), myparts[-1],"r0"] + __pkgsplit_cache__[mypkg] = myval + return myval + else: + __pkgsplit_cache__[mypkg] = None + return None + + +####################################################################### + +__catpkgsplit_cache__ = {} + +def catpkgsplit(mydata,silent=1): + """returns [cat, pkgname, version, rev ] + + >>> catpkgsplit('sys-libs/glibc-1.2-r7') + ['sys-libs', 'glibc', '1.2', 'r7'] + >>> catpkgsplit('glibc-1.2-r7') + ['null', 'glibc', '1.2', 'r7'] + """ + + try: + return __catpkgsplit_cache__[mydata] + except KeyError: + pass + + cat = os.path.basename(os.path.dirname(mydata)) + mydata = os.path.join(cat, os.path.basename(mydata)) +# if mydata[:len(projectdir)] == projectdir: +# mydata = mydata[len(projectdir)+1:] + if mydata[-3:] == '.bb': + mydata = mydata[:-3] + + mysplit = mydata.split("/") + p_split = None + splitlen = len(mysplit) + if splitlen == 1: + retval = [None] + p_split = pkgsplit(mydata,silent) + else: + retval = [mysplit[splitlen - 2]] + p_split = pkgsplit(mysplit[splitlen - 1],silent) + if not p_split: + __catpkgsplit_cache__[mydata] = None + return None + retval.extend(p_split) + __catpkgsplit_cache__[mydata] = retval + return retval + + +####################################################################### + +__vercmp_cache__ = {} + +def vercmp(val1,val2): + """This takes two version strings and returns an integer to tell you whether + the versions are the same, val1>val2 or val2>val1. 
+ + >>> vercmp('1', '2') + -1.0 + >>> vercmp('2', '1') + 1.0 + >>> vercmp('1', '1.0') + 0 + >>> vercmp('1', '1.1') + -1.0 + >>> vercmp('1.1', '1_p2') + 1.0 + """ + + # quick short-circuit + if val1 == val2: + return 0 + valkey = val1+" "+val2 + + # cache lookup + try: + return __vercmp_cache__[valkey] + try: + return - __vercmp_cache__[val2+" "+val1] + except KeyError: + pass + except KeyError: + pass + + # consider 1_p2 vc 1.1 + # after expansion will become (1_p2,0) vc (1,1) + # then 1_p2 is compared with 1 before 0 is compared with 1 + # to solve the bug we need to convert it to (1,0_p2) + # by splitting _prepart part and adding it back _after_expansion + + val1_prepart = val2_prepart = '' + if val1.count('_'): + val1, val1_prepart = val1.split('_', 1) + if val2.count('_'): + val2, val2_prepart = val2.split('_', 1) + + # replace '-' by '.' + # FIXME: Is it needed? can val1/2 contain '-'? + + val1 = string.split(val1,'-') + if len(val1) == 2: + val1[0] = val1[0] +"."+ val1[1] + val2 = string.split(val2,'-') + if len(val2) == 2: + val2[0] = val2[0] +"."+ val2[1] + + val1 = string.split(val1[0],'.') + val2 = string.split(val2[0],'.') + + # add back decimal point so that .03 does not become "3" ! + for x in range(1,len(val1)): + if val1[x][0] == '0' : + val1[x] = '.' + val1[x] + for x in range(1,len(val2)): + if val2[x][0] == '0' : + val2[x] = '.' + val2[x] + + # extend varion numbers + if len(val2) < len(val1): + val2.extend(["0"]*(len(val1)-len(val2))) + elif len(val1) < len(val2): + val1.extend(["0"]*(len(val2)-len(val1))) + + # add back _prepart tails + if val1_prepart: + val1[-1] += '_' + val1_prepart + if val2_prepart: + val2[-1] += '_' + val2_prepart + # The above code will extend version numbers out so they + # have the same number of digits. 
+ for x in range(0,len(val1)): + cmp1 = relparse(val1[x]) + cmp2 = relparse(val2[x]) + for y in range(0,3): + myret = cmp1[y] - cmp2[y] + if myret != 0: + __vercmp_cache__[valkey] = myret + return myret + __vercmp_cache__[valkey] = 0 + return 0 + + +####################################################################### + +def pkgcmp(pkg1,pkg2): + """ Compares two packages, which should have been split via + pkgsplit(). if the return value val is less than zero, then pkg2 is + newer than pkg1, zero if equal and positive if older. + + >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r7']) + 0 + >>> pkgcmp(['glibc', '2.2.5', 'r4'], ['glibc', '2.2.5', 'r7']) + -1 + >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r2']) + 1 + """ + + mycmp = vercmp(pkg1[1],pkg2[1]) + if mycmp > 0: + return 1 + if mycmp < 0: + return -1 + r1=string.atoi(pkg1[2][1:]) + r2=string.atoi(pkg2[2][1:]) + if r1 > r2: + return 1 + if r2 > r1: + return -1 + return 0 + + +####################################################################### + +def dep_parenreduce(mysplit, mypos=0): + """Accepts a list of strings, and converts '(' and ')' surrounded items to sub-lists: + + >>> dep_parenreduce(['']) + [''] + >>> dep_parenreduce(['1', '2', '3']) + ['1', '2', '3'] + >>> dep_parenreduce(['1', '(', '2', '3', ')', '4']) + ['1', ['2', '3'], '4'] + """ + + while mypos < len(mysplit): + if mysplit[mypos] == "(": + firstpos = mypos + mypos = mypos + 1 + while mypos < len(mysplit): + if mysplit[mypos] == ")": + mysplit[firstpos:mypos+1] = [mysplit[firstpos+1:mypos]] + mypos = firstpos + break + elif mysplit[mypos] == "(": + # recurse + mysplit = dep_parenreduce(mysplit,mypos) + mypos = mypos + 1 + mypos = mypos + 1 + return mysplit + + +def dep_opconvert(mysplit, myuse): + "Does dependency operator conversion" + + mypos = 0 + newsplit = [] + while mypos < len(mysplit): + if type(mysplit[mypos]) == types.ListType: + newsplit.append(dep_opconvert(mysplit[mypos],myuse)) + mypos += 1 + elif 
mysplit[mypos] == ")": + # mismatched paren, error + return None + elif mysplit[mypos]=="||": + if ((mypos+1)>=len(mysplit)) or (type(mysplit[mypos+1])!=types.ListType): + # || must be followed by paren'd list + return None + try: + mynew = dep_opconvert(mysplit[mypos+1],myuse) + except Exception, e: + error("unable to satisfy OR dependancy: " + string.join(mysplit," || ")) + raise e + mynew[0:0] = ["||"] + newsplit.append(mynew) + mypos += 2 + elif mysplit[mypos][-1] == "?": + # use clause, i.e "gnome? ( foo bar )" + # this is a quick and dirty hack so that repoman can enable all USE vars: + if (len(myuse) == 1) and (myuse[0] == "*"): + # enable it even if it's ! (for repoman) but kill it if it's + # an arch variable that isn't for this arch. XXX Sparc64? + if (mysplit[mypos][:-1] not in settings.usemask) or \ + (mysplit[mypos][:-1]==settings["ARCH"]): + enabled=1 + else: + enabled=0 + else: + if mysplit[mypos][0] == "!": + myusevar = mysplit[mypos][1:-1] + enabled = not myusevar in myuse + #if myusevar in myuse: + # enabled = 0 + #else: + # enabled = 1 + else: + myusevar=mysplit[mypos][:-1] + enabled = myusevar in myuse + #if myusevar in myuse: + # enabled=1 + #else: + # enabled=0 + if (mypos +2 < len(mysplit)) and (mysplit[mypos+2] == ":"): + # colon mode + if enabled: + # choose the first option + if type(mysplit[mypos+1]) == types.ListType: + newsplit.append(dep_opconvert(mysplit[mypos+1],myuse)) + else: + newsplit.append(mysplit[mypos+1]) + else: + # choose the alternate option + if type(mysplit[mypos+1]) == types.ListType: + newsplit.append(dep_opconvert(mysplit[mypos+3],myuse)) + else: + newsplit.append(mysplit[mypos+3]) + mypos += 4 + else: + # normal use mode + if enabled: + if type(mysplit[mypos+1]) == types.ListType: + newsplit.append(dep_opconvert(mysplit[mypos+1],myuse)) + else: + newsplit.append(mysplit[mypos+1]) + # otherwise, continue + mypos += 2 + else: + # normal item + newsplit.append(mysplit[mypos]) + mypos += 1 + return newsplit + +class 
class digraph:
    """beautiful directed graph object

    Nodes are stored in self.dict as mykey -> [refcount, [parents]],
    where refcount is the number of other nodes listing mykey as a
    parent.  self.okeys keeps keys in insertion order so firstzero()
    is deterministic.
    """

    def __init__(self):
        self.dict = {}
        # okeys = keys, in order they were added (to optimize firstzero() ordering)
        self.okeys = []
        self.__callback_cache = []

    def __str__(self):
        # renamed local: the original shadowed the 'str' builtin
        ret = ""
        for key in self.okeys:
            ret += "%s:\t%s\n" % (key, self.dict[key][1])
        return ret

    def addnode(self, mykey, myparent):
        """Add mykey (optionally depending on myparent); bumps the parent's refcount."""
        if not mykey in self.dict:
            self.okeys.append(mykey)
            if myparent == None:
                self.dict[mykey] = [0, []]
            else:
                self.dict[mykey] = [0, [myparent]]
                self.dict[myparent][0] = self.dict[myparent][0] + 1
            return
        if myparent and (not myparent in self.dict[mykey][1]):
            self.dict[mykey][1].append(myparent)
            self.dict[myparent][0] = self.dict[myparent][0] + 1

    def delnode(self, mykey, ref = 1):
        """Delete a node

        If ref is 1, remove references to this node from other nodes.
        If ref is 2, remove nodes that reference this node."""
        if not mykey in self.dict:
            return
        # release our references to our parents
        for x in self.dict[mykey][1]:
            self.dict[x][0] = self.dict[x][0] - 1
        del self.dict[mykey]
        # drop every occurrence of mykey from the ordered key list
        while 1:
            try:
                self.okeys.remove(mykey)
            except ValueError:
                break
        if ref:
            __kill = []
            for k in self.okeys:
                if mykey in self.dict[k][1]:
                    if ref == 1 or ref == 2:
                        self.dict[k][1].remove(mykey)
                    if ref == 2:
                        __kill.append(k)
            for l in __kill:
                self.delnode(l, ref)

    def allnodes(self):
        "returns all nodes in the dictionary"
        return self.dict.keys()

    def firstzero(self):
        "returns first node with zero references, or NULL if no such node exists"
        for x in self.okeys:
            if self.dict[x][0] == 0:
                return x
        return None

    def firstnonzero(self):
        "returns first node with nonzero references, or NULL if no such node exists"
        for x in self.okeys:
            if self.dict[x][0] != 0:
                return x
        return None

    def allzeros(self):
        "returns all nodes with zero references, or NULL if no such node exists"
        zerolist = []
        for x in self.dict.keys():
            if self.dict[x][0] == 0:
                zerolist.append(x)
        return zerolist

    def hasallzeros(self):
        "returns 0/1, Are all nodes zeros? 1 : 0"
        # bug fix: the original built an unused 'zerolist' here
        for x in self.dict.keys():
            if self.dict[x][0] != 0:
                return 0
        return 1

    def empty(self):
        if len(self.dict) == 0:
            return 1
        return 0

    def hasnode(self, mynode):
        return mynode in self.dict

    def getparents(self, item):
        if not self.hasnode(item):
            return []
        return self.dict[item][1]

    def getchildren(self, item):
        if not self.hasnode(item):
            return []
        children = [i for i in self.okeys if item in self.getparents(i)]
        return children

    def walkdown(self, item, callback, debug = None, usecache = False):
        """Depth-first walk towards the roots, invoking callback(self, node)
        on the way back down.  A callback returning 0 aborts the walk."""
        if not self.hasnode(item):
            return 0

        if usecache:
            if self.__callback_cache.count(item):
                if debug:
                    # prints parenthesized for py2/py3 compatibility (same output)
                    print("hit cache for item: %s" % item)
                return 1

        parents = self.getparents(item)
        children = self.getchildren(item)
        for p in parents:
            if p in children:
                # p is both parent and child of item: call back directly,
                # recursing would loop forever
                if usecache:
                    self.__callback_cache.append(p)
                ret = callback(self, p)
                if ret == 0:
                    return 0
                continue
            if item == p:
                print("eek, i'm my own parent!")
                return 0
            if debug:
                print("item: %s, p: %s" % (item, p))
            ret = self.walkdown(p, callback, debug, usecache)
            if ret == 0:
                return 0
        if usecache:
            self.__callback_cache.append(item)
        return callback(self, item)

    def walkup(self, item, callback):
        """Walk towards the leaves, invoking callback(self, node); a
        callback returning 0 aborts the walk."""
        if not self.hasnode(item):
            return 0

        parents = self.getparents(item)
        children = self.getchildren(item)
        for c in children:
            if c in parents:
                ret = callback(self, item)
                if ret == 0:
                    return 0
                continue
            if item == c:
                print("eek, i'm my own child!")
                return 0
            ret = self.walkup(c, callback)
            if ret == 0:
                return 0
        return callback(self, item)

    def copy(self):
        """Shallow-ish copy: outer node records are duplicated, but the
        parent lists inside them are shared with the original."""
        mygraph = digraph()
        for x in self.dict.keys():
            mygraph.dict[x] = self.dict[x][:]
        mygraph.okeys = self.okeys[:]
        return mygraph
+ +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with + +Based on functions from the base bb module, Copyright 2003 Holger Schurig +""" + +from bb import debug, data, fetch, fatal, error, note, event, mkdirhier +import bb, os + +# data holds flags and function name for a given task +_task_data = data.init() + +# graph represents task interdependencies +_task_graph = bb.digraph() + +# stack represents execution order, excepting dependencies +_task_stack = [] + +# events +class FuncFailed(Exception): + """Executed function failed""" + +class EventException(Exception): + """Exception which is associated with an Event.""" + + def __init__(self, msg, event): + self.event = event + + def getEvent(self): + return self._event + + def setEvent(self, event): + self._event = event + + event = property(getEvent, setEvent, None, "event property") + +class TaskBase(event.Event): + """Base class for task events""" + + def __init__(self, t, d = {}): + self.task = t + self.data = d + + def getTask(self): + return self._task + + def setTask(self, task): + self._task = task + + task = property(getTask, setTask, None, "task property") + + def getData(self): + return self._data + + def setData(self, data): + self._data = data + + data = property(getData, setData, None, "data property") + +class TaskStarted(TaskBase): + """Task execution started""" + +class TaskSucceeded(TaskBase): + """Task execution completed""" + +class TaskFailed(TaskBase): + """Task execution failed""" + +class 
InvalidTask(TaskBase): + """Invalid Task""" + +# functions + +def init(data): + global _task_data, _task_graph, _task_stack + _task_data = data.init() + _task_graph = bb.digraph() + _task_stack = [] + + +def exec_func(func, d, dirs = None): + """Execute an BB 'function'""" + + body = data.getVar(func, d) + if not body: + return + + if not dirs: + dirs = (data.getVarFlag(func, 'dirs', d) or "").split() + for adir in dirs: + adir = data.expand(adir, d) + mkdirhier(adir) + + if len(dirs) > 0: + adir = dirs[-1] + else: + adir = data.getVar('B', d, 1) + + adir = data.expand(adir, d) + + try: + prevdir = os.getcwd() + except OSError: + prevdir = data.expand('${TOPDIR}', d) + if adir and os.access(adir, os.F_OK): + os.chdir(adir) + + if data.getVarFlag(func, "python", d): + exec_func_python(func, d) + else: + exec_func_shell(func, d) + os.chdir(prevdir) + +def exec_func_python(func, d): + """Execute a python BB 'function'""" + import re, os + + tmp = "def " + func + "():\n%s" % data.getVar(func, d) + comp = compile(tmp + '\n' + func + '()', bb.data.getVar('FILE', d, 1) + ':' + func, "exec") + prevdir = os.getcwd() + g = {} # globals + g['bb'] = bb + g['os'] = os + g['d'] = d + exec comp in g + if os.path.exists(prevdir): + os.chdir(prevdir) + +def exec_func_shell(func, d): + """Execute a shell BB 'function' Returns true if execution was successful. + + For this, it creates a bash shell script in the tmp dectory, writes the local + data into it and finally executes. The output of the shell will end in a log file and stdout. + + Note on directory behavior. The 'dirs' varflag should contain a list + of the directories you need created prior to execution. The last + item in the list is where we will chdir/cd to. 
+ """ + import sys + + deps = data.getVarFlag(func, 'deps', d) + check = data.getVarFlag(func, 'check', d) + if check in globals(): + if globals()[check](func, deps): + return + + global logfile + t = data.getVar('T', d, 1) + if not t: + return 0 + mkdirhier(t) + logfile = "%s/log.%s.%s" % (t, func, str(os.getpid())) + runfile = "%s/run.%s.%s" % (t, func, str(os.getpid())) + + f = open(runfile, "w") + f.write("#!/bin/sh -e\n") + if data.getVar("BBDEBUG", d): f.write("set -x\n") + data.emit_env(f, d) + + f.write("cd %s\n" % os.getcwd()) + if func: f.write("%s\n" % func) + f.close() + os.chmod(runfile, 0775) + if not func: + error("Function not specified") + raise FuncFailed() + + # open logs + si = file('/dev/null', 'r') + try: + if data.getVar("BBDEBUG", d): + so = os.popen("tee \"%s\"" % logfile, "w") + else: + so = file(logfile, 'w') + except OSError, e: + bb.error("opening log file: %s" % e) + pass + + se = so + + # dup the existing fds so we dont lose them + osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()] + oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()] + ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()] + + # replace those fds with our own + os.dup2(si.fileno(), osi[1]) + os.dup2(so.fileno(), oso[1]) + os.dup2(se.fileno(), ose[1]) + + # execute function + prevdir = os.getcwd() + if data.getVarFlag(func, "fakeroot", d): + maybe_fakeroot = "PATH=\"%s\" fakeroot " % bb.data.getVar("PATH", d, 1) + else: + maybe_fakeroot = '' + ret = os.system('%ssh -e %s' % (maybe_fakeroot, runfile)) + os.chdir(prevdir) + + # restore the backups + os.dup2(osi[0], osi[1]) + os.dup2(oso[0], oso[1]) + os.dup2(ose[0], ose[1]) + + # close our logs + si.close() + so.close() + se.close() + + # close the backup fds + os.close(osi[0]) + os.close(oso[0]) + os.close(ose[0]) + + if ret==0: + if not data.getVar("BBDEBUG", d): + os.remove(runfile) +# os.remove(logfile) + return + else: + error("function %s failed" % func) + if data.getVar("BBINCLUDELOGS", d): + 
error("log data follows (%s)" % logfile) + f = open(logfile, "r") + while True: + l = f.readline() + if l == '': + break + l = l.rstrip() + print '| %s' % l + f.close() + else: + error("see log in %s" % logfile) + raise FuncFailed() + + +_task_cache = [] + +def exec_task(task, d): + """Execute an BB 'task' + + The primary difference between executing a task versus executing + a function is that a task exists in the task digraph, and therefore + has dependencies amongst other tasks.""" + + # check if the task is in the graph.. + task_graph = data.getVar('_task_graph', d) + if not task_graph: + task_graph = bb.digraph() + data.setVar('_task_graph', task_graph, d) + task_cache = data.getVar('_task_cache', d) + if not task_cache: + task_cache = [] + data.setVar('_task_cache', task_cache, d) + if not task_graph.hasnode(task): + raise EventException("", InvalidTask(task, d)) + + # check whether this task needs executing.. + if not data.getVarFlag(task, 'force', d): + if stamp_is_current(task, d): + return 1 + + # follow digraph path up, then execute our way back down + def execute(graph, item): + if data.getVarFlag(item, 'task', d): + if item in task_cache: + return 1 + + if task != item: + # deeper than toplevel, exec w/ deps + exec_task(item, d) + return 1 + + try: + debug(1, "Executing task %s" % item) + event.fire(TaskStarted(item, d)) + exec_func(item, d) + event.fire(TaskSucceeded(item, d)) + task_cache.append(item) + except FuncFailed, reason: + note( "Task failed: %s" % reason ) + failedevent = TaskFailed(item, d) + event.fire(failedevent) + raise EventException(None, failedevent) + + # execute + task_graph.walkdown(task, execute) + + # make stamp, or cause event and raise exception + if not data.getVarFlag(task, 'nostamp', d): + mkstamp(task, d) + + +def stamp_is_current(task, d, checkdeps = 1): + """Check status of a given task's stamp. 
returns 0 if it is not current and needs updating.""" + task_graph = data.getVar('_task_graph', d) + if not task_graph: + task_graph = bb.digraph() + data.setVar('_task_graph', task_graph, d) + stamp = data.getVar('STAMP', d) + if not stamp: + return 0 + stampfile = "%s.%s" % (data.expand(stamp, d), task) + if not os.access(stampfile, os.F_OK): + return 0 + + if checkdeps == 0: + return 1 + + import stat + tasktime = os.stat(stampfile)[stat.ST_MTIME] + + _deps = [] + def checkStamp(graph, task): + # check for existance + if data.getVarFlag(task, 'nostamp', d): + return 1 + + if not stamp_is_current(task, d, 0): + return 0 + + depfile = "%s.%s" % (data.expand(stamp, d), task) + deptime = os.stat(depfile)[stat.ST_MTIME] + if deptime > tasktime: + return 0 + return 1 + + return task_graph.walkdown(task, checkStamp) + + +def md5_is_current(task): + """Check if a md5 file for a given task is current""" + + +def mkstamp(task, d): + """Creates/updates a stamp for a given task""" + stamp = data.getVar('STAMP', d) + if not stamp: + return + stamp = "%s.%s" % (data.expand(stamp, d), task) + mkdirhier(os.path.dirname(stamp)) + open(stamp, "w+") + + +def add_task(task, deps, d): + task_graph = data.getVar('_task_graph', d) + if not task_graph: + task_graph = bb.digraph() + data.setVar('_task_graph', task_graph, d) + data.setVarFlag(task, 'task', 1, d) + task_graph.addnode(task, None) + for dep in deps: + if not task_graph.hasnode(dep): + task_graph.addnode(dep, None) + task_graph.addnode(task, dep) + + +def remove_task(task, kill, d): + """Remove an BB 'task'. 
+ + If kill is 1, also remove tasks that depend on this task.""" + + task_graph = data.getVar('_task_graph', d) + if not task_graph: + task_graph = bb.digraph() + data.setVar('_task_graph', task_graph, d) + if not task_graph.hasnode(task): + return + + data.delVarFlag(task, 'task', d) + ref = 1 + if kill == 1: + ref = 2 + task_graph.delnode(task, ref) + +def task_exists(task, d): + task_graph = data.getVar('_task_graph', d) + if not task_graph: + task_graph = bb.digraph() + data.setVar('_task_graph', task_graph, d) + return task_graph.hasnode(task) + +def get_task_data(): + return _task_data diff --git a/lib/bb/data.py b/lib/bb/data.py new file mode 100644 index 000000000..5ec6d9561 --- /dev/null +++ b/lib/bb/data.py @@ -0,0 +1,490 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Data' implementations + +Functions for interacting with the data structure used by the +BitBake build tools. + +Copyright (C) 2003, 2004 Chris Larson + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 59 Temple +Place, Suite 330, Boston, MA 02111-1307 USA. 
+ +Based on functions from the base bb module, Copyright 2003 Holger Schurig +""" + +import sys, os, re, time, types +if sys.argv[0][-5:] == "pydoc": + path = os.path.dirname(os.path.dirname(sys.argv[1])) +else: + path = os.path.dirname(os.path.dirname(sys.argv[0])) +sys.path.append(path) + +from bb import note, debug + +def init(): + return {} + +_data = init() + +def initVar(var, d = _data): + """Non-destructive var init for data structure""" + if not var in d: + d[var] = {} + + if not "flags" in d[var]: + d[var]["flags"] = {} + +__setvar_regexp__ = {} +__setvar_regexp__["_append"] = re.compile('(?P.*?)%s(_(?P.*))?' % "_append") +__setvar_regexp__["_prepend"] = re.compile('(?P.*?)%s(_(?P.*))?' % "_prepend") +__setvar_regexp__["_delete"] = re.compile('(?P.*?)%s(_(?P.*))?' % "_delete") + +def setVar(var, value, d = _data): + """Set a variable to a given value + + Example: + >>> setVar('TEST', 'testcontents') + >>> print getVar('TEST') + testcontents + """ + for v in ["_append", "_prepend", "_delete"]: + match = __setvar_regexp__[v].match(var) + if match: + base = match.group('base') + override = match.group('add') + l = getVarFlag(base, v, d) or [] + if override == 'delete': + if l.count([value, None]): + del l[l.index([value, None])] + l.append([value, override]) + setVarFlag(base, v, l, d) + return + + if not var in d: + initVar(var, d) + if getVarFlag(var, 'matchesenv', d): + delVarFlag(var, 'matchesenv', d) + setVarFlag(var, 'export', 1, d) + d[var]["content"] = value + +def getVar(var, d = _data, exp = 0): + """Gets the value of a variable + + Example: + >>> setVar('TEST', 'testcontents') + >>> print getVar('TEST') + testcontents + """ + if not var in d or not "content" in d[var]: + return None + if exp: + return expand(d[var]["content"], d, var) + return d[var]["content"] + +def delVar(var, d = _data): + """Removes a variable from the data set + + Example: + >>> setVar('TEST', 'testcontents') + >>> print getVar('TEST') + testcontents + >>> delVar('TEST') + 
>>> print getVar('TEST') + None + """ + if var in d: + del d[var] + +def setVarFlag(var, flag, flagvalue, d = _data): + """Set a flag for a given variable to a given value + + Example: + >>> setVarFlag('TEST', 'python', 1) + >>> print getVarFlag('TEST', 'python') + 1 + """ +# print "d[%s][\"flags\"][%s] = %s" % (var, flag, flagvalue) + if not var in d: + initVar(var, d) + d[var]["flags"][flag] = flagvalue + +def getVarFlag(var, flag, d = _data): + """Gets given flag from given var + + Example: + >>> setVarFlag('TEST', 'python', 1) + >>> print getVarFlag('TEST', 'python') + 1 + """ + if var in d and "flags" in d[var] and flag in d[var]["flags"]: + return d[var]["flags"][flag] + return None + +def delVarFlag(var, flag, d = _data): + """Removes a given flag from the variable's flags + + Example: + >>> setVarFlag('TEST', 'testflag', 1) + >>> print getVarFlag('TEST', 'testflag') + 1 + >>> delVarFlag('TEST', 'testflag') + >>> print getVarFlag('TEST', 'testflag') + None + + """ + if var in d and "flags" in d[var] and flag in d[var]["flags"]: + del d[var]["flags"][flag] + +def setVarFlags(var, flags, d = _data): + """Set the flags for a given variable + + Example: + >>> myflags = {} + >>> myflags['test'] = 'blah' + >>> setVarFlags('TEST', myflags) + >>> print getVarFlag('TEST', 'test') + blah + """ + if not var in d: + initVar(var, d) + d[var]["flags"] = flags + +def getVarFlags(var, d = _data): + """Gets a variable's flags + + Example: + >>> setVarFlag('TEST', 'test', 'blah') + >>> print getVarFlags('TEST')['test'] + blah + """ + if var in d and "flags" in d[var]: + return d[var]["flags"] + return None + +def delVarFlags(var, d = _data): + """Removes a variable's flags + + Example: + >>> setVarFlag('TEST', 'testflag', 1) + >>> print getVarFlag('TEST', 'testflag') + 1 + >>> delVarFlags('TEST') + >>> print getVarFlags('TEST') + None + + """ + if var in d and "flags" in d[var]: + del d[var]["flags"] + +def getData(d = _data): + """Returns the data object used""" + return d + 
+def setData(newData, d = _data): + """Sets the data object to the supplied value""" + d = newData + +__expand_var_regexp__ = re.compile(r"\${[^{}]+}") +__expand_python_regexp__ = re.compile(r"\${@.+?}") + +def expand(s, d = _data, varname = None): + """Variable expansion using the data store. + + Example: + Standard expansion: + >>> setVar('A', 'sshd') + >>> print expand('/usr/bin/${A}') + /usr/bin/sshd + + Python expansion: + >>> print expand('result: ${@37 * 72}') + result: 2664 + """ + def var_sub(match): + key = match.group()[2:-1] + if varname and key: + if varname == key: + raise Exception("variable %s references itself!" % varname) + var = getVar(key, d, 1) + if var is not None: + return var + else: + return match.group() + + def python_sub(match): + import bb + code = match.group()[3:-1] + locals()['d'] = d + s = eval(code) + if type(s) == types.IntType: s = str(s) + return s + + if type(s) is not types.StringType: # sanity check + return s + + while s.find('$') != -1: + olds = s + try: + s = __expand_var_regexp__.sub(var_sub, s) + s = __expand_python_regexp__.sub(python_sub, s) + if s == olds: break + if type(s) is not types.StringType: # sanity check + import bb + bb.error('expansion of %s returned non-string %s' % (olds, s)) + except: + import bb + bb.note("%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s)) + raise + return s + +def expandKeys(alterdata = _data, readdata = None): + if readdata == None: + readdata = alterdata + + for key in alterdata.keys(): + ekey = expand(key, readdata) + if key == ekey: + continue + val = getVar(key, alterdata) + if val is None: + continue +# import copy +# setVarFlags(ekey, copy.copy(getVarFlags(key, readdata)), alterdata) + setVar(ekey, val, alterdata) + + for i in ('_append', '_prepend', '_delete'): + dest = getVarFlag(ekey, i, alterdata) or [] + src = getVarFlag(key, i, readdata) or [] + dest.extend(src) + setVarFlag(ekey, i, dest, alterdata) + + delVar(key, alterdata) + +def 
def emit_var(var, o=sys.__stdout__, d = _data, all=False):
    """Emit a variable to be sourced by a shell.

    Writes 'var' to stream o as a shell assignment (or shell function for
    'func'-flagged vars).  Returns 1 if something was written, 0 otherwise.
    With all=True, also emits the unexpanded value as a comment and
    includes non-exported variables.
    """
    # python functions cannot be represented in shell
    if getVarFlag(var, "python", d):
        return 0

    try:
        if all:
            oval = getVar(var, d, 0)
        val = getVar(var, d, 1)
    except KeyboardInterrupt:
        raise
    except:
        o.write('# expansion of %s threw %s\n' % (var, sys.exc_info()[0]))
        return 0

    if all:
        # show the raw (unexpanded) value for debugging
        o.write('# %s=%s\n' % (var, oval))

    if type(val) is not types.StringType:
        return 0

    # vars mirrored verbatim from the environment need not be re-emitted
    if getVarFlag(var, 'matchesenv', d):
        return 0

    # skip names that are not valid shell identifiers
    if var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1:
        return 0

    # bug fix: the original called val.rstrip() and discarded the result,
    # so values of pure trailing whitespace slipped past the check below
    val = val.rstrip()
    if not val:
        return 0

    if getVarFlag(var, "func", d):
# NOTE: should probably check for unbalanced {} within the var
        o.write("%s() {\n%s\n}\n" % (var, val))
    else:
        if getVarFlag(var, "export", d):
            o.write('export ')
        else:
            if not all:
                return 0
# if we're going to output this within doublequotes,
# to a shell, we need to escape the quotes in the var
        alter = re.sub('"', '\\"', val.strip())
        o.write('%s="%s"\n' % (var, alter))
    return 1
while iterating over the dictionary, so remember them + dodel = [] + overrides = (getVar('OVERRIDES', d, 1) or "").split(':') or [] + + def applyOverrides(var, d = _data): + if not overrides: + debug(1, "OVERRIDES not defined, nothing to do") + return + val = getVar(var, d) + for o in overrides: + if var.endswith("_" + o): + l = len(o)+1 + name = var[:-l] + d[name] = d[var] + + for s in d.keys(): + applyOverrides(s, d) + sval = getVar(s, d) or "" + +# Handle line appends: + for (a, o) in getVarFlag(s, '_append', d) or []: + # maybe the OVERRIDE was not yet added so keep the append + if (o and o in overrides) or not o: + delVarFlag(s, '_append', d) + if o: + if not o in overrides: + continue + sval+=a + setVar(s, sval, d) + +# Handle line prepends + for (a, o) in getVarFlag(s, '_prepend', d) or []: + # maybe the OVERRIDE was not yet added so keep the append + if (o and o in overrides) or not o: + delVarFlag(s, '_prepend', d) + if o: + if not o in overrides: + continue + sval=a+sval + setVar(s, sval, d) + +# Handle line deletions + name = s + "_delete" + nameval = getVar(name, d) + if nameval: + sval = getVar(s, d) + if sval: + new = '' + pattern = nameval.replace('\n','').strip() + for line in sval.split('\n'): + if line.find(pattern) == -1: + new = new + '\n' + line + setVar(s, new, d) + dodel.append(name) + +# delete all environment vars no longer needed + for s in dodel: + delVar(s, d) + +def inherits_class(klass, d): + val = getVar('__inherit_cache', d) or "" + if os.path.join('classes', '%s.bbclass' % klass) in val.split(): + return True + return False + +def _test(): + """Start a doctest run on this module""" + import doctest + from bb import data + doctest.testmod(data) + +if __name__ == "__main__": + _test() diff --git a/lib/bb/event.py b/lib/bb/event.py new file mode 100644 index 000000000..dd86e2f44 --- /dev/null +++ b/lib/bb/event.py @@ -0,0 +1,204 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: 
nil -*- +""" +BitBake 'Event' implementation + +Classes and functions for manipulating 'events' in the +BitBake build tools. + +Copyright (C) 2003, 2004 Chris Larson + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 59 Temple +Place, Suite 330, Boston, MA 02111-1307 USA. +""" + +import os, re +class Event: + """Base class for events""" + type = "Event" + +NotHandled = 0 +Handled = 1 +handlers = [] + +def tmpHandler(event): + """Default handler for code events""" + return NotHandled + +def defaultTmpHandler(): + tmp = "def tmpHandler(e):\n\t\"\"\"heh\"\"\"\n\treturn 0" + comp = compile(tmp, "tmpHandler(e)", "exec") + return comp + +def fire(event): + """Fire off an Event""" + for h in handlers: + if type(h).__name__ == "code": + exec(h) + if tmpHandler(event) == Handled: + return Handled + else: + if h(event) == Handled: + return Handled + return NotHandled + +def register(handler): + """Register an Event handler""" + if handler is not None: +# handle string containing python code + if type(handler).__name__ == "str": + return registerCode(handler) +# prevent duplicate registration + if not handler in handlers: + handlers.append(handler) + +def registerCode(handlerStr): + """Register a 'code' Event. + Deprecated interface; call register instead. + + Expects to be passed python code as a string, which will + be passed in turn to compile() and then exec(). 
Note that + the code will be within a function, so should have had + appropriate tabbing put in place.""" + tmp = "def tmpHandler(e):\n%s" % handlerStr + comp = compile(tmp, "tmpHandler(e)", "exec") +# prevent duplicate registration + if not comp in handlers: + handlers.append(comp) + +def remove(handler): + """Remove an Event handler""" + for h in handlers: + if type(handler).__name__ == "str": + return removeCode(handler) + + if handler is h: + handlers.remove(handler) + +def removeCode(handlerStr): + """Remove a 'code' Event handler + Deprecated interface; call remove instead.""" + tmp = "def tmpHandler(e):\n%s" % handlerStr + comp = compile(tmp, "tmpHandler(e)", "exec") + handlers.remove(comp) + +def getName(e): + """Returns the name of a class or class instance""" + if getattr(e, "__name__", None) == None: + return e.__class__.__name__ + else: + return e.__name__ + + +class PkgBase(Event): + """Base class for package events""" + + def __init__(self, t, d = {}): + self.pkg = t + self.data = d + + def getPkg(self): + return self._pkg + + def setPkg(self, pkg): + self._pkg = pkg + + def getData(self): + return self._data + + def setData(self, data): + self._data = data + + pkg = property(getPkg, setPkg, None, "pkg property") + data = property(getData, setData, None, "data property") + + +class BuildBase(Event): + """Base class for bbmake run events""" + + def __init__(self, n, p, c): + self.name = n + self.pkgs = p + self.cfg = c + + def getPkgs(self): + return self._pkgs + + def setPkgs(self, pkgs): + self._pkgs = pkgs + + def getName(self): + return self._name + + def setName(self, name): + self._name = name + + def getCfg(self): + return self._cfg + + def setCfg(self, cfg): + self._cfg = cfg + + pkgs = property(getPkgs, setPkgs, None, "pkgs property") + name = property(getName, setName, None, "name property") + cfg = property(getCfg, setCfg, None, "cfg property") + + +class DepBase(PkgBase): + """Base class for dependency events""" + + def __init__(self, t, 
data, d): + self.dep = d + PkgBase.__init__(self, t, data) + + def getDep(self): + return self._dep + + def setDep(self, dep): + self._dep = dep + + dep = property(getDep, setDep, None, "dep property") + + +class PkgStarted(PkgBase): + """Package build started""" + + +class PkgFailed(PkgBase): + """Package build failed""" + + +class PkgSucceeded(PkgBase): + """Package build completed""" + + +class BuildStarted(BuildBase): + """bbmake build run started""" + + +class BuildCompleted(BuildBase): + """bbmake build run completed""" + + +class UnsatisfiedDep(DepBase): + """Unsatisfied Dependency""" + + +class RecursiveDep(DepBase): + """Recursive Dependency""" + + +class MultipleProviders(PkgBase): + """Multiple Providers""" + diff --git a/lib/bb/fetch.py b/lib/bb/fetch.py new file mode 100644 index 000000000..2697e1e67 --- /dev/null +++ b/lib/bb/fetch.py @@ -0,0 +1,632 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Fetch' implementations + +Classes for obtaining upstream sources for the +BitBake build tools. + +Copyright (C) 2003, 2004 Chris Larson + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 59 Temple +Place, Suite 330, Boston, MA 02111-1307 USA. 
+ +Based on functions from the base bb module, Copyright 2003 Holger Schurig +""" + +import os, re +import bb +import bb.data + +class FetchError(Exception): + """Exception raised when a download fails""" + +class NoMethodError(Exception): + """Exception raised when there is no method to obtain a supplied url or set of urls""" + +class MissingParameterError(Exception): + """Exception raised when a fetch method is missing a critical parameter in the url""" + +#decodeurl("cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81") +#('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}) + +def uri_replace(uri, uri_find, uri_replace, d = bb.data.init()): +# bb.note("uri_replace: operating on %s" % uri) + if not uri or not uri_find or not uri_replace: + bb.debug(1, "uri_replace: passed an undefined value, not replacing") + uri_decoded = list(bb.decodeurl(uri)) + uri_find_decoded = list(bb.decodeurl(uri_find)) + uri_replace_decoded = list(bb.decodeurl(uri_replace)) + result_decoded = ['','','','','',{}] + for i in uri_find_decoded: + loc = uri_find_decoded.index(i) + result_decoded[loc] = uri_decoded[loc] + import types + if type(i) == types.StringType: + import re + if (re.match(i, uri_decoded[loc])): + result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc]) + if uri_find_decoded.index(i) == 2: + if d: + localfn = bb.fetch.localpath(uri, d) + if localfn: + result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d)) +# bb.note("uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc])) + else: +# bb.note("uri_replace: no match") + return uri +# else: +# for j in i.keys(): +# FIXME: apply replacements against options + return bb.encodeurl(result_decoded) + +methods = [] + +def init(urls = [], d = bb.data.init()): + for m in methods: + m.urls = [] + + for u in urls: 
+ for m in methods: + m.data = d + if m.supports(u, d): + m.urls.append(u) + +def go(d = bb.data.init()): + """Fetch all urls""" + for m in methods: + if m.urls: + m.go(d) + +def localpaths(d): + """Return a list of the local filenames, assuming successful fetch""" + local = [] + for m in methods: + for u in m.urls: + local.append(m.localpath(u, d)) + return local + +def localpath(url, d = bb.data.init()): + for m in methods: + if m.supports(url, d): + return m.localpath(url, d) + return url + +class Fetch(object): + """Base class for 'fetch'ing data""" + + def __init__(self, urls = []): + self.urls = [] + for url in urls: + if self.supports(bb.decodeurl(url), d) is 1: + self.urls.append(url) + + def supports(url, d): + """Check to see if this fetch class supports a given url. + Expects supplied url in list form, as outputted by bb.decodeurl(). + """ + return 0 + supports = staticmethod(supports) + + def localpath(url, d = bb.data.init()): + """Return the local filename of a given url assuming a successful fetch. + """ + return url + localpath = staticmethod(localpath) + + def setUrls(self, urls): + self.__urls = urls + + def getUrls(self): + return self.__urls + + urls = property(getUrls, setUrls, None, "Urls property") + + def setData(self, data): + self.__data = data + + def getData(self): + return self.__data + + data = property(getData, setData, None, "Data property") + + def go(self, urls = []): + """Fetch urls""" + raise NoMethodError("Missing implementation for url") + +class Wget(Fetch): + """Class to fetch urls via 'wget'""" + def supports(url, d): + """Check to see if a given url can be fetched using wget. + Expects supplied url in list form, as outputted by bb.decodeurl(). 
+ """ + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d)) + return type in ['http','https','ftp'] + supports = staticmethod(supports) + + def localpath(url, d): +# strip off parameters + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d)) + if "localpath" in parm: +# if user overrides local path, use it. + return parm["localpath"] + url = bb.encodeurl([type, host, path, user, pswd, {}]) + return os.path.join(bb.data.getVar("DL_DIR", d), os.path.basename(url)) + localpath = staticmethod(localpath) + + def go(self, d = bb.data.init(), urls = []): + """Fetch urls""" + def fetch_uri(uri, basename, dl, md5, d): + if os.path.exists(dl): +# file exists, but we didnt complete it.. trying again.. + fetchcmd = bb.data.getVar("RESUMECOMMAND", d, 1) + else: + fetchcmd = bb.data.getVar("FETCHCOMMAND", d, 1) + + bb.note("fetch " + uri) + fetchcmd = fetchcmd.replace("${URI}", uri) + fetchcmd = fetchcmd.replace("${FILE}", basename) + bb.debug(2, "executing " + fetchcmd) + ret = os.system(fetchcmd) + if ret != 0: + return False + +# supposedly complete.. write out md5sum + if bb.which(bb.data.getVar('PATH', d), 'md5sum'): + try: + md5pipe = os.popen('md5sum ' + dl) + md5data = (md5pipe.readline().split() or [ "" ])[0] + md5pipe.close() + except OSError: + md5data = "" + md5out = file(md5, 'w') + md5out.write(md5data) + md5out.close() + else: + md5out = file(md5, 'w') + md5out.write("") + md5out.close() + return True + + if not urls: + urls = self.urls + + from copy import deepcopy + localdata = deepcopy(d) + bb.data.setVar('OVERRIDES', "wget:" + bb.data.getVar('OVERRIDES', localdata), localdata) + bb.data.update_data(localdata) + + for uri in urls: + completed = 0 + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(uri, localdata)) + basename = os.path.basename(path) + dl = self.localpath(uri, d) + dl = bb.data.expand(dl, localdata) + md5 = dl + '.md5' + + if os.path.exists(md5): +# complete, nothing to see here.. 
+ continue + + premirrors = [ i.split() for i in (bb.data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] + for (find, replace) in premirrors: + newuri = uri_replace(uri, find, replace) + if newuri != uri: + if fetch_uri(newuri, basename, dl, md5, localdata): + completed = 1 + break + + if completed: + continue + + if fetch_uri(uri, basename, dl, md5, localdata): + continue + +# try mirrors + mirrors = [ i.split() for i in (bb.data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] + for (find, replace) in mirrors: + newuri = uri_replace(uri, find, replace) + if newuri != uri: + if fetch_uri(newuri, basename, dl, md5, localdata): + completed = 1 + break + + if not completed: + raise FetchError(uri) + + del localdata + + +methods.append(Wget()) + +class Cvs(Fetch): + """Class to fetch a module or modules from cvs repositories""" + def supports(url, d): + """Check to see if a given url can be fetched with cvs. + Expects supplied url in list form, as outputted by bb.decodeurl(). + """ + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d)) + return type in ['cvs', 'pserver'] + supports = staticmethod(supports) + + def localpath(url, d): + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d)) + if "localpath" in parm: +# if user overrides local path, use it. 
+ return parm["localpath"] + + if not "module" in parm: + raise MissingParameterError("cvs method needs a 'module' parameter") + else: + module = parm["module"] + if 'tag' in parm: + tag = parm['tag'] + else: + tag = "" + if 'date' in parm: + date = parm['date'] + else: + if not tag: + date = bb.data.getVar("CVSDATE", d, 1) or bb.data.getVar("DATE", d, 1) + else: + date = "" + + return os.path.join(bb.data.getVar("DL_DIR", d, 1),bb.data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, tag, date), d)) + localpath = staticmethod(localpath) + + def go(self, d = bb.data.init(), urls = []): + """Fetch urls""" + if not urls: + urls = self.urls + + from copy import deepcopy + localdata = deepcopy(d) + bb.data.setVar('OVERRIDES', "cvs:%s" % bb.data.getVar('OVERRIDES', localdata), localdata) + bb.data.update_data(localdata) + + for loc in urls: + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(loc, localdata)) + if not "module" in parm: + raise MissingParameterError("cvs method needs a 'module' parameter") + else: + module = parm["module"] + + dlfile = self.localpath(loc, localdata) + dldir = bb.data.getVar('DL_DIR', localdata, 1) +# if local path contains the cvs +# module, consider the dir above it to be the +# download directory +# pos = dlfile.find(module) +# if pos: +# dldir = dlfile[:pos] +# else: +# dldir = os.path.dirname(dlfile) + +# setup cvs options + options = [] + if 'tag' in parm: + tag = parm['tag'] + else: + tag = "" + + if 'date' in parm: + date = parm['date'] + else: + if not tag: + date = bb.data.getVar("CVSDATE", d, 1) or bb.data.getVar("DATE", d, 1) + else: + date = "" + + if "method" in parm: + method = parm["method"] + else: + method = "pserver" + + if "localdir" in parm: + localdir = parm["localdir"] + else: + localdir = module + + cvs_rsh = None + if method == "ext": + if "rsh" in parm: + cvs_rsh = parm["rsh"] + + tarfn = bb.data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata) + 
bb.data.setVar('TARFILES', dlfile, localdata) + bb.data.setVar('TARFN', tarfn, localdata) + + dl = os.path.join(dldir, tarfn) + if os.access(dl, os.R_OK): + bb.debug(1, "%s already exists, skipping cvs checkout." % tarfn) + continue + + pn = bb.data.getVar('PN', d, 1) + cvs_tarball_stash = None + if pn: + cvs_tarball_stash = bb.data.getVar('CVS_TARBALL_STASH_%s' % pn, d, 1) + if cvs_tarball_stash == None: + cvs_tarball_stash = bb.data.getVar('CVS_TARBALL_STASH', d, 1) + if cvs_tarball_stash: + fetchcmd = bb.data.getVar("FETCHCOMMAND_wget", d, 1) + uri = cvs_tarball_stash + tarfn + bb.note("fetch " + uri) + fetchcmd = fetchcmd.replace("${URI}", uri) + ret = os.system(fetchcmd) + if ret == 0: + bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn) + continue + + if date: + options.append("-D %s" % date) + if tag: + options.append("-r %s" % tag) + + olddir = os.path.abspath(os.getcwd()) + os.chdir(bb.data.expand(dldir, localdata)) + +# setup cvsroot + if method == "dir": + cvsroot = path + else: + cvsroot = ":" + method + ":" + user + if pswd: + cvsroot += ":" + pswd + cvsroot += "@" + host + ":" + path + + bb.data.setVar('CVSROOT', cvsroot, localdata) + bb.data.setVar('CVSCOOPTS', " ".join(options), localdata) + bb.data.setVar('CVSMODULE', module, localdata) + cvscmd = bb.data.getVar('FETCHCOMMAND', localdata, 1) + cvsupdatecmd = bb.data.getVar('UPDATECOMMAND', localdata, 1) + + if cvs_rsh: + cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) + cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) + +# create module directory + bb.debug(2, "Fetch: checking for module directory") + pkg=bb.data.expand('${PN}', d) + pkgdir=os.path.join(bb.data.expand('${CVSDIR}', localdata), pkg) + moddir=os.path.join(pkgdir,localdir) + if os.access(os.path.join(moddir,'CVS'), os.R_OK): + bb.note("Update " + loc) +# update sources there + os.chdir(moddir) + myret = os.system(cvsupdatecmd) + else: + bb.note("Fetch " + loc) +# check out sources there + 
bb.mkdirhier(pkgdir) + os.chdir(pkgdir) + bb.debug(1, "Running %s" % cvscmd) + myret = os.system(cvscmd) + + if myret != 0: + try: + os.rmdir(moddir) + except OSError: + pass + raise FetchError(module) + + os.chdir(moddir) + os.chdir('..') +# tar them up to a defined filename + myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir))) + if myret != 0: + try: + os.unlink(tarfn) + except OSError: + pass + os.chdir(olddir) + del localdata + +methods.append(Cvs()) + +class Bk(Fetch): + def supports(url, d): + """Check to see if a given url can be fetched via bitkeeper. + Expects supplied url in list form, as outputted by bb.decodeurl(). + """ + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d)) + return type in ['bk'] + supports = staticmethod(supports) + +methods.append(Bk()) + +class Local(Fetch): + def supports(url, d): + """Check to see if a given url can be fetched in the local filesystem. + Expects supplied url in list form, as outputted by bb.decodeurl(). + """ + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d)) + return type in ['file','patch'] + supports = staticmethod(supports) + + def localpath(url, d): + """Return the local filename of a given url assuming a successful fetch. + """ + path = url.split("://")[1] + newpath = path + if path[0] != "/": + filespath = bb.data.getVar('FILESPATH', d, 1) + if filespath: + newpath = bb.which(filespath, path) + if not newpath: + filesdir = bb.data.getVar('FILESDIR', d, 1) + if filesdir: + newpath = os.path.join(filesdir, path) + return newpath + localpath = staticmethod(localpath) + + def go(self, urls = []): + """Fetch urls (no-op for Local method)""" +# no need to fetch local files, we'll deal with them in place. + return 1 + +methods.append(Local()) + +class Svn(Fetch): + """Class to fetch a module or modules from svn repositories""" + def supports(url, d): + """Check to see if a given url can be fetched with svn. 
+ Expects supplied url in list form, as outputted by bb.decodeurl(). + """ + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d)) + return type in ['svn'] + supports = staticmethod(supports) + + def localpath(url, d): + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(url, d)) + if "localpath" in parm: +# if user overrides local path, use it. + return parm["localpath"] + + if not "module" in parm: + raise MissingParameterError("svn method needs a 'module' parameter") + else: + module = parm["module"] + if 'rev' in parm: + revision = parm['rev'] + else: + revision = "" + + date = bb.data.getVar("CVSDATE", d, 1) or bb.data.getVar("DATE", d, 1) + + return os.path.join(bb.data.getVar("DL_DIR", d, 1),bb.data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, revision, date), d)) + localpath = staticmethod(localpath) + + def go(self, d = bb.data.init(), urls = []): + """Fetch urls""" + if not urls: + urls = self.urls + + from copy import deepcopy + localdata = deepcopy(d) + bb.data.setVar('OVERRIDES', "svn:%s" % bb.data.getVar('OVERRIDES', localdata), localdata) + bb.data.update_data(localdata) + + for loc in urls: + (type, host, path, user, pswd, parm) = bb.decodeurl(bb.data.expand(loc, localdata)) + if not "module" in parm: + raise MissingParameterError("svn method needs a 'module' parameter") + else: + module = parm["module"] + + dlfile = self.localpath(loc, localdata) + dldir = bb.data.getVar('DL_DIR', localdata, 1) +# if local path contains the svn +# module, consider the dir above it to be the +# download directory +# pos = dlfile.find(module) +# if pos: +# dldir = dlfile[:pos] +# else: +# dldir = os.path.dirname(dlfile) + +# setup svn options + options = [] + if 'rev' in parm: + revision = parm['rev'] + else: + revision = "" + + date = bb.data.getVar("CVSDATE", d, 1) or bb.data.getVar("DATE", d, 1) + + if "method" in parm: + method = parm["method"] + else: + method = "pserver" + + svn_rsh = None + if method == 
"ext": + if "rsh" in parm: + svn_rsh = parm["rsh"] + + tarfn = bb.data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, revision, date), localdata) + bb.data.setVar('TARFILES', dlfile, localdata) + bb.data.setVar('TARFN', tarfn, localdata) + + dl = os.path.join(dldir, tarfn) + if os.access(dl, os.R_OK): + bb.debug(1, "%s already exists, skipping svn checkout." % tarfn) + continue + + svn_tarball_stash = bb.data.getVar('CVS_TARBALL_STASH', d, 1) + if svn_tarball_stash: + fetchcmd = bb.data.getVar("FETCHCOMMAND_wget", d, 1) + uri = svn_tarball_stash + tarfn + bb.note("fetch " + uri) + fetchcmd = fetchcmd.replace("${URI}", uri) + ret = os.system(fetchcmd) + if ret == 0: + bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn) + continue + + olddir = os.path.abspath(os.getcwd()) + os.chdir(bb.data.expand(dldir, localdata)) + +# setup svnroot +# svnroot = ":" + method + ":" + user +# if pswd: +# svnroot += ":" + pswd + svnroot = host + path + + bb.data.setVar('SVNROOT', svnroot, localdata) + bb.data.setVar('SVNCOOPTS', " ".join(options), localdata) + bb.data.setVar('SVNMODULE', module, localdata) + svncmd = bb.data.getVar('FETCHCOMMAND', localdata, 1) + svncmd = "svn co http://%s/%s" % (svnroot, module) + + if revision: + svncmd = "svn co -r %s http://%s/%s" % (revision, svnroot, module) + if svn_rsh: + svncmd = "svn_RSH=\"%s\" %s" % (svn_rsh, svncmd) + +# create temp directory + bb.debug(2, "Fetch: creating temporary directory") + bb.mkdirhier(bb.data.expand('${WORKDIR}', localdata)) + bb.data.setVar('TMPBASE', bb.data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata) + tmppipe = os.popen(bb.data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") + tmpfile = tmppipe.readline().strip() + if not tmpfile: + bb.error("Fetch: unable to create temporary directory.. 
make sure 'mktemp' is in the PATH.") + raise FetchError(module) + +# check out sources there + os.chdir(tmpfile) + bb.note("Fetch " + loc) + bb.debug(1, "Running %s" % svncmd) + myret = os.system(svncmd) + if myret != 0: + try: + os.rmdir(tmpfile) + except OSError: + pass + raise FetchError(module) + + os.chdir(os.path.join(tmpfile, os.path.dirname(module))) +# tar them up to a defined filename + myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module))) + if myret != 0: + try: + os.unlink(tarfn) + except OSError: + pass +# cleanup + os.system('rm -rf %s' % tmpfile) + os.chdir(olddir) + del localdata + +methods.append(Svn()) diff --git a/lib/bb/make.py b/lib/bb/make.py new file mode 100644 index 000000000..62437be36 --- /dev/null +++ b/lib/bb/make.py @@ -0,0 +1,263 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake 'Make' implementations + +Functions for reading BB files, building a dependency graph and +building a set of BB files while walking along the dependency graph. + +Copyright (C) 2003, 2004 Mickey Lauer +Copyright (C) 2003, 2004 Phil Blundell +Copyright (C) 2003, 2004 Chris Larson + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 59 Temple +Place, Suite 330, Boston, MA 02111-1307 USA. + +This file is part of the BitBake build tools. 
+""" + +from bb import debug, digraph, data, fetch, fatal, error, note, event, parse +import copy, bb, re, sys, os, glob, sre_constants +try: + import cPickle as pickle +except ImportError: + import pickle + print "NOTE: Importing cPickle failed. Falling back to a very slow implementation." + +pkgdata = {} +cfg = data.init() +cache = None +digits = "0123456789" +ascii_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" +mtime_cache = {} + +def get_bbfiles( path = os.getcwd() ): + """Get list of default .bb files by reading out the current directory""" + contents = os.listdir(path) + bbfiles = [] + for f in contents: + (root, ext) = os.path.splitext(f) + if ext == ".bb": + bbfiles.append(os.path.abspath(os.path.join(os.getcwd(),f))) + return bbfiles + +def find_bbfiles( path ): + """Find all the .bb files in a directory (uses find)""" + findcmd = 'find ' + path + ' -name *.bb | grep -v SCCS/' + try: + finddata = os.popen(findcmd) + except OSError: + return [] + return finddata.readlines() + +def deps_clean(d): + depstr = data.getVar('__depends', d) + if depstr: + deps = depstr.split(" ") + for dep in deps: + (f,old_mtime_s) = dep.split("@") + old_mtime = int(old_mtime_s) + new_mtime = parse.cached_mtime(f) + if (new_mtime > old_mtime): + return False + return True + +def load_bbfile( bbfile ): + """Load and parse one .bb build file""" + + if not cache in [None, '']: + cache_bbfile = bbfile.replace( '/', '_' ) + + try: + cache_mtime = os.stat( "%s/%s" % ( cache, cache_bbfile ) )[8] + except OSError: + cache_mtime = 0 + file_mtime = parse.cached_mtime(bbfile) + + if file_mtime > cache_mtime: + #print " : '%s' dirty. reparsing..." % bbfile + pass + else: + #print " : '%s' clean. loading from cache..." 
% bbfile + cache_data = unpickle_bb( cache_bbfile ) + if deps_clean(cache_data): + return cache_data, True + + bbpath = data.getVar('BBPATH', cfg) + safebbpath = data.getVar('BBPATH', cfg) + topdir = data.getVar('TOPDIR', cfg) + if not topdir: + topdir = os.path.abspath(os.getcwd()) + # set topdir to here + data.setVar('TOPDIR', topdir, cfg) + bbfile = os.path.abspath(bbfile) + bbfile_loc = os.path.abspath(os.path.dirname(bbfile)) + # expand tmpdir to include this topdir + data.setVar('TMPDIR', data.getVar('TMPDIR', cfg, 1) or "", cfg) + # add topdir to bbpath + # bbpath = "%s:%s" % (topdir, bbpath) + # set topdir to location of .bb file + topdir = bbfile_loc + #data.setVar('TOPDIR', topdir, cfg) + # add that topdir to bbpath + bbpath = "%s:%s" % (topdir, bbpath) + # go there + oldpath = os.path.abspath(os.getcwd()) + os.chdir(topdir) + data.setVar('BBPATH', bbpath, cfg) + bb = copy.deepcopy(cfg) + try: + parse.handle(bbfile, bb) # read .bb data + if not cache in [None, '']: pickle_bb( cache_bbfile, bb) # write cache + os.chdir(oldpath) + return bb, False + finally: + os.chdir(oldpath) + data.setVar('BBPATH', safebbpath, cfg) + +def pickle_bb( bbfile, bb ): + p = pickle.Pickler( file( "%s/%s" % ( cache, bbfile ), "wb" ), -1 ) + p.dump( bb ) + +def unpickle_bb( bbfile ): + p = pickle.Unpickler( file( "%s/%s" % ( cache, bbfile ), "rb" ) ) + bb = p.load() + funcstr = data.getVar('__functions__', bb) + if funcstr: + comp = compile(funcstr, "", "exec") + exec comp in __builtins__ + return bb + +def collect_bbfiles( progressCallback ): + """Collect all available .bb build files""" + + parsed, cached, skipped, masked = 0, 0, 0, 0 + global cache + cache = bb.data.getVar( "CACHE", cfg, 1 ) + if not cache in [None, '']: + print "NOTE: Using cache in '%s'" % cache + try: + os.stat( cache ) + except OSError: + bb.mkdirhier( cache ) + else: print "NOTE: Not using a cache. Set CACHE = to enable." 
+ files = (data.getVar( "BBFILES", cfg, 1 ) or "").split() + data.setVar("BBFILES", " ".join(files), cfg) + + if not len(files): + files = get_bbfiles() + + if not len(files): + bb.error("no files to build.") + + newfiles = [] + for f in files: + if os.path.isdir(f): + dirfiles = find_bbfiles(f) + if dirfiles: + newfiles += dirfiles + continue + newfiles += glob.glob(f) or [ f ] + + bbmask = bb.data.getVar('BBMASK', cfg, 1) or "" + try: + bbmask_compiled = re.compile(bbmask) + except sre_constants.error: + bb.fatal("BBMASK is not a valid regular expression.") + + for i in xrange( len( newfiles ) ): + f = newfiles[i] + if bbmask and bbmask_compiled.search(f): + bb.debug(1, "bbmake: skipping %s" % f) + masked += 1 + continue + progressCallback( i + 1, len( newfiles ), f ) + debug(1, "bbmake: parsing %s" % f) + + # read a file's metadata + try: + pkgdata[f], fromCache = load_bbfile(f) + if fromCache: cached += 1 + else: parsed += 1 + deps = None + if pkgdata[f] is not None: + # allow metadata files to add items to BBFILES + #data.update_data(pkgdata[f]) + addbbfiles = data.getVar('BBFILES', pkgdata[f]) or None + if addbbfiles: + for aof in addbbfiles.split(): + if not files.count(aof): + if not os.path.isabs(aof): + aof = os.path.join(os.path.dirname(f),aof) + files.append(aof) + for var in pkgdata[f].keys(): + if data.getVarFlag(var, "handler", pkgdata[f]) and data.getVar(var, pkgdata[f]): + event.register(data.getVar(var, pkgdata[f])) + except IOError, e: + bb.error("opening %s: %s" % (f, e)) + pass + except bb.parse.SkipPackage: + skipped += 1 + pass + except KeyboardInterrupt: + raise + except Exception, e: + bb.error("%s while parsing %s" % (e, f)) + print "\rNOTE: Parsing finished. %d cached, %d parsed, %d skipped, %d masked." 
% ( cached, parsed, skipped, masked ), + +def explode_version(s): + import string + r = [] + alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$') + numeric_regexp = re.compile('^(\d+)(.*)$') + while (s != ''): + if s[0] in digits: + m = numeric_regexp.match(s) + r.append(int(m.group(1))) + s = m.group(2) + continue + if s[0] in ascii_letters: + m = alpha_regexp.match(s) + r.append(m.group(1)) + s = m.group(2) + continue + s = s[1:] + return r + +def vercmp_part(a, b): + va = explode_version(a) + vb = explode_version(b) + while True: + if va == []: + ca = None + else: + ca = va.pop(0) + if vb == []: + cb = None + else: + cb = vb.pop(0) + if ca == None and cb == None: + return 0 + if ca > cb: + return 1 + if ca < cb: + return -1 + +def vercmp(ta, tb): + (va, ra) = ta + (vb, rb) = tb + + r = vercmp_part(va, vb) + if (r == 0): + r = vercmp_part(ra, rb) + return r diff --git a/lib/bb/manifest.py b/lib/bb/manifest.py new file mode 100644 index 000000000..30bb45472 --- /dev/null +++ b/lib/bb/manifest.py @@ -0,0 +1,144 @@ +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +# +# Copyright (C) 2003, 2004 Chris Larson +# +# This program is free software; you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation; either version 2 of the License, or (at your option) any later +# version. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. +# +# You should have received a copy of the GNU General Public License along with +# this program; if not, write to the Free Software Foundation, Inc., 59 Temple +# Place, Suite 330, Boston, MA 02111-1307 USA. 
+ +import os, sys +import bb, bb.data + +def getfields(line): + fields = {} + fieldmap = ( "pkg", "src", "dest", "type", "mode", "uid", "gid", "major", "minor", "start", "inc", "count" ) + for f in xrange(len(fieldmap)): + fields[fieldmap[f]] = None + + if not line: + return None + + splitline = line.split() + if not len(splitline): + return None + + try: + for f in xrange(len(fieldmap)): + if splitline[f] == '-': + continue + fields[fieldmap[f]] = splitline[f] + except IndexError: + pass + return fields + +def parse (mfile, d): + manifest = [] + while 1: + line = mfile.readline() + if not line: + break + if line.startswith("#"): + continue + fields = getfields(line) + if not fields: + continue + manifest.append(fields) + return manifest + +def emit (func, manifest, d): +#str = "%s () {\n" % func + str = "" + for line in manifest: + emittedline = emit_line(func, line, d) + if not emittedline: + continue + str += emittedline + "\n" +# str += "}\n" + return str + +def mangle (func, line, d): + import copy + newline = copy.copy(line) + src = bb.data.expand(newline["src"], d) + + if src: + if not os.path.isabs(src): + src = "${WORKDIR}/" + src + + dest = newline["dest"] + if not dest: + return + + if dest.startswith("/"): + dest = dest[1:] + + if func is "do_install": + dest = "${D}/" + dest + + elif func is "do_populate": + dest = "${WORKDIR}/install/" + newline["pkg"] + "/" + dest + + elif func is "do_stage": + varmap = {} + varmap["${bindir}"] = "${STAGING_DIR}/${HOST_SYS}/bin" + varmap["${libdir}"] = "${STAGING_DIR}/${HOST_SYS}/lib" + varmap["${includedir}"] = "${STAGING_DIR}/${HOST_SYS}/include" + varmap["${datadir}"] = "${STAGING_DATADIR}" + + matched = 0 + for key in varmap.keys(): + if dest.startswith(key): + dest = varmap[key] + "/" + dest[len(key):] + matched = 1 + if not matched: + newline = None + return + else: + newline = None + return + + newline["src"] = src + newline["dest"] = dest + return newline + +def emit_line (func, line, d): + import copy + 
newline = copy.deepcopy(line) + newline = mangle(func, newline, d) + if not newline: + return None + + str = "" + type = newline["type"] + mode = newline["mode"] + src = newline["src"] + dest = newline["dest"] + if type is "d": + str = "install -d " + if mode: + str += "-m %s " % mode + str += dest + elif type is "f": + if not src: + return None + if dest.endswith("/"): + str = "install -d " + str += dest + "\n" + str += "install " + else: + str = "install -D " + if mode: + str += "-m %s " % mode + str += src + " " + dest + del newline + return str diff --git a/lib/bb/parse/BBHandler.py b/lib/bb/parse/BBHandler.py new file mode 100644 index 000000000..32f4ce873 --- /dev/null +++ b/lib/bb/parse/BBHandler.py @@ -0,0 +1,376 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +"""class for handling .bb files + + Reads a .bb file and obtains its metadata + + Copyright (C) 2003, 2004 Chris Larson + Copyright (C) 2003, 2004 Phil Blundell + + This program is free software; you can redistribute it and/or modify it under + the terms of the GNU General Public License as published by the Free Software + Foundation; either version 2 of the License, or (at your option) any later + version. + + This program is distributed in the hope that it will be useful, but WITHOUT + ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS + FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. 
+ + You should have received a copy of the GNU General Public License along with + this program; if not, write to the Free Software Foundation, Inc., 59 Temple + Place, Suite 330, Boston, MA 02111-1307 USA.""" + +import re, bb, os, sys +import bb.fetch, bb.build +from bb import debug, data, fetch, fatal + +from bb.parse.ConfHandler import include, localpath, obtain, init +from bb.parse import ParseError + +__func_start_regexp__ = re.compile( r"(((?Ppython)|(?Pfakeroot))\s*)*(?P[\w\-\+]+)?\s*\(\s*\)\s*{$" ) +__inherit_regexp__ = re.compile( r"inherit\s+(.+)" ) +__export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" ) +__addtask_regexp__ = re.compile("addtask\s+(?P\w+)\s*((before\s*(?P((.*(?=after))|(.*))))|(after\s*(?P((.*(?=before))|(.*)))))*") +__addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" ) +__def_regexp__ = re.compile( r"def\s+(\w+).*:" ) +__python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" ) +__word__ = re.compile(r"\S+") + +__infunc__ = "" +__inpython__ = False +__body__ = [] +__bbpath_found__ = 0 +__classname__ = "" +classes = [ None, ] + +def supports(fn, d): + localfn = localpath(fn, d) + return localfn[-3:] == ".bb" or localfn[-8:] == ".bbclass" or localfn[-4:] == ".inc" + +def inherit(files, d): + __inherit_cache = data.getVar('__inherit_cache', d) or "" + fn = "" + lineno = 0 + for f in files: + file = data.expand(f, d) + if file[0] != "/" and file[-8:] != ".bbclass": + file = os.path.join('classes', '%s.bbclass' % file) + + if not file in __inherit_cache.split(): + debug(2, "BB %s:%d: inheriting %s" % (fn, lineno, file)) + __inherit_cache += " %s" % file + include(fn, file, d) + data.setVar('__inherit_cache', __inherit_cache, d) + + +def handle(fn, d = {}, include = 0): + global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __bbpath_found__, __residue__ + __body__ = [] + __bbpath_found__ = 0 + __infunc__ = "" + __classname__ = "" + __residue__ = [] 
+ + if include == 0: + debug(2, "BB " + fn + ": handle(data)") + else: + debug(2, "BB " + fn + ": handle(data, include)") + + (root, ext) = os.path.splitext(os.path.basename(fn)) + init(d) + + if ext == ".bbclass": + __classname__ = root + classes.append(__classname__) + + if include != 0: + oldfile = data.getVar('FILE', d) + else: + oldfile = None + + fn = obtain(fn, d) + bbpath = [] + if not os.path.isabs(fn): + f = None + vbbpath = data.getVar("BBPATH", d) + if vbbpath: + bbpath += vbbpath.split(":") + for p in bbpath: + p = data.expand(p, d) + j = os.path.join(p, fn) + if os.access(j, os.R_OK): + abs_fn = j + f = open(j, 'r') + break + if f is None: + raise IOError("file not found") + else: + f = open(fn,'r') + abs_fn = fn + + if include: + bb.parse.mark_dependency(d, abs_fn) + + if ext != ".bbclass": + data.setVar('FILE', fn, d) + i = (data.getVar("INHERIT", d, 1) or "").split() + if not "base" in i and __classname__ != "base": + i[0:0] = ["base"] + inherit(i, d) + + lineno = 0 + while 1: + lineno = lineno + 1 + s = f.readline() + if not s: break + s = s.rstrip() + feeder(lineno, s, fn, d) + if __inpython__: + # add a blank line to close out any python definition + feeder(lineno + 1, "", fn, d) + if ext == ".bbclass": + classes.remove(__classname__) + else: + if include == 0: + data.expandKeys(d) + data.update_data(d) + anonqueue = data.getVar("__anonqueue", d, 1) or [] + for anon in anonqueue: + data.setVar("__anonfunc", anon["content"], d) + data.setVarFlags("__anonfunc", anon["flags"], d) + from bb import build + try: + t = data.getVar('T', d) + data.setVar('T', '${TMPDIR}/', d) + build.exec_func("__anonfunc", d) + data.delVar('T', d) + if t: + data.setVar('T', t, d) + except Exception, e: + bb.debug(1, "executing anonymous function: %s" % e) + raise + data.delVar("__anonqueue", d) + data.delVar("__anonfunc", d) + set_additional_vars(fn, d, include) + data.update_data(d) + + for var in d.keys(): + if data.getVarFlag(var, 'handler', d): + 
            # --- tail of BBHandler handle(); its def-line lies above this chunk ---
            # Variables flagged 'handler' register their value as an event handler.
            bb.event.register(data.getVar(var, d))
            continue

        if not data.getVarFlag(var, 'task', d):
            continue

        # Wire up task ordering declared via addtask: 'deps' are tasks this task
        # runs after; 'postdeps' are tasks that must be re-registered to depend
        # on this one (addtask ... before ...).
        deps = data.getVarFlag(var, 'deps', d) or []
        postdeps = data.getVarFlag(var, 'postdeps', d) or []
        bb.build.add_task(var, deps, d)
        for p in postdeps:
            pdeps = data.getVarFlag(p, 'deps', d) or []
            pdeps.append(var)
            data.setVarFlag(p, 'deps', pdeps, d)
            bb.build.add_task(p, pdeps, d)
    if oldfile:
        bb.data.setVar("FILE", oldfile, d)
    return d

def feeder(lineno, s, fn, d):
    """Feed one (already right-stripped) line of a .bb file to the parser.

    This is a line-oriented state machine driven by module-global state:
    __infunc__ collects a shell-function body until the closing '}',
    __inpython__ collects an indented Python 'def' body, and __residue__
    accumulates backslash-continued lines.  Lines matching none of the
    BitBake-specific patterns fall through to ConfHandler.feeder.
    """
    global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__,__infunc__, __body__, __bbpath_found__, classes, bb, __residue__
    if __infunc__:
        # Inside a shell-style function body: collect lines until '}'.
        if s == '}':
            __body__.append('')
            data.setVar(__infunc__, '\n'.join(__body__), d)
            data.setVarFlag(__infunc__, "func", 1, d)
            if __infunc__ == "__anonymous":
                # Anonymous python functions are queued and executed later by
                # handle() (see the __anonqueue processing above).
                anonqueue = bb.data.getVar("__anonqueue", d) or []
                anonitem = {}
                anonitem["content"] = bb.data.getVar("__anonymous", d)
                anonitem["flags"] = bb.data.getVarFlags("__anonymous", d)
                anonqueue.append(anonitem)
                bb.data.setVar("__anonqueue", anonqueue, d)
                bb.data.delVarFlags("__anonymous", d)
                bb.data.delVar("__anonymous", d)
            __infunc__ = ""
            __body__ = []
        else:
            __body__.append(s)
        return

    if __inpython__:
        # Inside a 'def' block: keep collecting while lines still look like
        # python function body; on the first non-matching line, compile and
        # execute the accumulated text, then reprocess the current line.
        m = __python_func_regexp__.match(s)
        if m:
            __body__.append(s)
            return
        else:
            text = '\n'.join(__body__)
            comp = compile(text, "<bb>", "exec")
            exec comp in __builtins__
            __body__ = []
            __inpython__ = False
            funcs = data.getVar('__functions__', d) or ""
            data.setVar('__functions__', "%s\n%s" % (funcs, text), d)
            # fall through

    if s == '' or s[0] == '#': return          # skip comments and empty lines

    if s[-1] == '\\':
        # Line continuation: stash the fragment, wait for the next line.
        __residue__.append(s[:-1])
        return

    s = "".join(__residue__) + s
    __residue__ = []

    m = __func_start_regexp__.match(s)
    if m:
        # Function definition header, e.g. "python do_foo () {".
        __infunc__ = m.group("func") or "__anonymous"
        key = __infunc__
        if data.getVar(key, d):
            # clean up old version of this piece of metadata, as its
            # flags could cause problems
            data.setVarFlag(key, 'python', None, d)
            data.setVarFlag(key, 'fakeroot', None, d)
        if m.group("py") is not None:
            data.setVarFlag(key, "python", "1", d)
        else:
            data.delVarFlag(key, "python", d)
        if m.group("fr") is not None:
            data.setVarFlag(key, "fakeroot", "1", d)
        else:
            data.delVarFlag(key, "fakeroot", d)
        return

    m = __def_regexp__.match(s)
    if m:
        # Start of a plain python 'def': switch to python-collection mode.
        __body__.append(s)
        __inpython__ = True
        return

    m = __export_func_regexp__.match(s)
    if m:
        # EXPORT_FUNCTIONS f1 f2 ...: create class-qualified aliases
        # (<class>_<func>) so subclasses can override and still call through.
        fns = m.group(1)
        n = __word__.findall(fns)
        for f in n:
            allvars = []
            allvars.append(f)
            allvars.append(classes[-1] + "_" + f)

            vars = [[ allvars[0], allvars[1] ]]
            if len(classes) > 1 and classes[-2] is not None:
                allvars.append(classes[-2] + "_" + f)
                vars = []
                vars.append([allvars[2], allvars[1]])
                vars.append([allvars[0], allvars[2]])

            for (var, calledvar) in vars:
                # Do not clobber a user-provided override of the function.
                if data.getVar(var, d) and not data.getVarFlag(var, 'export_func', d):
                    continue

                if data.getVar(var, d):
                    data.setVarFlag(var, 'python', None, d)
                    data.setVarFlag(var, 'func', None, d)

                # Propagate func/python flags from the real implementation,
                # and push 'dirs' back onto it.
                for flag in [ "func", "python" ]:
                    if data.getVarFlag(calledvar, flag, d):
                        data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag, d), d)
                for flag in [ "dirs" ]:
                    if data.getVarFlag(var, flag, d):
                        data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag, d), d)

                if data.getVarFlag(calledvar, "python", d):
                    data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", d)
                else:
                    data.setVar(var, "\t" + calledvar + "\n", d)
                data.setVarFlag(var, 'export_func', '1', d)

        return

    m = __addtask_regexp__.match(s)
    if m:
        # addtask <func> [before <t...>] [after <t...>]
        func = m.group("func")
        before = m.group("before")
        after = m.group("after")
        if func is None:
            return
        var = "do_" + func

        data.setVarFlag(var, "task", 1, d)

        if after is not None:
            # set up deps for function
            data.setVarFlag(var, "deps", after.split(), d)
        if before is not None:
            # set up things that depend on this func
            data.setVarFlag(var, "postdeps", before.split(), d)
        return

    m = __addhandler_regexp__.match(s)
    if m:
        # addhandler <funcs>: flag each named function as an event handler.
        fns = m.group(1)
        hs = __word__.findall(fns)
        for h in hs:
            data.setVarFlag(h, "handler", 1, d)
        return

    m = __inherit_regexp__.match(s)
    if m:

        files = m.group(1)
        n = __word__.findall(files)
        inherit(n, d)
        return

    # Not a .bb-specific construct: hand the line to the conf-file parser.
    from bb.parse import ConfHandler
    return ConfHandler.feeder(lineno, s, fn, d)

# Cache of basename -> split parts, shared across calls to vars_from_file.
__pkgsplit_cache__={}
def vars_from_file(mypkg, d):
    """Split a recipe filename's stem on '_' and pad the result with None
    up to three elements (presumably name/version/revision — the split is
    purely lexical; nothing here validates the fields)."""
    if not mypkg:
        return (None, None, None)
    if mypkg in __pkgsplit_cache__:
        return __pkgsplit_cache__[mypkg]

    myfile = os.path.splitext(os.path.basename(mypkg))
    parts = myfile[0].split('_')
    __pkgsplit_cache__[mypkg] = parts
    exp = 3 - len(parts)
    tmplist = []
    while exp != 0:
        exp -= 1
        tmplist.append(None)
    parts.extend(tmplist)
    return parts

def set_additional_vars(file, d, include):
    """Deduce rest of variables, e.g. ${A} out of ${SRC_URI}"""

    debug(2,"BB %s: set_additional_vars" % file)

    src_uri = data.getVar('SRC_URI', d)
    if not src_uri:
        return
    src_uri = data.expand(src_uri, d)

    a = data.getVar('A', d)
    if a:
        a = data.expand(a, d).split()
    else:
        a = []

    from bb import fetch
    try:
        fetch.init(src_uri.split())
    except fetch.NoMethodError:
        # No fetcher for this scheme: best-effort, leave A as-is.
        pass
    except bb.MalformedUrl,e:
        raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e)

    # A = list of local paths the fetchers will produce for SRC_URI.
    a += fetch.localpaths(d)
    del fetch
    data.setVar('A', " ".join(a), d)


# Add us to the handlers list
from bb.parse import handlers
handlers.append({'supports': supports, 'handle': handle, 'init': init})
del handlers
diff --git a/lib/bb/parse/ConfHandler.py b/lib/bb/parse/ConfHandler.py
new file mode 100644
index 000000000..43cdec665
--- /dev/null
+++ b/lib/bb/parse/ConfHandler.py
@@ -0,0 +1,194 @@
#!/usr/bin/env python
# ex:ts=4:sw=4:sts=4:et
# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil
-*- +"""class for handling configuration data files + + Reads a .conf file and obtains its metadata + + Copyright (C) 2003, 2004 Chris Larson + Copyright (C) 2003, 2004 Phil Blundell + + This program is free software; you can redistribute it and/or modify it under + the terms of the GNU General Public License as published by the Free Software + Foundation; either version 2 of the License, or (at your option) any later + version. + + This program is distributed in the hope that it will be useful, but WITHOUT + ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS + FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + + You should have received a copy of the GNU General Public License along with + this program; if not, write to the Free Software Foundation, Inc., 59 Temple + Place, Suite 330, Boston, MA 02111-1307 USA.""" + +import re, bb.data, os, sys +from bb import debug, fatal + +#__config_regexp__ = re.compile( r"(?Pexport\s*)?(?P[a-zA-Z0-9\-_+.${}]+)\s*(?P:)?(?P\?)?=\s*(?P['\"]?)(?P.*)(?P=apo)$") +__config_regexp__ = re.compile( r"(?Pexport\s*)?(?P[a-zA-Z0-9\-_+.${}/]+)(\[(?P[a-zA-Z0-9\-_+.]+)\])?\s*((?P:=)|(?P\?=)|(?P\+=)|(?P=\+)|=)\s*(?P['\"]?)(?P.*)(?P=apo)$") +__include_regexp__ = re.compile( r"include\s+(.+)" ) + +def init(data): + if not bb.data.getVar('TOPDIR', data): + bb.data.setVar('TOPDIR', os.getcwd(), data) + if not bb.data.getVar('BBPATH', data): + bb.data.setVar('BBPATH', os.path.join(sys.prefix, 'share', 'bitbake')) + +def supports(fn, d): + return localpath(fn, d)[-5:] == ".conf" + +def localpath(fn, d): + if os.path.exists(fn): + return fn + + localfn = None + try: + localfn = bb.fetch.localpath(fn, d) + except bb.MalformedUrl: + pass + + if not localfn: + localfn = fn + return localfn + +def obtain(fn, data = {}): + import sys, bb + fn = bb.data.expand(fn, data) + localfn = bb.data.expand(localpath(fn, data), data) + + if localfn != fn: + dldir = bb.data.getVar('DL_DIR', data, 1) + if not 
dldir: + debug(1, "obtain: DL_DIR not defined") + return localfn + bb.mkdirhier(dldir) + try: + bb.fetch.init([fn]) + except bb.fetch.NoMethodError: + (type, value, traceback) = sys.exc_info() + debug(1, "obtain: no method: %s" % value) + return localfn + + try: + bb.fetch.go(data) + except bb.fetch.MissingParameterError: + (type, value, traceback) = sys.exc_info() + debug(1, "obtain: missing parameters: %s" % value) + return localfn + except bb.fetch.FetchError: + (type, value, traceback) = sys.exc_info() + debug(1, "obtain: failed: %s" % value) + return localfn + return localfn + + +def include(oldfn, fn, data = {}): + if oldfn == fn: # prevent infinate recursion + return None + + import bb + fn = bb.data.expand(fn, data) + oldfn = bb.data.expand(oldfn, data) + + from bb.parse import handle + try: + ret = handle(fn, data, 1) + except IOError: + debug(2, "CONF file '%s' not found" % fn) + +def handle(fn, data = {}, include = 0): + if include: + inc_string = "including" + else: + inc_string = "reading" + init(data) + + if include == 0: + bb.data.inheritFromOS(data) + oldfile = None + else: + oldfile = bb.data.getVar('FILE', data) + + fn = obtain(fn, data) + bbpath = [] + if not os.path.isabs(fn): + f = None + vbbpath = bb.data.getVar("BBPATH", data) + if vbbpath: + bbpath += vbbpath.split(":") + for p in bbpath: + currname = os.path.join(bb.data.expand(p, data), fn) + if os.access(currname, os.R_OK): + f = open(currname, 'r') + abs_fn = currname + debug(1, "CONF %s %s" % (inc_string, currname)) + break + if f is None: + raise IOError("file not found") + else: + f = open(fn,'r') + debug(1, "CONF %s %s" % (inc_string,fn)) + abs_fn = fn + + if include: + bb.parse.mark_dependency(data, abs_fn) + + lineno = 0 + bb.data.setVar('FILE', fn, data) + while 1: + lineno = lineno + 1 + s = f.readline() + if not s: break + w = s.strip() + if not w: continue # skip empty lines + s = s.rstrip() + if s[0] == '#': continue # skip comments + while s[-1] == '\\': + s2 = 
f.readline()[:-1].strip() + lineno = lineno + 1 + s = s[:-1] + s2 + feeder(lineno, s, fn, data) + + if oldfile: + bb.data.setVar('FILE', oldfile, data) + return data + +def feeder(lineno, s, fn, data = {}): + m = __config_regexp__.match(s) + if m: + groupd = m.groupdict() + key = groupd["var"] + if "exp" in groupd and groupd["exp"] != None: + bb.data.setVarFlag(key, "export", 1, data) + if "ques" in groupd and groupd["ques"] != None: + val = bb.data.getVar(key, data) + if val == None: + val = groupd["value"] + elif "colon" in groupd and groupd["colon"] != None: + val = bb.data.expand(groupd["value"], data) + elif "append" in groupd and groupd["append"] != None: + val = "%s %s" % ((bb.data.getVar(key, data) or ""), groupd["value"]) + elif "prepend" in groupd and groupd["prepend"] != None: + val = "%s %s" % (groupd["value"], (bb.data.getVar(key, data) or "")) + else: + val = groupd["value"] + if 'flag' in groupd and groupd['flag'] != None: +# bb.note("setVarFlag(%s, %s, %s, data)" % (key, groupd['flag'], val)) + bb.data.setVarFlag(key, groupd['flag'], val, data) + else: + bb.data.setVar(key, val, data) + return + + m = __include_regexp__.match(s) + if m: + s = bb.data.expand(m.group(1), data) +# debug(2, "CONF %s:%d: including %s" % (fn, lineno, s)) + include(fn, s, data) + return + + raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s)); + +# Add us to the handlers list +from bb.parse import handlers +handlers.append({'supports': supports, 'handle': handle, 'init': init}) +del handlers diff --git a/lib/bb/parse/__init__.py b/lib/bb/parse/__init__.py new file mode 100644 index 000000000..a61630478 --- /dev/null +++ b/lib/bb/parse/__init__.py @@ -0,0 +1,76 @@ +#!/usr/bin/env python +# ex:ts=4:sw=4:sts=4:et +# -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- +""" +BitBake Parsers + +File parsers for the BitBake build tools. 
+ +Copyright (C) 2003, 2004 Chris Larson +Copyright (C) 2003, 2004 Phil Blundell + +This program is free software; you can redistribute it and/or modify it under +the terms of the GNU General Public License as published by the Free Software +Foundation; either version 2 of the License, or (at your option) any later +version. + +This program is distributed in the hope that it will be useful, but WITHOUT +ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. + +You should have received a copy of the GNU General Public License along with +this program; if not, write to the Free Software Foundation, Inc., 59 Temple +Place, Suite 330, Boston, MA 02111-1307 USA. + +Based on functions from the base bb module, Copyright 2003 Holger Schurig +""" +__version__ = '1.0' + +__all__ = [ 'handlers', 'supports', 'handle', 'init', 'ConfHandler', 'BBHandler', 'ParseError' ] +handlers = [] + +class ParseError(Exception): + """Exception raised when parsing fails""" + +class SkipPackage(Exception): + """Exception raised to skip this package""" + +import ConfHandler +ConfHandler.ParseError = ParseError +import BBHandler +BBHandler.ParseError = ParseError + +__mtime_cache = {} + +def cached_mtime(f): + import os + if not __mtime_cache.has_key(f): + __mtime_cache[f] = os.stat(f)[8] + return __mtime_cache[f] + +def mark_dependency(d, f): + import bb, os + if f.startswith('./'): + f = "%s/%s" % (os.getcwd(), f[2:]) + deps = (bb.data.getVar('__depends', d) or "").split() + deps.append("%s@%s" % (f, cached_mtime(f))) + bb.data.setVar('__depends', " ".join(deps), d) + +def supports(fn, data): + """Returns true if we have a handler for this file, false otherwise""" + for h in handlers: + if h['supports'](fn, data): + return 1 + return 0 + +def handle(fn, data, include = 0): + """Call the handler that is appropriate for this file""" + for h in handlers: + if h['supports'](fn, data): + return 
h['handle'](fn, data, include) + return None + +def init(fn, data): + for h in handlers: + if h['supports'](fn): + return h['init'](data) -- cgit 1.2.3-korg