Diffstat (limited to 'bitbake/lib')
35 files changed, 5837 insertions, 0 deletions
diff --git a/bitbake/lib/bb/__init__.py b/bitbake/lib/bb/__init__.py
new file mode 100644
index 0000000000..00b0e8b57f
--- /dev/null
+++ b/bitbake/lib/bb/__init__.py
@@ -0,0 +1,1266 @@
| 1 | #!/usr/bin/python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | """ | ||
| 5 | BitBake Build System Python Library | ||
| 6 | |||
| 7 | Copyright (C) 2003 Holger Schurig | ||
| 8 | Copyright (C) 2003, 2004 Chris Larson | ||
| 9 | |||
| 10 | Based on Gentoo's portage.py. | ||
| 11 | |||
| 12 | This program is free software; you can redistribute it and/or modify it under | ||
| 13 | the terms of the GNU General Public License as published by the Free Software | ||
| 14 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 15 | version. | ||
| 16 | |||
| 17 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 18 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 19 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 20 | |||
| 21 | You should have received a copy of the GNU General Public License along with | ||
| 22 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 23 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 24 | """ | ||
| 25 | |||
| 26 | __version__ = "1.3.2" | ||
| 27 | |||
| 28 | __all__ = [ | ||
| 29 | |||
| 30 | "debug", | ||
| 31 | "note", | ||
| 32 | "error", | ||
| 33 | "fatal", | ||
| 34 | |||
| 35 | "mkdirhier", | ||
| 36 | "movefile", | ||
| 37 | |||
| 38 | "tokenize", | ||
| 39 | "evaluate", | ||
| 40 | "flatten", | ||
| 41 | "relparse", | ||
| 42 | "ververify", | ||
| 43 | "isjustname", | ||
| 44 | "isspecific", | ||
| 45 | "pkgsplit", | ||
| 46 | "catpkgsplit", | ||
| 47 | "vercmp", | ||
| 48 | "pkgcmp", | ||
| 49 | "dep_parenreduce", | ||
| 50 | "dep_opconvert", | ||
| 51 | "digraph", | ||
| 52 | |||
| 53 | # fetch | ||
| 54 | "decodeurl", | ||
| 55 | "encodeurl", | ||
| 56 | |||
| 57 | # modules | ||
| 58 | "parse", | ||
| 59 | "data", | ||
| 60 | "event", | ||
| 61 | "build", | ||
| 62 | "fetch", | ||
| 63 | "manifest" | ||
| 64 | ] | ||
| 65 | |||
| 66 | whitespace = '\t\n\x0b\x0c\r ' | ||
| 67 | lowercase = 'abcdefghijklmnopqrstuvwxyz' | ||
| 68 | |||
| 69 | import sys, os, types, re, string, shutil, commands | ||
| 70 | |||
| 71 | #projectdir = os.path.dirname(os.path.dirname(os.path.abspath(sys.argv[0]))) | ||
| 72 | projectdir = os.getcwd() | ||
| 73 | |||
| 74 | debug_level = 0 | ||
| 75 | |||
| 76 | if "BBDEBUG" in os.environ: | ||
| 77 | level = int(os.environ["BBDEBUG"]) | ||
| 78 | if level: | ||
| 79 | debug_level = level | ||
| 80 | else: | ||
| 81 | debug_level = 0 | ||
| 82 | |||
| 83 | class VarExpandError(Exception): | ||
| 84 | pass | ||
| 85 | |||
| 86 | class MalformedUrl(Exception): | ||
| 87 | """Exception raised when encountering an invalid url""" | ||
| 88 | |||
| 89 | |||
| 90 | ####################################################################### | ||
| 91 | ####################################################################### | ||
| 92 | # | ||
| 93 | # SECTION: Debug | ||
| 94 | # | ||
| 95 | # PURPOSE: little functions to make yourself known | ||
| 96 | # | ||
| 97 | ####################################################################### | ||
| 98 | ####################################################################### | ||
| 99 | |||
| 100 | debug_prepend = '' | ||
| 101 | |||
| 102 | |||
| 103 | def debug(lvl, *args): | ||
| 104 | if debug_level >= lvl: | ||
| 105 | print debug_prepend + 'DEBUG:', ''.join(args) | ||
| 106 | |||
| 107 | def note(*args): | ||
| 108 | print debug_prepend + 'NOTE:', ''.join(args) | ||
| 109 | |||
| 110 | def error(*args): | ||
| 111 | print debug_prepend + 'ERROR:', ''.join(args) | ||
| 112 | |||
| 113 | def fatal(*args): | ||
| 114 | print debug_prepend + 'ERROR:', ''.join(args) | ||
| 115 | sys.exit(1) | ||
| 116 | |||
| 117 | |||
| 118 | ####################################################################### | ||
| 119 | ####################################################################### | ||
| 120 | # | ||
| 121 | # SECTION: File | ||
| 122 | # | ||
| 123 | # PURPOSE: Basic file and directory tree related functions | ||
| 124 | # | ||
| 125 | ####################################################################### | ||
| 126 | ####################################################################### | ||
| 127 | |||
| 128 | def mkdirhier(dir): | ||
| 129 | """Create a directory like 'mkdir -p', but does not complain if | ||
| 130 | directory already exists like os.makedirs | ||
| 131 | """ | ||
| 132 | |||
| 133 | debug(3, "mkdirhier(%s)" % dir) | ||
| 134 | try: | ||
| 135 | os.makedirs(dir) | ||
| 136 | debug(2, "created " + dir) | ||
| 137 | except OSError, e: | ||
| 138 | if e.errno != 17: raise e # 17 == errno.EEXIST | ||
| 139 | |||
| 140 | |||
| 141 | ####################################################################### | ||
| 142 | |||
| 143 | import stat | ||
| 144 | |||
| 145 | def movefile(src,dest,newmtime=None,sstat=None): | ||
| 146 | """Moves a file from src to dest, preserving all permissions and | ||
| 147 | attributes; mtime will be preserved even when moving across | ||
| 148 | filesystems. Returns true on success and false on failure. Move is | ||
| 149 | atomic. | ||
| 150 | """ | ||
| 151 | |||
| 152 | #print "movefile("+src+","+dest+","+str(newmtime)+","+str(sstat)+")" | ||
| 153 | try: | ||
| 154 | if not sstat: | ||
| 155 | sstat=os.lstat(src) | ||
| 156 | except Exception, e: | ||
| 157 | print "!!! Stating source file failed... movefile()" | ||
| 158 | print "!!!",e | ||
| 159 | return None | ||
| 160 | |||
| 161 | destexists=1 | ||
| 162 | try: | ||
| 163 | dstat=os.lstat(dest) | ||
| 164 | except: | ||
| 165 | dstat=os.lstat(os.path.dirname(dest)) | ||
| 166 | destexists=0 | ||
| 167 | |||
| 168 | if destexists: | ||
| 169 | if stat.S_ISLNK(dstat[stat.ST_MODE]): | ||
| 170 | try: | ||
| 171 | os.unlink(dest) | ||
| 172 | destexists=0 | ||
| 173 | except Exception, e: | ||
| 174 | pass | ||
| 175 | |||
| 176 | if stat.S_ISLNK(sstat[stat.ST_MODE]): | ||
| 177 | try: | ||
| 178 | target=os.readlink(src) | ||
| 179 | if destexists and not stat.S_ISDIR(dstat[stat.ST_MODE]): | ||
| 180 | os.unlink(dest) | ||
| 181 | os.symlink(target,dest) | ||
| 182 | # os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID]) | ||
| 183 | os.unlink(src) | ||
| 184 | return os.lstat(dest) | ||
| 185 | except Exception, e: | ||
| 186 | print "!!! failed to properly create symlink:" | ||
| 187 | print "!!!",dest,"->",target | ||
| 188 | print "!!!",e | ||
| 189 | return None | ||
| 190 | |||
| 191 | renamefailed=1 | ||
| 192 | if sstat[stat.ST_DEV]==dstat[stat.ST_DEV]: | ||
| 193 | try: | ||
| 194 | ret=os.rename(src,dest) | ||
| 195 | renamefailed=0 | ||
| 196 | except Exception, e: | ||
| 197 | import errno | ||
| 198 | if e[0]!=errno.EXDEV: | ||
| 199 | # Some random error. | ||
| 200 | print "!!! Failed to move",src,"to",dest | ||
| 201 | print "!!!",e | ||
| 202 | return None | ||
| 203 | # EXDEV: invalid cross-device link, e.g. a 'bind' mount or an actual cross-device move | ||
| 204 | |||
| 205 | if renamefailed: | ||
| 206 | didcopy=0 | ||
| 207 | if stat.S_ISREG(sstat[stat.ST_MODE]): | ||
| 208 | try: # For safety copy then move it over. | ||
| 209 | shutil.copyfile(src,dest+"#new") | ||
| 210 | os.rename(dest+"#new",dest) | ||
| 211 | didcopy=1 | ||
| 212 | except Exception, e: | ||
| 213 | print '!!! copy',src,'->',dest,'failed.' | ||
| 214 | print "!!!",e | ||
| 215 | return None | ||
| 216 | else: | ||
| 217 | #we don't yet handle special, so we need to fall back to /bin/mv | ||
| 218 | a=commands.getstatusoutput("/bin/mv -f "+"'"+src+"' '"+dest+"'") | ||
| 219 | if a[0]!=0: | ||
| 220 | print "!!! Failed to move special file:" | ||
| 221 | print "!!! '"+src+"' to '"+dest+"'" | ||
| 222 | print "!!!",a | ||
| 223 | return None # failure | ||
| 224 | try: | ||
| 225 | if didcopy: | ||
| 226 | os.lchown(dest,sstat[stat.ST_UID],sstat[stat.ST_GID]) | ||
| 227 | os.chmod(dest, stat.S_IMODE(sstat[stat.ST_MODE])) # Sticky is reset on chown | ||
| 228 | os.unlink(src) | ||
| 229 | except Exception, e: | ||
| 230 | print "!!! Failed to chown/chmod/unlink in movefile()" | ||
| 231 | print "!!!",dest | ||
| 232 | print "!!!",e | ||
| 233 | return None | ||
| 234 | |||
| 235 | if newmtime: | ||
| 236 | os.utime(dest,(newmtime,newmtime)) | ||
| 237 | else: | ||
| 238 | os.utime(dest, (sstat[stat.ST_ATIME], sstat[stat.ST_MTIME])) | ||
| 239 | newmtime=sstat[stat.ST_MTIME] | ||
| 240 | return newmtime | ||
| 241 | |||
| 242 | |||
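A quick illustration of how movefile() is meant to be called; the paths are invented, and the key point is that it returns a non-None value on success (the preserved mtime, or an lstat result for symlinks) and None on any failure:

    ret = movefile("/tmp/downloads/foo.tar.gz", "/var/cache/sources/foo.tar.gz")
    if ret is None:
        error("move failed")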
| 243 | |||
| 244 | ####################################################################### | ||
| 245 | ####################################################################### | ||
| 246 | # | ||
| 247 | # SECTION: Download | ||
| 248 | # | ||
| 249 | # PURPOSE: Download via HTTP, FTP, CVS, BITKEEPER, handling of MD5-signatures | ||
| 250 | # and mirrors | ||
| 251 | # | ||
| 252 | ####################################################################### | ||
| 253 | ####################################################################### | ||
| 254 | |||
| 255 | def decodeurl(url): | ||
| 256 | """Decodes an URL into the tokens (scheme, network location, path, | ||
| 257 | user, password, parameters). | ||
| 258 | |||
| 259 | >>> decodeurl("http://www.google.com/index.html") | ||
| 260 | ('http', 'www.google.com', '/index.html', '', '', {}) | ||
| 261 | |||
| 262 | CVS url with username, host and cvsroot. The cvs module to check out is in the | ||
| 263 | parameters: | ||
| 264 | |||
| 265 | >>> decodeurl("cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg") | ||
| 266 | ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}) | ||
| 267 | |||
| 268 | Ditto, but this time the username has a password part, and we also request a specific tag | ||
| 269 | to check out. | ||
| 270 | |||
| 271 | >>> decodeurl("cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;module=familiar/dist/ipkg;tag=V0-99-81") | ||
| 272 | ('cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}) | ||
| 273 | """ | ||
| 274 | |||
| 275 | m = re.compile('(?P<type>[^:]*)://((?P<user>.+)@)?(?P<location>[^;]+)(;(?P<parm>.*))?').match(url) | ||
| 276 | if not m: | ||
| 277 | raise MalformedUrl(url) | ||
| 278 | |||
| 279 | type = m.group('type') | ||
| 280 | location = m.group('location') | ||
| 281 | if not location: | ||
| 282 | raise MalformedUrl(url) | ||
| 283 | user = m.group('user') | ||
| 284 | parm = m.group('parm') | ||
| 285 | m = re.compile('(?P<host>[^/;]+)(?P<path>/[^;]+)').match(location) | ||
| 286 | if m: | ||
| 287 | host = m.group('host') | ||
| 288 | path = m.group('path') | ||
| 289 | else: | ||
| 290 | host = "" | ||
| 291 | path = location | ||
| 292 | if user: | ||
| 293 | m = re.compile('(?P<user>[^:]+)(:?(?P<pswd>.*))').match(user) | ||
| 294 | if m: | ||
| 295 | user = m.group('user') | ||
| 296 | pswd = m.group('pswd') | ||
| 297 | else: | ||
| 298 | user = '' | ||
| 299 | pswd = '' | ||
| 300 | |||
| 301 | p = {} | ||
| 302 | if parm: | ||
| 303 | for s in parm.split(';'): | ||
| 304 | s1,s2 = s.split('=') | ||
| 305 | p[s1] = s2 | ||
| 306 | |||
| 307 | return (type, host, path, user, pswd, p) | ||
| 308 | |||
| 309 | ####################################################################### | ||
| 310 | |||
| 311 | def encodeurl(decoded): | ||
| 312 | """Encodes a URL from tokens (scheme, network location, path, | ||
| 313 | user, password, parameters). | ||
| 314 | |||
| 315 | >>> encodeurl(['http', 'www.google.com', '/index.html', '', '', {}]) | ||
| 316 | 'http://www.google.com/index.html' | ||
| 317 | |||
| 318 | CVS with username, host and cvsroot. The cvs module to check out is in the | ||
| 319 | parameters: | ||
| 320 | |||
| 321 | >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', '', {'module': 'familiar/dist/ipkg'}]) | ||
| 322 | 'cvs://anoncvs@cvs.handhelds.org/cvs;module=familiar/dist/ipkg' | ||
| 323 | |||
| 324 | Ditto, but this time the username has a password part, and we also request a specific tag | ||
| 325 | to check out. | ||
| 326 | |||
| 327 | >>> encodeurl(['cvs', 'cvs.handhelds.org', '/cvs', 'anoncvs', 'anonymous', {'tag': 'V0-99-81', 'module': 'familiar/dist/ipkg'}]) | ||
| 328 | 'cvs://anoncvs:anonymous@cvs.handhelds.org/cvs;tag=V0-99-81;module=familiar/dist/ipkg' | ||
| 329 | """ | ||
| 330 | |||
| 331 | (type, host, path, user, pswd, p) = decoded | ||
| 332 | |||
| 333 | if not type or not path: | ||
| 334 | fatal("invalid or missing parameters for url encoding") | ||
| 335 | url = '%s://' % type | ||
| 336 | if user: | ||
| 337 | url += "%s" % user | ||
| 338 | if pswd: | ||
| 339 | url += ":%s" % pswd | ||
| 340 | url += "@" | ||
| 341 | if host: | ||
| 342 | url += "%s" % host | ||
| 343 | url += "%s" % path | ||
| 344 | if p: | ||
| 345 | for parm in p.keys(): | ||
| 346 | url += ";%s=%s" % (parm, p[parm]) | ||
| 347 | |||
| 348 | return url | ||
| 349 | |||
| 350 | ####################################################################### | ||
| 351 | |||
| 352 | def which(path, item, direction = 0): | ||
| 353 | """Useful function for locating a file in a PATH""" | ||
| 354 | found = "" | ||
| 355 | for p in (path or "").split(':'): | ||
| 356 | if os.path.exists(os.path.join(p, item)): | ||
| 357 | found = os.path.join(p, item) | ||
| 358 | if direction == 0: | ||
| 359 | break | ||
| 360 | return found | ||
| 361 | |||
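The direction argument of which() is easy to misread: with the default of 0 the scan stops at the first match, while any other value keeps scanning and returns the last match on the path. A small sketch with an invented layout where /usr/local/bin/gcc and /usr/bin/gcc both exist:

    path = "/usr/local/bin:/usr/bin"
    print which(path, "gcc")               # '/usr/local/bin/gcc' (first match wins)
    print which(path, "gcc", direction=1)  # '/usr/bin/gcc' (last match found wins)
    print which(path, "no-such-tool")      # '' (empty string when nothing is found)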
| 362 | ####################################################################### | ||
| 363 | |||
| 364 | |||
| 365 | |||
| 366 | |||
| 367 | ####################################################################### | ||
| 368 | ####################################################################### | ||
| 369 | # | ||
| 370 | # SECTION: Dependency | ||
| 371 | # | ||
| 372 | # PURPOSE: Compare build & run dependencies | ||
| 373 | # | ||
| 374 | ####################################################################### | ||
| 375 | ####################################################################### | ||
| 376 | |||
| 377 | def tokenize(mystring): | ||
| 378 | """Breaks a string like 'foo? (bar) oni? (blah (blah))' into (possibly embedded) lists: | ||
| 379 | |||
| 380 | >>> tokenize("x") | ||
| 381 | ['x'] | ||
| 382 | >>> tokenize("x y") | ||
| 383 | ['x', 'y'] | ||
| 384 | >>> tokenize("(x y)") | ||
| 385 | [['x', 'y']] | ||
| 386 | >>> tokenize("(x y) b c") | ||
| 387 | [['x', 'y'], 'b', 'c'] | ||
| 388 | >>> tokenize("foo? (bar) oni? (blah (blah))") | ||
| 389 | ['foo?', ['bar'], 'oni?', ['blah', ['blah']]] | ||
| 390 | >>> tokenize("sys-apps/linux-headers nls? (sys-devel/gettext)") | ||
| 391 | ['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']] | ||
| 392 | """ | ||
| 393 | |||
| 394 | newtokens = [] | ||
| 395 | curlist = newtokens | ||
| 396 | prevlists = [] | ||
| 397 | level = 0 | ||
| 398 | accum = "" | ||
| 399 | for x in mystring: | ||
| 400 | if x=="(": | ||
| 401 | if accum: | ||
| 402 | curlist.append(accum) | ||
| 403 | accum="" | ||
| 404 | prevlists.append(curlist) | ||
| 405 | curlist=[] | ||
| 406 | level=level+1 | ||
| 407 | elif x==")": | ||
| 408 | if accum: | ||
| 409 | curlist.append(accum) | ||
| 410 | accum="" | ||
| 411 | if level==0: | ||
| 412 | print "!!! tokenizer: Unmatched left parenthesis in:\n'"+mystring+"'" | ||
| 413 | return None | ||
| 414 | newlist=curlist | ||
| 415 | curlist=prevlists.pop() | ||
| 416 | curlist.append(newlist) | ||
| 417 | level=level-1 | ||
| 418 | elif x in whitespace: | ||
| 419 | if accum: | ||
| 420 | curlist.append(accum) | ||
| 421 | accum="" | ||
| 422 | else: | ||
| 423 | accum=accum+x | ||
| 424 | if accum: | ||
| 425 | curlist.append(accum) | ||
| 426 | if (level!=0): | ||
| 427 | print "!!! tokenizer: Exiting with unterminated parenthesis in:\n'"+mystring+"'" | ||
| 428 | return None | ||
| 429 | return newtokens | ||
| 430 | |||
| 431 | |||
| 432 | ####################################################################### | ||
| 433 | |||
| 434 | def evaluate(tokens,mydefines,allon=0): | ||
| 435 | """Removes tokens based on whether conditional definitions exist or not. | ||
| 436 | Recognizes ! | ||
| 437 | |||
| 438 | >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {}) | ||
| 439 | ['sys-apps/linux-headers'] | ||
| 440 | |||
| 441 | Negate the flag: | ||
| 442 | |||
| 443 | >>> evaluate(['sys-apps/linux-headers', '!nls?', ['sys-devel/gettext']], {}) | ||
| 444 | ['sys-apps/linux-headers', ['sys-devel/gettext']] | ||
| 445 | |||
| 446 | Define 'nls': | ||
| 447 | |||
| 448 | >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {"nls":1}) | ||
| 449 | ['sys-apps/linux-headers', ['sys-devel/gettext']] | ||
| 450 | |||
| 451 | Turn allon on: | ||
| 452 | |||
| 453 | >>> evaluate(['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']], {}, True) | ||
| 454 | ['sys-apps/linux-headers', ['sys-devel/gettext']] | ||
| 455 | """ | ||
| 456 | |||
| 457 | if tokens == None: | ||
| 458 | return None | ||
| 459 | mytokens = tokens + [] # this copies the list | ||
| 460 | pos = 0 | ||
| 461 | while pos < len(mytokens): | ||
| 462 | if type(mytokens[pos]) == types.ListType: | ||
| 463 | evaluate(mytokens[pos], mydefines) | ||
| 464 | if not len(mytokens[pos]): | ||
| 465 | del mytokens[pos] | ||
| 466 | continue | ||
| 467 | elif mytokens[pos][-1] == "?": | ||
| 468 | cur = mytokens[pos][:-1] | ||
| 469 | del mytokens[pos] | ||
| 470 | if allon: | ||
| 471 | if cur[0] == "!": | ||
| 472 | del mytokens[pos] | ||
| 473 | else: | ||
| 474 | if cur[0] == "!": | ||
| 475 | if (cur[1:] in mydefines) and (pos < len(mytokens)): | ||
| 476 | del mytokens[pos] | ||
| 477 | continue | ||
| 478 | elif (cur not in mydefines) and (pos < len(mytokens)): | ||
| 479 | del mytokens[pos] | ||
| 480 | continue | ||
| 481 | pos = pos + 1 | ||
| 482 | return mytokens | ||
| 483 | |||
| 484 | |||
| 485 | ####################################################################### | ||
| 486 | |||
| 487 | def flatten(mytokens): | ||
| 488 | """Converts nested arrays into a flat arrays: | ||
| 489 | |||
| 490 | >>> flatten([1,[2,3]]) | ||
| 491 | [1, 2, 3] | ||
| 492 | >>> flatten(['sys-apps/linux-headers', ['sys-devel/gettext']]) | ||
| 493 | ['sys-apps/linux-headers', 'sys-devel/gettext'] | ||
| 494 | """ | ||
| 495 | |||
| 496 | newlist=[] | ||
| 497 | for x in mytokens: | ||
| 498 | if type(x)==types.ListType: | ||
| 499 | newlist.extend(flatten(x)) | ||
| 500 | else: | ||
| 501 | newlist.append(x) | ||
| 502 | return newlist | ||
| 503 | |||
| 504 | |||
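The three helpers above are normally chained: tokenize() turns the dependency string into nested lists, evaluate() drops or keeps the conditional groups against a set of defined flags, and flatten() collapses the result into a plain list. A sketch using the same example string as the doctests:

    depstring = "sys-apps/linux-headers nls? (sys-devel/gettext)"
    tokens = tokenize(depstring)         # ['sys-apps/linux-headers', 'nls?', ['sys-devel/gettext']]
    kept = evaluate(tokens, {"nls": 1})  # conditional group kept because 'nls' is defined
    print flatten(kept)                  # ['sys-apps/linux-headers', 'sys-devel/gettext']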
| 505 | ####################################################################### | ||
| 506 | |||
| 507 | _package_weights_ = {"pre":-2,"p":0,"alpha":-4,"beta":-3,"rc":-1} # dicts are unordered | ||
| 508 | _package_ends_ = ["pre", "p", "alpha", "beta", "rc", "cvs", "bk", "HEAD" ] # so we need ordered list | ||
| 509 | |||
| 510 | def relparse(myver): | ||
| 511 | """Parses the last elements of a version number into a triplet, that can | ||
| 512 | later be compared: | ||
| 513 | |||
| 514 | >>> relparse('1.2_pre3') | ||
| 515 | [1.2, -2, 3.0] | ||
| 516 | >>> relparse('1.2b') | ||
| 517 | [1.2, 98, 0] | ||
| 518 | >>> relparse('1.2') | ||
| 519 | [1.2, 0, 0] | ||
| 520 | """ | ||
| 521 | |||
| 522 | number = 0 | ||
| 523 | p1 = 0 | ||
| 524 | p2 = 0 | ||
| 525 | mynewver = myver.split('_') | ||
| 526 | if len(mynewver)==2: | ||
| 527 | # an _package_weights_ | ||
| 528 | number = float(mynewver[0]) | ||
| 529 | match = 0 | ||
| 530 | for x in _package_ends_: | ||
| 531 | elen = len(x) | ||
| 532 | if mynewver[1][:elen] == x: | ||
| 533 | match = 1 | ||
| 534 | p1 = _package_weights_[x] | ||
| 535 | try: | ||
| 536 | p2 = float(mynewver[1][elen:]) | ||
| 537 | except: | ||
| 538 | p2 = 0 | ||
| 539 | break | ||
| 540 | if not match: | ||
| 541 | # normal number or number with letter at end | ||
| 542 | divider = len(myver)-1 | ||
| 543 | if myver[divider:] not in "1234567890": | ||
| 544 | # letter at end | ||
| 545 | p1 = ord(myver[divider:]) | ||
| 546 | number = float(myver[0:divider]) | ||
| 547 | else: | ||
| 548 | number = float(myver) | ||
| 549 | else: | ||
| 550 | # normal number or number with letter at end | ||
| 551 | divider = len(myver)-1 | ||
| 552 | if myver[divider:] not in "1234567890": | ||
| 553 | #letter at end | ||
| 554 | p1 = ord(myver[divider:]) | ||
| 555 | number = float(myver[0:divider]) | ||
| 556 | else: | ||
| 557 | number = float(myver) | ||
| 558 | return [number,p1,p2] | ||
| 559 | |||
| 560 | |||
| 561 | ####################################################################### | ||
| 562 | |||
| 563 | __ververify_cache__ = {} | ||
| 564 | |||
| 565 | def ververify(myorigval,silent=1): | ||
| 566 | """Returns 1 if given a valid version string, els 0. Valid versions are in the format | ||
| 567 | |||
| 568 | <v1>.<v2>...<vx>[a-z,_{_package_weights_}[vy]] | ||
| 569 | |||
| 570 | >>> ververify('2.4.20') | ||
| 571 | 1 | ||
| 572 | >>> ververify('2.4..20') # two dots | ||
| 573 | 0 | ||
| 574 | >>> ververify('2.x.20') # 'x' is not numeric | ||
| 575 | 0 | ||
| 576 | >>> ververify('2.4.20a') | ||
| 577 | 1 | ||
| 578 | >>> ververify('2.4.20cvs') # only one trailing letter | ||
| 579 | 0 | ||
| 580 | >>> ververify('1a') | ||
| 581 | 1 | ||
| 582 | >>> ververify('test_a') # no version at all | ||
| 583 | 0 | ||
| 584 | >>> ververify('2.4.20_beta1') | ||
| 585 | 1 | ||
| 586 | >>> ververify('2.4.20_beta') | ||
| 587 | 1 | ||
| 588 | >>> ververify('2.4.20_wrongext') # _wrongext is no valid trailer | ||
| 589 | 0 | ||
| 590 | """ | ||
| 591 | |||
| 592 | # Lookup the cache first | ||
| 593 | try: | ||
| 594 | return __ververify_cache__[myorigval] | ||
| 595 | except KeyError: | ||
| 596 | pass | ||
| 597 | |||
| 598 | if len(myorigval) == 0: | ||
| 599 | if not silent: | ||
| 600 | error("package version is empty") | ||
| 601 | __ververify_cache__[myorigval] = 0 | ||
| 602 | return 0 | ||
| 603 | myval = myorigval.split('.') | ||
| 604 | if len(myval)==0: | ||
| 605 | if not silent: | ||
| 606 | error("package name has empty version string") | ||
| 607 | __ververify_cache__[myorigval] = 0 | ||
| 608 | return 0 | ||
| 609 | # all but the last version must be a numeric | ||
| 610 | for x in myval[:-1]: | ||
| 611 | if not len(x): | ||
| 612 | if not silent: | ||
| 613 | error("package version has two points in a row") | ||
| 614 | __ververify_cache__[myorigval] = 0 | ||
| 615 | return 0 | ||
| 616 | try: | ||
| 617 | foo = int(x) | ||
| 618 | except: | ||
| 619 | if not silent: | ||
| 620 | error("package version contains non-numeric '"+x+"'") | ||
| 621 | __ververify_cache__[myorigval] = 0 | ||
| 622 | return 0 | ||
| 623 | if not len(myval[-1]): | ||
| 624 | if not silent: | ||
| 625 | error("package version has trailing dot") | ||
| 626 | __ververify_cache__[myorigval] = 0 | ||
| 627 | return 0 | ||
| 628 | try: | ||
| 629 | foo = int(myval[-1]) | ||
| 630 | __ververify_cache__[myorigval] = 1 | ||
| 631 | return 1 | ||
| 632 | except: | ||
| 633 | pass | ||
| 634 | |||
| 635 | # ok, our last component is not a plain number or blank, let's continue | ||
| 636 | if myval[-1][-1] in lowercase: | ||
| 637 | try: | ||
| 638 | foo = int(myval[-1][:-1]) | ||
| 639 | __ververify_cache__[myorigval] = 1 | ||
| 640 | return 1 | ||
| 641 | # 1a, 2.0b, etc. | ||
| 642 | except: | ||
| 643 | pass | ||
| 644 | # ok, maybe we have a 1_alpha or 1_beta2; let's see | ||
| 645 | ep=string.split(myval[-1],"_") | ||
| 646 | if len(ep)!= 2: | ||
| 647 | if not silent: | ||
| 648 | error("package version has more than one letter at then end") | ||
| 649 | __ververify_cache__[myorigval] = 0 | ||
| 650 | return 0 | ||
| 651 | try: | ||
| 652 | foo = string.atoi(ep[0]) | ||
| 653 | except: | ||
| 654 | # this needs to be numeric, i.e. the "1" in "1_alpha" | ||
| 655 | if not silent: | ||
| 656 | error("package version must have numeric part before the '_'") | ||
| 657 | __ververify_cache__[myorigval] = 0 | ||
| 658 | return 0 | ||
| 659 | |||
| 660 | for mye in _package_ends_: | ||
| 661 | if ep[1][0:len(mye)] == mye: | ||
| 662 | if len(mye) == len(ep[1]): | ||
| 663 | # no trailing numeric is ok | ||
| 664 | __ververify_cache__[myorigval] = 1 | ||
| 665 | return 1 | ||
| 666 | else: | ||
| 667 | try: | ||
| 668 | foo = string.atoi(ep[1][len(mye):]) | ||
| 669 | __ververify_cache__[myorigval] = 1 | ||
| 670 | return 1 | ||
| 671 | except: | ||
| 672 | # if no _package_weights_ work, *then* we return 0 | ||
| 673 | pass | ||
| 674 | if not silent: | ||
| 675 | error("package version extension after '_' is invalid") | ||
| 676 | __ververify_cache__[myorigval] = 0 | ||
| 677 | return 0 | ||
| 678 | |||
| 679 | |||
| 680 | def isjustname(mypkg): | ||
| 681 | myparts = string.split(mypkg,'-') | ||
| 682 | for x in myparts: | ||
| 683 | if ververify(x): | ||
| 684 | return 0 | ||
| 685 | return 1 | ||
| 686 | |||
| 687 | |||
| 688 | __isspecific_cache__={} | ||
| 689 | |||
| 690 | def isspecific(mypkg): | ||
| 691 | "now supports packages with no category" | ||
| 692 | try: | ||
| 693 | return __isspecific_cache__[mypkg] | ||
| 694 | except: | ||
| 695 | pass | ||
| 696 | |||
| 697 | mysplit = string.split(mypkg,"/") | ||
| 698 | if not isjustname(mysplit[-1]): | ||
| 699 | __isspecific_cache__[mypkg] = 1 | ||
| 700 | return 1 | ||
| 701 | __isspecific_cache__[mypkg] = 0 | ||
| 702 | return 0 | ||
| 703 | |||
| 704 | |||
| 705 | ####################################################################### | ||
| 706 | |||
| 707 | __pkgsplit_cache__={} | ||
| 708 | |||
| 709 | def pkgsplit(mypkg, silent=1): | ||
| 710 | |||
| 711 | """This function can be used as a package verification function. If | ||
| 712 | it is a valid name, pkgsplit will return a list containing: | ||
| 713 | [pkgname, pkgversion(norev), pkgrev ]. | ||
| 714 | |||
| 715 | >>> pkgsplit('') | ||
| 716 | >>> pkgsplit('x') | ||
| 717 | >>> pkgsplit('x-') | ||
| 718 | >>> pkgsplit('-1') | ||
| 719 | >>> pkgsplit('glibc-1.2-8.9-r7') | ||
| 720 | >>> pkgsplit('glibc-2.2.5-r7') | ||
| 721 | ['glibc', '2.2.5', 'r7'] | ||
| 722 | >>> pkgsplit('foo-1.2-1') | ||
| 723 | >>> pkgsplit('Mesa-3.0') | ||
| 724 | ['Mesa', '3.0', 'r0'] | ||
| 725 | """ | ||
| 726 | |||
| 727 | try: | ||
| 728 | return __pkgsplit_cache__[mypkg] | ||
| 729 | except KeyError: | ||
| 730 | pass | ||
| 731 | |||
| 732 | myparts = string.split(mypkg,'-') | ||
| 733 | if len(myparts) < 2: | ||
| 734 | if not silent: | ||
| 735 | error("package name without name or version part") | ||
| 736 | __pkgsplit_cache__[mypkg] = None | ||
| 737 | return None | ||
| 738 | for x in myparts: | ||
| 739 | if len(x) == 0: | ||
| 740 | if not silent: | ||
| 741 | error("package name with empty name or version part") | ||
| 742 | __pkgsplit_cache__[mypkg] = None | ||
| 743 | return None | ||
| 744 | # verify rev | ||
| 745 | revok = 0 | ||
| 746 | myrev = myparts[-1] | ||
| 747 | ververify(myrev, silent) | ||
| 748 | if len(myrev) and myrev[0] == "r": | ||
| 749 | try: | ||
| 750 | string.atoi(myrev[1:]) | ||
| 751 | revok = 1 | ||
| 752 | except: | ||
| 753 | pass | ||
| 754 | if revok: | ||
| 755 | if ververify(myparts[-2]): | ||
| 756 | if len(myparts) == 2: | ||
| 757 | __pkgsplit_cache__[mypkg] = None | ||
| 758 | return None | ||
| 759 | else: | ||
| 760 | for x in myparts[:-2]: | ||
| 761 | if ververify(x): | ||
| 762 | __pkgsplit_cache__[mypkg]=None | ||
| 763 | return None | ||
| 764 | # names can't have versiony looking parts | ||
| 765 | myval=[string.join(myparts[:-2],"-"),myparts[-2],myparts[-1]] | ||
| 766 | __pkgsplit_cache__[mypkg]=myval | ||
| 767 | return myval | ||
| 768 | else: | ||
| 769 | __pkgsplit_cache__[mypkg] = None | ||
| 770 | return None | ||
| 771 | |||
| 772 | elif ververify(myparts[-1],silent): | ||
| 773 | if len(myparts)==1: | ||
| 774 | if not silent: | ||
| 775 | print "!!! Name error in",mypkg+": missing name part." | ||
| 776 | __pkgsplit_cache__[mypkg]=None | ||
| 777 | return None | ||
| 778 | else: | ||
| 779 | for x in myparts[:-1]: | ||
| 780 | if ververify(x): | ||
| 781 | if not silent: error("package name has multiple version parts") | ||
| 782 | __pkgsplit_cache__[mypkg] = None | ||
| 783 | return None | ||
| 784 | myval = [string.join(myparts[:-1],"-"), myparts[-1],"r0"] | ||
| 785 | __pkgsplit_cache__[mypkg] = myval | ||
| 786 | return myval | ||
| 787 | else: | ||
| 788 | __pkgsplit_cache__[mypkg] = None | ||
| 789 | return None | ||
| 790 | |||
| 791 | |||
| 792 | ####################################################################### | ||
| 793 | |||
| 794 | __catpkgsplit_cache__ = {} | ||
| 795 | |||
| 796 | def catpkgsplit(mydata,silent=1): | ||
| 797 | """returns [cat, pkgname, version, rev ] | ||
| 798 | |||
| 799 | >>> catpkgsplit('sys-libs/glibc-1.2-r7') | ||
| 800 | ['sys-libs', 'glibc', '1.2', 'r7'] | ||
| 801 | >>> catpkgsplit('glibc-1.2-r7') | ||
| 802 | [None, 'glibc', '1.2', 'r7'] | ||
| 803 | """ | ||
| 804 | |||
| 805 | try: | ||
| 806 | return __catpkgsplit_cache__[mydata] | ||
| 807 | except KeyError: | ||
| 808 | pass | ||
| 809 | |||
| 810 | cat = os.path.basename(os.path.dirname(mydata)) | ||
| 811 | mydata = os.path.join(cat, os.path.basename(mydata)) | ||
| 812 | if mydata[-3:] == '.bb': | ||
| 813 | mydata = mydata[:-3] | ||
| 814 | |||
| 815 | mysplit = mydata.split("/") | ||
| 816 | p_split = None | ||
| 817 | splitlen = len(mysplit) | ||
| 818 | if splitlen == 1: | ||
| 819 | retval = [None] | ||
| 820 | p_split = pkgsplit(mydata,silent) | ||
| 821 | else: | ||
| 822 | retval = [mysplit[splitlen - 2]] | ||
| 823 | p_split = pkgsplit(mysplit[splitlen - 1],silent) | ||
| 824 | if not p_split: | ||
| 825 | __catpkgsplit_cache__[mydata] = None | ||
| 826 | return None | ||
| 827 | retval.extend(p_split) | ||
| 828 | __catpkgsplit_cache__[mydata] = retval | ||
| 829 | return retval | ||
| 830 | |||
| 831 | |||
| 832 | ####################################################################### | ||
| 833 | |||
| 834 | __vercmp_cache__ = {} | ||
| 835 | |||
| 836 | def vercmp(val1,val2): | ||
| 837 | """This takes two version strings and returns an integer to tell you whether | ||
| 838 | the versions are the same, val1>val2 or val2>val1. | ||
| 839 | |||
| 840 | >>> vercmp('1', '2') | ||
| 841 | -1.0 | ||
| 842 | >>> vercmp('2', '1') | ||
| 843 | 1.0 | ||
| 844 | >>> vercmp('1', '1.0') | ||
| 845 | 0 | ||
| 846 | >>> vercmp('1', '1.1') | ||
| 847 | -1.0 | ||
| 848 | >>> vercmp('1.1', '1_p2') | ||
| 849 | 1.0 | ||
| 850 | """ | ||
| 851 | |||
| 852 | # quick short-circuit | ||
| 853 | if val1 == val2: | ||
| 854 | return 0 | ||
| 855 | valkey = val1+" "+val2 | ||
| 856 | |||
| 857 | # cache lookup | ||
| 858 | try: | ||
| 859 | return __vercmp_cache__[valkey] | ||
| 860 | except KeyError: | ||
| 861 | pass | ||
| 862 | try: | ||
| 863 | return - __vercmp_cache__[val2+" "+val1] | ||
| 864 | except KeyError: | ||
| 865 | pass | ||
| 866 | |||
| 867 | # consider 1_p2 vs 1.1 | ||
| 868 | # after expansion will become (1_p2,0) vc (1,1) | ||
| 869 | # then 1_p2 is compared with 1 before 0 is compared with 1 | ||
| 870 | # to solve the bug we need to convert it to (1,0_p2) | ||
| 871 | # by splitting _prepart part and adding it back _after_expansion | ||
| 872 | |||
| 873 | val1_prepart = val2_prepart = '' | ||
| 874 | if val1.count('_'): | ||
| 875 | val1, val1_prepart = val1.split('_', 1) | ||
| 876 | if val2.count('_'): | ||
| 877 | val2, val2_prepart = val2.split('_', 1) | ||
| 878 | |||
| 879 | # replace '-' by '.' | ||
| 880 | # FIXME: Is it needed? can val1/2 contain '-'? | ||
| 881 | |||
| 882 | val1 = string.split(val1,'-') | ||
| 883 | if len(val1) == 2: | ||
| 884 | val1[0] = val1[0] +"."+ val1[1] | ||
| 885 | val2 = string.split(val2,'-') | ||
| 886 | if len(val2) == 2: | ||
| 887 | val2[0] = val2[0] +"."+ val2[1] | ||
| 888 | |||
| 889 | val1 = string.split(val1[0],'.') | ||
| 890 | val2 = string.split(val2[0],'.') | ||
| 891 | |||
| 892 | # add back decimal point so that .03 does not become "3" ! | ||
| 893 | for x in range(1,len(val1)): | ||
| 894 | if val1[x][0] == '0' : | ||
| 895 | val1[x] = '.' + val1[x] | ||
| 896 | for x in range(1,len(val2)): | ||
| 897 | if val2[x][0] == '0' : | ||
| 898 | val2[x] = '.' + val2[x] | ||
| 899 | |||
| 900 | # extend version numbers | ||
| 901 | if len(val2) < len(val1): | ||
| 902 | val2.extend(["0"]*(len(val1)-len(val2))) | ||
| 903 | elif len(val1) < len(val2): | ||
| 904 | val1.extend(["0"]*(len(val2)-len(val1))) | ||
| 905 | |||
| 906 | # add back _prepart tails | ||
| 907 | if val1_prepart: | ||
| 908 | val1[-1] += '_' + val1_prepart | ||
| 909 | if val2_prepart: | ||
| 910 | val2[-1] += '_' + val2_prepart | ||
| 911 | # The above code will extend version numbers out so they | ||
| 912 | # have the same number of digits. | ||
| 913 | for x in range(0,len(val1)): | ||
| 914 | cmp1 = relparse(val1[x]) | ||
| 915 | cmp2 = relparse(val2[x]) | ||
| 916 | for y in range(0,3): | ||
| 917 | myret = cmp1[y] - cmp2[y] | ||
| 918 | if myret != 0: | ||
| 919 | __vercmp_cache__[valkey] = myret | ||
| 920 | return myret | ||
| 921 | __vercmp_cache__[valkey] = 0 | ||
| 922 | return 0 | ||
| 923 | |||
| 924 | |||
| 925 | ####################################################################### | ||
| 926 | |||
| 927 | def pkgcmp(pkg1,pkg2): | ||
| 928 | """ Compares two packages, which should have been split via | ||
| 929 | pkgsplit(). The return value is negative if pkg2 is newer than | ||
| 930 | pkg1, zero if they are equal, and positive if pkg1 is newer. | ||
| 931 | |||
| 932 | >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r7']) | ||
| 933 | 0 | ||
| 934 | >>> pkgcmp(['glibc', '2.2.5', 'r4'], ['glibc', '2.2.5', 'r7']) | ||
| 935 | -1 | ||
| 936 | >>> pkgcmp(['glibc', '2.2.5', 'r7'], ['glibc', '2.2.5', 'r2']) | ||
| 937 | 1 | ||
| 938 | """ | ||
| 939 | |||
| 940 | mycmp = vercmp(pkg1[1],pkg2[1]) | ||
| 941 | if mycmp > 0: | ||
| 942 | return 1 | ||
| 943 | if mycmp < 0: | ||
| 944 | return -1 | ||
| 945 | r1=string.atoi(pkg1[2][1:]) | ||
| 946 | r2=string.atoi(pkg2[2][1:]) | ||
| 947 | if r1 > r2: | ||
| 948 | return 1 | ||
| 949 | if r2 > r1: | ||
| 950 | return -1 | ||
| 951 | return 0 | ||
| 952 | |||
| 953 | |||
| 954 | ####################################################################### | ||
| 955 | |||
| 956 | def dep_parenreduce(mysplit, mypos=0): | ||
| 957 | """Accepts a list of strings, and converts '(' and ')' surrounded items to sub-lists: | ||
| 958 | |||
| 959 | >>> dep_parenreduce(['']) | ||
| 960 | [''] | ||
| 961 | >>> dep_parenreduce(['1', '2', '3']) | ||
| 962 | ['1', '2', '3'] | ||
| 963 | >>> dep_parenreduce(['1', '(', '2', '3', ')', '4']) | ||
| 964 | ['1', ['2', '3'], '4'] | ||
| 965 | """ | ||
| 966 | |||
| 967 | while mypos < len(mysplit): | ||
| 968 | if mysplit[mypos] == "(": | ||
| 969 | firstpos = mypos | ||
| 970 | mypos = mypos + 1 | ||
| 971 | while mypos < len(mysplit): | ||
| 972 | if mysplit[mypos] == ")": | ||
| 973 | mysplit[firstpos:mypos+1] = [mysplit[firstpos+1:mypos]] | ||
| 974 | mypos = firstpos | ||
| 975 | break | ||
| 976 | elif mysplit[mypos] == "(": | ||
| 977 | # recurse | ||
| 978 | mysplit = dep_parenreduce(mysplit,mypos) | ||
| 979 | mypos = mypos + 1 | ||
| 980 | mypos = mypos + 1 | ||
| 981 | return mysplit | ||
| 982 | |||
| 983 | |||
| 984 | def dep_opconvert(mysplit, myuse): | ||
| 985 | "Does dependency operator conversion" | ||
| 986 | |||
| 987 | mypos = 0 | ||
| 988 | newsplit = [] | ||
| 989 | while mypos < len(mysplit): | ||
| 990 | if type(mysplit[mypos]) == types.ListType: | ||
| 991 | newsplit.append(dep_opconvert(mysplit[mypos],myuse)) | ||
| 992 | mypos += 1 | ||
| 993 | elif mysplit[mypos] == ")": | ||
| 994 | # mismatched paren, error | ||
| 995 | return None | ||
| 996 | elif mysplit[mypos]=="||": | ||
| 997 | if ((mypos+1)>=len(mysplit)) or (type(mysplit[mypos+1])!=types.ListType): | ||
| 998 | # || must be followed by paren'd list | ||
| 999 | return None | ||
| 1000 | try: | ||
| 1001 | mynew = dep_opconvert(mysplit[mypos+1],myuse) | ||
| 1002 | except Exception, e: | ||
| 1003 | error("unable to satisfy OR dependancy: " + string.join(mysplit," || ")) | ||
| 1004 | raise e | ||
| 1005 | mynew[0:0] = ["||"] | ||
| 1006 | newsplit.append(mynew) | ||
| 1007 | mypos += 2 | ||
| 1008 | elif mysplit[mypos][-1] == "?": | ||
| 1009 | # use clause, i.e "gnome? ( foo bar )" | ||
| 1010 | # this is a quick and dirty hack so that repoman can enable all USE vars: | ||
| 1011 | if (len(myuse) == 1) and (myuse[0] == "*"): | ||
| 1012 | # enable it even if it's ! (for repoman) but kill it if it's | ||
| 1013 | # an arch variable that isn't for this arch. XXX Sparc64? | ||
| 1014 | if (mysplit[mypos][:-1] not in settings.usemask) or \ | ||
| 1015 | (mysplit[mypos][:-1]==settings["ARCH"]): | ||
| 1016 | enabled=1 | ||
| 1017 | else: | ||
| 1018 | enabled=0 | ||
| 1019 | else: | ||
| 1020 | if mysplit[mypos][0] == "!": | ||
| 1021 | myusevar = mysplit[mypos][1:-1] | ||
| 1022 | enabled = not myusevar in myuse | ||
| 1023 | #if myusevar in myuse: | ||
| 1024 | # enabled = 0 | ||
| 1025 | #else: | ||
| 1026 | # enabled = 1 | ||
| 1027 | else: | ||
| 1028 | myusevar=mysplit[mypos][:-1] | ||
| 1029 | enabled = myusevar in myuse | ||
| 1030 | #if myusevar in myuse: | ||
| 1031 | # enabled=1 | ||
| 1032 | #else: | ||
| 1033 | # enabled=0 | ||
| 1034 | if (mypos +2 < len(mysplit)) and (mysplit[mypos+2] == ":"): | ||
| 1035 | # colon mode | ||
| 1036 | if enabled: | ||
| 1037 | # choose the first option | ||
| 1038 | if type(mysplit[mypos+1]) == types.ListType: | ||
| 1039 | newsplit.append(dep_opconvert(mysplit[mypos+1],myuse)) | ||
| 1040 | else: | ||
| 1041 | newsplit.append(mysplit[mypos+1]) | ||
| 1042 | else: | ||
| 1043 | # choose the alternate option | ||
| 1044 | if type(mysplit[mypos+1]) == types.ListType: | ||
| 1045 | newsplit.append(dep_opconvert(mysplit[mypos+3],myuse)) | ||
| 1046 | else: | ||
| 1047 | newsplit.append(mysplit[mypos+3]) | ||
| 1048 | mypos += 4 | ||
| 1049 | else: | ||
| 1050 | # normal use mode | ||
| 1051 | if enabled: | ||
| 1052 | if type(mysplit[mypos+1]) == types.ListType: | ||
| 1053 | newsplit.append(dep_opconvert(mysplit[mypos+1],myuse)) | ||
| 1054 | else: | ||
| 1055 | newsplit.append(mysplit[mypos+1]) | ||
| 1056 | # otherwise, continue | ||
| 1057 | mypos += 2 | ||
| 1058 | else: | ||
| 1059 | # normal item | ||
| 1060 | newsplit.append(mysplit[mypos]) | ||
| 1061 | mypos += 1 | ||
| 1062 | return newsplit | ||
| 1063 | |||
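dep_opconvert() has no doctest, so here is a rough sketch of how a USE-conditional string is reduced against a list of enabled flags. It assumes the input was produced by tokenize() above and deliberately avoids the '*' branch, which refers to a settings object this module never defines:

    tokens = tokenize("gnome? (foo bar) !gnome? (baz)")
    print dep_opconvert(tokens, ["gnome"])  # [['foo', 'bar']] - 'gnome' enabled, negated group dropped
    print dep_opconvert(tokens, [])         # [['baz']] - 'gnome' unset, only the negated group survives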
| 1064 | class digraph: | ||
| 1065 | """beautiful directed graph object""" | ||
| 1066 | |||
| 1067 | def __init__(self): | ||
| 1068 | self.dict={} | ||
| 1069 | #okeys = keys, in order they were added (to optimize firstzero() ordering) | ||
| 1070 | self.okeys=[] | ||
| 1071 | self.__callback_cache=[] | ||
| 1072 | |||
| 1073 | def __str__(self): | ||
| 1074 | str = "" | ||
| 1075 | for key in self.okeys: | ||
| 1076 | str += "%s:\t%s\n" % (key, self.dict[key][1]) | ||
| 1077 | return str | ||
| 1078 | |||
| 1079 | def addnode(self,mykey,myparent): | ||
| 1080 | if not mykey in self.dict: | ||
| 1081 | self.okeys.append(mykey) | ||
| 1082 | if myparent==None: | ||
| 1083 | self.dict[mykey]=[0,[]] | ||
| 1084 | else: | ||
| 1085 | self.dict[mykey]=[0,[myparent]] | ||
| 1086 | self.dict[myparent][0]=self.dict[myparent][0]+1 | ||
| 1087 | return | ||
| 1088 | if myparent and (not myparent in self.dict[mykey][1]): | ||
| 1089 | self.dict[mykey][1].append(myparent) | ||
| 1090 | self.dict[myparent][0]=self.dict[myparent][0]+1 | ||
| 1091 | |||
| 1092 | def delnode(self,mykey, ref = 1): | ||
| 1093 | """Delete a node | ||
| 1094 | |||
| 1095 | If ref is 1, remove references to this node from other nodes. | ||
| 1096 | If ref is 2, remove nodes that reference this node.""" | ||
| 1097 | if not mykey in self.dict: | ||
| 1098 | return | ||
| 1099 | for x in self.dict[mykey][1]: | ||
| 1100 | self.dict[x][0]=self.dict[x][0]-1 | ||
| 1101 | del self.dict[mykey] | ||
| 1102 | while 1: | ||
| 1103 | try: | ||
| 1104 | self.okeys.remove(mykey) | ||
| 1105 | except ValueError: | ||
| 1106 | break | ||
| 1107 | if ref: | ||
| 1108 | __kill = [] | ||
| 1109 | for k in self.okeys: | ||
| 1110 | if mykey in self.dict[k][1]: | ||
| 1111 | if ref == 1 or ref == 2: | ||
| 1112 | self.dict[k][1].remove(mykey) | ||
| 1113 | if ref == 2: | ||
| 1114 | __kill.append(k) | ||
| 1115 | for l in __kill: | ||
| 1116 | self.delnode(l, ref) | ||
| 1117 | |||
| 1118 | def allnodes(self): | ||
| 1119 | "returns all nodes in the dictionary" | ||
| 1120 | return self.dict.keys() | ||
| 1121 | |||
| 1122 | def firstzero(self): | ||
| 1123 | "returns first node with zero references, or NULL if no such node exists" | ||
| 1124 | for x in self.okeys: | ||
| 1125 | if self.dict[x][0]==0: | ||
| 1126 | return x | ||
| 1127 | return None | ||
| 1128 | |||
| 1129 | def firstnonzero(self): | ||
| 1130 | "returns first node with nonzero references, or NULL if no such node exists" | ||
| 1131 | for x in self.okeys: | ||
| 1132 | if self.dict[x][0]!=0: | ||
| 1133 | return x | ||
| 1134 | return None | ||
| 1135 | |||
| 1136 | |||
| 1137 | def allzeros(self): | ||
| 1138 | "returns all nodes with zero references, or NULL if no such node exists" | ||
| 1139 | zerolist = [] | ||
| 1140 | for x in self.dict.keys(): | ||
| 1141 | if self.dict[x][0]==0: | ||
| 1142 | zerolist.append(x) | ||
| 1143 | return zerolist | ||
| 1144 | |||
| 1145 | def hasallzeros(self): | ||
| 1146 | "returns 0/1, Are all nodes zeros? 1 : 0" | ||
| 1147 | zerolist = [] | ||
| 1148 | for x in self.dict.keys(): | ||
| 1149 | if self.dict[x][0]!=0: | ||
| 1150 | return 0 | ||
| 1151 | return 1 | ||
| 1152 | |||
| 1153 | def empty(self): | ||
| 1154 | if len(self.dict)==0: | ||
| 1155 | return 1 | ||
| 1156 | return 0 | ||
| 1157 | |||
| 1158 | def hasnode(self,mynode): | ||
| 1159 | return mynode in self.dict | ||
| 1160 | |||
| 1161 | def getparents(self, item): | ||
| 1162 | if not self.hasnode(item): | ||
| 1163 | return [] | ||
| 1164 | return self.dict[item][1] | ||
| 1165 | |||
| 1166 | def getchildren(self, item): | ||
| 1167 | if not self.hasnode(item): | ||
| 1168 | return [] | ||
| 1169 | children = [i for i in self.okeys if item in self.getparents(i)] | ||
| 1170 | return children | ||
| 1171 | |||
| 1172 | def walkdown(self, item, callback, debug = None, usecache = False): | ||
| 1173 | if not self.hasnode(item): | ||
| 1174 | return 0 | ||
| 1175 | |||
| 1176 | if usecache: | ||
| 1177 | if self.__callback_cache.count(item): | ||
| 1178 | if debug: | ||
| 1179 | print "hit cache for item: %s" % item | ||
| 1180 | return 1 | ||
| 1181 | |||
| 1182 | parents = self.getparents(item) | ||
| 1183 | children = self.getchildren(item) | ||
| 1184 | for p in parents: | ||
| 1185 | if p in children: | ||
| 1186 | # print "%s is both parent and child of %s" % (p, item) | ||
| 1187 | if usecache: | ||
| 1188 | self.__callback_cache.append(p) | ||
| 1189 | ret = callback(self, p) | ||
| 1190 | if ret == 0: | ||
| 1191 | return 0 | ||
| 1192 | continue | ||
| 1193 | if item == p: | ||
| 1194 | print "eek, i'm my own parent!" | ||
| 1195 | return 0 | ||
| 1196 | if debug: | ||
| 1197 | print "item: %s, p: %s" % (item, p) | ||
| 1198 | ret = self.walkdown(p, callback, debug, usecache) | ||
| 1199 | if ret == 0: | ||
| 1200 | return 0 | ||
| 1201 | if usecache: | ||
| 1202 | self.__callback_cache.append(item) | ||
| 1203 | return callback(self, item) | ||
| 1204 | |||
| 1205 | def walkup(self, item, callback): | ||
| 1206 | if not self.hasnode(item): | ||
| 1207 | return 0 | ||
| 1208 | |||
| 1209 | parents = self.getparents(item) | ||
| 1210 | children = self.getchildren(item) | ||
| 1211 | for c in children: | ||
| 1212 | if c in parents: | ||
| 1213 | ret = callback(self, item) | ||
| 1214 | if ret == 0: | ||
| 1215 | return 0 | ||
| 1216 | continue | ||
| 1217 | if item == c: | ||
| 1218 | print "eek, i'm my own child!" | ||
| 1219 | return 0 | ||
| 1220 | ret = self.walkup(c, callback) | ||
| 1221 | if ret == 0: | ||
| 1222 | return 0 | ||
| 1223 | return callback(self, item) | ||
| 1224 | |||
| 1225 | def copy(self): | ||
| 1226 | mygraph=digraph() | ||
| 1227 | for x in self.dict.keys(): | ||
| 1228 | mygraph.dict[x]=self.dict[x][:] | ||
| 1229 | mygraph.okeys=self.okeys[:] | ||
| 1230 | return mygraph | ||
| 1231 | |||
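A short usage sketch for the digraph class with invented node names: parents are the dependencies of a node, the per-node counter tracks how many other nodes point at it, and walkdown() visits all parents before calling the callback on the node itself (returning 0 from the callback aborts the walk). Note that a node must be added before it can be referenced as a parent:

    g = digraph()
    g.addnode("unpack", None)
    g.addnode("configure", None)
    g.addnode("compile", None)
    g.addnode("configure", "unpack")     # unpack is a dependency (parent) of configure
    g.addnode("compile", "configure")

    def show(graph, node):
        print "visiting", node
        return 1

    g.walkdown("compile", show)          # prints unpack, configure, compile - parents first
    print g.firstzero()                  # 'compile': the only node nothing else depends on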
| 1232 | ####################################################################### | ||
| 1233 | ####################################################################### | ||
| 1234 | # | ||
| 1235 | # SECTION: Config | ||
| 1236 | # | ||
| 1237 | # PURPOSE: Reading and handling of system/target-specific/local configuration | ||
| 1238 | # reading of package configuration | ||
| 1239 | # | ||
| 1240 | ####################################################################### | ||
| 1241 | ####################################################################### | ||
| 1242 | |||
| 1243 | def reader(cfgfile, feeder): | ||
| 1244 | """Generic configuration file reader that opens a file, reads the lines, | ||
| 1245 | handles continuation lines, comments and empty lines, and feeds each remaining line | ||
| 1246 | into the function feeder(lineno, line). | ||
| 1247 | """ | ||
| 1248 | |||
| 1249 | f = open(cfgfile,'r') | ||
| 1250 | lineno = 0 | ||
| 1251 | while 1: | ||
| 1252 | lineno = lineno + 1 | ||
| 1253 | s = f.readline() | ||
| 1254 | if not s: break | ||
| 1255 | w = s.strip() | ||
| 1256 | if not w: continue # skip empty lines | ||
| 1257 | s = s.rstrip() | ||
| 1258 | if s[0] == '#': continue # skip comments | ||
| 1259 | while s[-1] == '\\': | ||
| 1260 | s2 = f.readline()[:-1].strip() | ||
| 1261 | s = s[:-1] + s2 | ||
| 1262 | feeder(lineno, s) | ||
| 1263 | |||
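reader() only normalises the file (it joins continuation lines and skips comments and blank lines) and hands everything else to the feeder callback, so the actual parsing lives in the caller. A hedged sketch that collects simple 'VAR = value' lines from a made-up file:

    assignments = {}

    def feeder(lineno, line):
        # naive 'VAR = value' split, purely for illustration
        if '=' in line:
            key, value = line.split('=', 1)
            assignments[key.strip()] = value.strip()

    reader("local.conf", feeder)   # hypothetical file name
    print assignments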
| 1264 | if __name__ == "__main__": | ||
| 1265 | import doctest, bb | ||
| 1266 | doctest.testmod(bb) | ||
diff --git a/bitbake/lib/bb/__init__.pyc b/bitbake/lib/bb/__init__.pyc
new file mode 100644
index 0000000000..e8331843e8
--- /dev/null
+++ b/bitbake/lib/bb/__init__.pyc
Binary files differ
diff --git a/bitbake/lib/bb/build.py b/bitbake/lib/bb/build.py
new file mode 100644
index 0000000000..599b45d9d3
--- /dev/null
+++ b/bitbake/lib/bb/build.py
@@ -0,0 +1,395 @@
| 1 | #!/usr/bin/env python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | """ | ||
| 5 | BitBake 'Build' implementation | ||
| 6 | |||
| 7 | Core code for function execution and task handling in the | ||
| 8 | BitBake build tools. | ||
| 9 | |||
| 10 | Copyright (C) 2003, 2004 Chris Larson | ||
| 11 | |||
| 12 | Based on Gentoo's portage.py. | ||
| 13 | |||
| 14 | This program is free software; you can redistribute it and/or modify it under | ||
| 15 | the terms of the GNU General Public License as published by the Free Software | ||
| 16 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 17 | version. | ||
| 18 | |||
| 19 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 20 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 21 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 22 | |||
| 23 | You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 24 | |||
| 25 | Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 26 | """ | ||
| 27 | |||
| 28 | from bb import debug, data, fetch, fatal, error, note, event, mkdirhier | ||
| 29 | import bb, os | ||
| 30 | |||
| 31 | # data holds flags and function name for a given task | ||
| 32 | _task_data = data.init() | ||
| 33 | |||
| 34 | # graph represents task interdependencies | ||
| 35 | _task_graph = bb.digraph() | ||
| 36 | |||
| 37 | # stack represents execution order, excepting dependencies | ||
| 38 | _task_stack = [] | ||
| 39 | |||
| 40 | # events | ||
| 41 | class FuncFailed(Exception): | ||
| 42 | """Executed function failed""" | ||
| 43 | |||
| 44 | class EventException(Exception): | ||
| 45 | """Exception which is associated with an Event.""" | ||
| 46 | |||
| 47 | def __init__(self, msg, event): | ||
| 48 | self.args = msg, event | ||
| 49 | |||
| 50 | class TaskBase(event.Event): | ||
| 51 | """Base class for task events""" | ||
| 52 | |||
| 53 | def __init__(self, t, d ): | ||
| 54 | self._task = t | ||
| 55 | event.Event.__init__(self, d) | ||
| 56 | |||
| 57 | def getTask(self): | ||
| 58 | return self._task | ||
| 59 | |||
| 60 | def setTask(self, task): | ||
| 61 | self._task = task | ||
| 62 | |||
| 63 | task = property(getTask, setTask, None, "task property") | ||
| 64 | |||
| 65 | class TaskStarted(TaskBase): | ||
| 66 | """Task execution started""" | ||
| 67 | |||
| 68 | class TaskSucceeded(TaskBase): | ||
| 69 | """Task execution completed""" | ||
| 70 | |||
| 71 | class TaskFailed(TaskBase): | ||
| 72 | """Task execution failed""" | ||
| 73 | |||
| 74 | class InvalidTask(TaskBase): | ||
| 75 | """Invalid Task""" | ||
| 76 | |||
| 77 | # functions | ||
| 78 | |||
| 79 | def init(data): | ||
| 80 | global _task_data, _task_graph, _task_stack | ||
| 81 | _task_data = data.init() | ||
| 82 | _task_graph = bb.digraph() | ||
| 83 | _task_stack = [] | ||
| 84 | |||
| 85 | |||
| 86 | def exec_func(func, d, dirs = None): | ||
| 87 | """Execute an BB 'function'""" | ||
| 88 | |||
| 89 | body = data.getVar(func, d) | ||
| 90 | if not body: | ||
| 91 | return | ||
| 92 | |||
| 93 | if not dirs: | ||
| 94 | dirs = (data.getVarFlag(func, 'dirs', d) or "").split() | ||
| 95 | for adir in dirs: | ||
| 96 | adir = data.expand(adir, d) | ||
| 97 | mkdirhier(adir) | ||
| 98 | |||
| 99 | if len(dirs) > 0: | ||
| 100 | adir = dirs[-1] | ||
| 101 | else: | ||
| 102 | adir = data.getVar('B', d, 1) | ||
| 103 | |||
| 104 | adir = data.expand(adir, d) | ||
| 105 | |||
| 106 | try: | ||
| 107 | prevdir = os.getcwd() | ||
| 108 | except OSError: | ||
| 109 | prevdir = data.expand('${TOPDIR}', d) | ||
| 110 | if adir and os.access(adir, os.F_OK): | ||
| 111 | os.chdir(adir) | ||
| 112 | |||
| 113 | if data.getVarFlag(func, "python", d): | ||
| 114 | exec_func_python(func, d) | ||
| 115 | else: | ||
| 116 | exec_func_shell(func, d) | ||
| 117 | |||
| 118 | if os.path.exists(prevdir): | ||
| 119 | os.chdir(prevdir) | ||
| 120 | |||
| 121 | def exec_func_python(func, d): | ||
| 122 | """Execute a python BB 'function'""" | ||
| 123 | import re, os | ||
| 124 | |||
| 125 | tmp = "def " + func + "():\n%s" % data.getVar(func, d) | ||
| 126 | comp = compile(tmp + '\n' + func + '()', bb.data.getVar('FILE', d, 1) + ':' + func, "exec") | ||
| 127 | prevdir = os.getcwd() | ||
| 128 | g = {} # globals | ||
| 129 | g['bb'] = bb | ||
| 130 | g['os'] = os | ||
| 131 | g['d'] = d | ||
| 132 | exec comp in g | ||
| 133 | if os.path.exists(prevdir): | ||
| 134 | os.chdir(prevdir) | ||
| 135 | |||
| 136 | def exec_func_shell(func, d): | ||
| 137 | """Execute a shell BB 'function' Returns true if execution was successful. | ||
| 138 | |||
| 139 | For this, it creates a bash shell script in the tmp dectory, writes the local | ||
| 140 | data into it and finally executes. The output of the shell will end in a log file and stdout. | ||
| 141 | |||
| 142 | Note on directory behavior. The 'dirs' varflag should contain a list | ||
| 143 | of the directories you need created prior to execution. The last | ||
| 144 | item in the list is where we will chdir/cd to. | ||
| 145 | """ | ||
| 146 | import sys | ||
| 147 | |||
| 148 | deps = data.getVarFlag(func, 'deps', d) | ||
| 149 | check = data.getVarFlag(func, 'check', d) | ||
| 150 | if check in globals(): | ||
| 151 | if globals()[check](func, deps): | ||
| 152 | return | ||
| 153 | |||
| 154 | global logfile | ||
| 155 | t = data.getVar('T', d, 1) | ||
| 156 | if not t: | ||
| 157 | return 0 | ||
| 158 | mkdirhier(t) | ||
| 159 | logfile = "%s/log.%s.%s" % (t, func, str(os.getpid())) | ||
| 160 | runfile = "%s/run.%s.%s" % (t, func, str(os.getpid())) | ||
| 161 | |||
| 162 | f = open(runfile, "w") | ||
| 163 | f.write("#!/bin/sh -e\n") | ||
| 164 | if bb.debug_level > 0: f.write("set -x\n") | ||
| 165 | data.emit_env(f, d) | ||
| 166 | |||
| 167 | f.write("cd %s\n" % os.getcwd()) | ||
| 168 | if func: f.write("%s\n" % func) | ||
| 169 | f.close() | ||
| 170 | os.chmod(runfile, 0775) | ||
| 171 | if not func: | ||
| 172 | error("Function not specified") | ||
| 173 | raise FuncFailed() | ||
| 174 | |||
| 175 | # open logs | ||
| 176 | si = file('/dev/null', 'r') | ||
| 177 | try: | ||
| 178 | if bb.debug_level > 0: | ||
| 179 | so = os.popen("tee \"%s\"" % logfile, "w") | ||
| 180 | else: | ||
| 181 | so = file(logfile, 'w') | ||
| 182 | except OSError, e: | ||
| 183 | bb.error("opening log file: %s" % e) | ||
| 184 | pass | ||
| 185 | |||
| 186 | se = so | ||
| 187 | |||
| 188 | # dup the existing fds so we don't lose them | ||
| 189 | osi = [os.dup(sys.stdin.fileno()), sys.stdin.fileno()] | ||
| 190 | oso = [os.dup(sys.stdout.fileno()), sys.stdout.fileno()] | ||
| 191 | ose = [os.dup(sys.stderr.fileno()), sys.stderr.fileno()] | ||
| 192 | |||
| 193 | # replace those fds with our own | ||
| 194 | os.dup2(si.fileno(), osi[1]) | ||
| 195 | os.dup2(so.fileno(), oso[1]) | ||
| 196 | os.dup2(se.fileno(), ose[1]) | ||
| 197 | |||
| 198 | # execute function | ||
| 199 | prevdir = os.getcwd() | ||
| 200 | if data.getVarFlag(func, "fakeroot", d): | ||
| 201 | maybe_fakeroot = "PATH=\"%s\" fakeroot " % bb.data.getVar("PATH", d, 1) | ||
| 202 | else: | ||
| 203 | maybe_fakeroot = '' | ||
| 204 | ret = os.system('%ssh -e %s' % (maybe_fakeroot, runfile)) | ||
| 205 | os.chdir(prevdir) | ||
| 206 | |||
| 207 | # restore the backups | ||
| 208 | os.dup2(osi[0], osi[1]) | ||
| 209 | os.dup2(oso[0], oso[1]) | ||
| 210 | os.dup2(ose[0], ose[1]) | ||
| 211 | |||
| 212 | # close our logs | ||
| 213 | si.close() | ||
| 214 | so.close() | ||
| 215 | se.close() | ||
| 216 | |||
| 217 | # close the backup fds | ||
| 218 | os.close(osi[0]) | ||
| 219 | os.close(oso[0]) | ||
| 220 | os.close(ose[0]) | ||
| 221 | |||
| 222 | if ret==0: | ||
| 223 | if bb.debug_level > 0: | ||
| 224 | os.remove(runfile) | ||
| 225 | # os.remove(logfile) | ||
| 226 | return | ||
| 227 | else: | ||
| 228 | error("function %s failed" % func) | ||
| 229 | if data.getVar("BBINCLUDELOGS", d): | ||
| 230 | error("log data follows (%s)" % logfile) | ||
| 231 | f = open(logfile, "r") | ||
| 232 | while True: | ||
| 233 | l = f.readline() | ||
| 234 | if l == '': | ||
| 235 | break | ||
| 236 | l = l.rstrip() | ||
| 237 | print '| %s' % l | ||
| 238 | f.close() | ||
| 239 | else: | ||
| 240 | error("see log in %s" % logfile) | ||
| 241 | raise FuncFailed( logfile ) | ||
| 242 | |||
| 243 | |||
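As a concrete illustration of the flow above, here is a minimal, hypothetical setup that runs a shell function through exec_func(). It leans on the bb.data API used throughout this file (data.init, setVar, setVarFlag); the paths are invented. T is where the run.* and log.* files end up, and the dirs flag lists directories to create and cd into:

    from bb import data, build

    d = data.init()
    data.setVar('T', '/tmp/bbwork/temp', d)           # run.* and log.* files go here
    data.setVar('B', '/tmp/bbwork/build', d)          # default working directory
    data.setVar('TOPDIR', '/tmp/bbwork', d)
    data.setVar('do_hello', 'echo "hello from bitbake"\n', d)
    data.setVarFlag('do_hello', 'dirs', '${B}', d)    # created via mkdirhier, then cd'd into

    build.exec_func('do_hello', d)   # emits the environment, runs the script with sh -e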
| 244 | def exec_task(task, d): | ||
| 245 | """Execute an BB 'task' | ||
| 246 | |||
| 247 | The primary difference between executing a task versus executing | ||
| 248 | a function is that a task exists in the task digraph, and therefore | ||
| 249 | has dependencies amongst other tasks.""" | ||
| 250 | |||
| 251 | # check if the task is in the graph.. | ||
| 252 | task_graph = data.getVar('_task_graph', d) | ||
| 253 | if not task_graph: | ||
| 254 | task_graph = bb.digraph() | ||
| 255 | data.setVar('_task_graph', task_graph, d) | ||
| 256 | task_cache = data.getVar('_task_cache', d) | ||
| 257 | if not task_cache: | ||
| 258 | task_cache = [] | ||
| 259 | data.setVar('_task_cache', task_cache, d) | ||
| 260 | if not task_graph.hasnode(task): | ||
| 261 | raise EventException("Missing node in task graph", InvalidTask(task, d)) | ||
| 262 | |||
| 263 | # check whether this task needs executing.. | ||
| 264 | if not data.getVarFlag(task, 'force', d): | ||
| 265 | if stamp_is_current(task, d): | ||
| 266 | return 1 | ||
| 267 | |||
| 268 | # follow digraph path up, then execute our way back down | ||
| 269 | def execute(graph, item): | ||
| 270 | if data.getVarFlag(item, 'task', d): | ||
| 271 | if item in task_cache: | ||
| 272 | return 1 | ||
| 273 | |||
| 274 | if task != item: | ||
| 275 | # deeper than toplevel, exec w/ deps | ||
| 276 | exec_task(item, d) | ||
| 277 | return 1 | ||
| 278 | |||
| 279 | try: | ||
| 280 | debug(1, "Executing task %s" % item) | ||
| 281 | old_overrides = data.getVar('OVERRIDES', d, 0) | ||
| 282 | localdata = data.createCopy(d) | ||
| 283 | data.setVar('OVERRIDES', 'task_%s:%s' % (item, old_overrides), localdata) | ||
| 284 | data.update_data(localdata) | ||
| 285 | event.fire(TaskStarted(item, localdata)) | ||
| 286 | exec_func(item, localdata) | ||
| 287 | event.fire(TaskSucceeded(item, localdata)) | ||
| 288 | task_cache.append(item) | ||
| 289 | data.setVar('_task_cache', task_cache, d) | ||
| 290 | except FuncFailed, reason: | ||
| 291 | note( "Task failed: %s" % reason ) | ||
| 292 | failedevent = TaskFailed(item, d) | ||
| 293 | event.fire(failedevent) | ||
| 294 | raise EventException("Function failed in task: %s" % reason, failedevent) | ||
| 295 | |||
| 296 | # execute | ||
| 297 | task_graph.walkdown(task, execute) | ||
| 298 | |||
| 299 | # make stamp, or cause event and raise exception | ||
| 300 | if not data.getVarFlag(task, 'nostamp', d): | ||
| 301 | mkstamp(task, d) | ||
| 302 | |||
| 303 | |||
| 304 | def stamp_is_current(task, d, checkdeps = 1): | ||
| 305 | """Check status of a given task's stamp. returns 0 if it is not current and needs updating.""" | ||
| 306 | task_graph = data.getVar('_task_graph', d) | ||
| 307 | if not task_graph: | ||
| 308 | task_graph = bb.digraph() | ||
| 309 | data.setVar('_task_graph', task_graph, d) | ||
| 310 | stamp = data.getVar('STAMP', d) | ||
| 311 | if not stamp: | ||
| 312 | return 0 | ||
| 313 | stampfile = "%s.%s" % (data.expand(stamp, d), task) | ||
| 314 | if not os.access(stampfile, os.F_OK): | ||
| 315 | return 0 | ||
| 316 | |||
| 317 | if checkdeps == 0: | ||
| 318 | return 1 | ||
| 319 | |||
| 320 | import stat | ||
| 321 | tasktime = os.stat(stampfile)[stat.ST_MTIME] | ||
| 322 | |||
| 323 | _deps = [] | ||
| 324 | def checkStamp(graph, task): | ||
| 325 | # check for existence | ||
| 326 | if data.getVarFlag(task, 'nostamp', d): | ||
| 327 | return 1 | ||
| 328 | |||
| 329 | if not stamp_is_current(task, d, 0): | ||
| 330 | return 0 | ||
| 331 | |||
| 332 | depfile = "%s.%s" % (data.expand(stamp, d), task) | ||
| 333 | deptime = os.stat(depfile)[stat.ST_MTIME] | ||
| 334 | if deptime > tasktime: | ||
| 335 | return 0 | ||
| 336 | return 1 | ||
| 337 | |||
| 338 | return task_graph.walkdown(task, checkStamp) | ||
| 339 | |||
| 340 | |||
| 341 | def md5_is_current(task): | ||
| 342 | """Check if a md5 file for a given task is current""" | ||
| 343 | |||
| 344 | |||
| 345 | def mkstamp(task, d): | ||
| 346 | """Creates/updates a stamp for a given task""" | ||
| 347 | stamp = data.getVar('STAMP', d) | ||
| 348 | if not stamp: | ||
| 349 | return | ||
| 350 | stamp = "%s.%s" % (data.expand(stamp, d), task) | ||
| 351 | mkdirhier(os.path.dirname(stamp)) | ||
| 352 | open(stamp, "w+") | ||
| 353 | |||
| 354 | |||
| 355 | def add_task(task, deps, d): | ||
| 356 | task_graph = data.getVar('_task_graph', d) | ||
| 357 | if not task_graph: | ||
| 358 | task_graph = bb.digraph() | ||
| 359 | data.setVarFlag(task, 'task', 1, d) | ||
| 360 | task_graph.addnode(task, None) | ||
| 361 | for dep in deps: | ||
| 362 | if not task_graph.hasnode(dep): | ||
| 363 | task_graph.addnode(dep, None) | ||
| 364 | task_graph.addnode(task, dep) | ||
| 365 | # don't assume holding a reference | ||
| 366 | data.setVar('_task_graph', task_graph, d) | ||
| 367 | |||
| 368 | |||
| 369 | def remove_task(task, kill, d): | ||
| 370 | """Remove an BB 'task'. | ||
| 371 | |||
| 372 | If kill is 1, also remove tasks that depend on this task.""" | ||
| 373 | |||
| 374 | task_graph = data.getVar('_task_graph', d) | ||
| 375 | if not task_graph: | ||
| 376 | task_graph = bb.digraph() | ||
| 377 | if not task_graph.hasnode(task): | ||
| 378 | return | ||
| 379 | |||
| 380 | data.delVarFlag(task, 'task', d) | ||
| 381 | ref = 1 | ||
| 382 | if kill == 1: | ||
| 383 | ref = 2 | ||
| 384 | task_graph.delnode(task, ref) | ||
| 385 | data.setVar('_task_graph', task_graph, d) | ||
| 386 | |||
| 387 | def task_exists(task, d): | ||
| 388 | task_graph = data.getVar('_task_graph', d) | ||
| 389 | if not task_graph: | ||
| 390 | task_graph = bb.digraph() | ||
| 391 | data.setVar('_task_graph', task_graph, d) | ||
| 392 | return task_graph.hasnode(task) | ||
| 393 | |||
| 394 | def get_task_data(): | ||
| 395 | return _task_data | ||
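The stamp handling above reduces to touching one file per task and comparing mtimes: mkstamp() touches STAMP.<task>, and stamp_is_current() treats a task as up to date only when its stamp is newer than the stamps of the tasks it depends on. Below is a minimal standalone sketch of that idea; the stamp.<task> file names, touch_stamp/stamp_current helpers, and the plain deps mapping are illustrative, not BitBake's actual layout, and it only checks one level of dependencies where the real code walks the whole task digraph.

import os

def touch_stamp(stampdir, task):
    # create or refresh the per-task stamp file, like mkstamp()
    path = os.path.join(stampdir, "stamp.%s" % task)
    open(path, "w").close()
    return path

def stamp_current(stampdir, task, deps):
    # current = stamp exists and no dependency stamp is newer
    path = os.path.join(stampdir, "stamp.%s" % task)
    if not os.path.exists(path):
        return False
    mtime = os.stat(path).st_mtime
    for dep in deps.get(task, []):
        dep_path = os.path.join(stampdir, "stamp.%s" % dep)
        if not os.path.exists(dep_path) or os.stat(dep_path).st_mtime > mtime:
            return False
    return True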
diff --git a/bitbake/lib/bb/build.pyc b/bitbake/lib/bb/build.pyc new file mode 100644 index 0000000000..556a68dc9b --- /dev/null +++ b/bitbake/lib/bb/build.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/data.py b/bitbake/lib/bb/data.py new file mode 100644 index 0000000000..b7d707a920 --- /dev/null +++ b/bitbake/lib/bb/data.py | |||
| @@ -0,0 +1,580 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | """ | ||
| 5 | BitBake 'Data' implementations | ||
| 6 | |||
| 7 | Functions for interacting with the data structure used by the | ||
| 8 | BitBake build tools. | ||
| 9 | |||
| 10 | Copyright (C) 2003, 2004 Chris Larson | ||
| 11 | Copyright (C) 2005 Holger Hans Peter Freyther | ||
| 12 | |||
| 13 | This program is free software; you can redistribute it and/or modify it under | ||
| 14 | the terms of the GNU General Public License as published by the Free Software | ||
| 15 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 16 | version. | ||
| 17 | |||
| 18 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 19 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 20 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 21 | |||
| 22 | You should have received a copy of the GNU General Public License along with | ||
| 23 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 24 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 25 | |||
| 26 | Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 27 | """ | ||
| 28 | |||
| 29 | import sys, os, re, time, types | ||
| 30 | if sys.argv[0][-5:] == "pydoc": | ||
| 31 | path = os.path.dirname(os.path.dirname(sys.argv[1])) | ||
| 32 | else: | ||
| 33 | path = os.path.dirname(os.path.dirname(sys.argv[0])) | ||
| 34 | sys.path.append(path) | ||
| 35 | |||
| 36 | from bb import note, debug, data_smart | ||
| 37 | |||
| 38 | _dict_type = data_smart.DataSmart | ||
| 39 | _dict_p_type = data_smart.DataSmartPackage | ||
| 40 | |||
| 41 | class DataDictFull(dict): | ||
| 42 | """ | ||
| 43 | This implements our Package Data Storage Interface. | ||
| 44 | setDirty is a no op as all items are held in memory | ||
| 45 | """ | ||
| 46 | def setDirty(self, bbfile, data): | ||
| 47 | """ | ||
| 48 | No-op; we assume the data was manipulated through some | ||
| 49 | sort of reference. | ||
| 50 | """ | ||
| 51 | if not bbfile in self: | ||
| 52 | raise Exception("File %s was not in dictionary before" % bbfile) | ||
| 53 | |||
| 54 | self[bbfile] = data | ||
| 55 | |||
| 56 | class DataDictCache: | ||
| 57 | """ | ||
| 58 | Data-backed dictionary implementation | ||
| 59 | """ | ||
| 60 | def __init__(self, cache_dir, config): | ||
| 61 | self.cache_dir = cache_dir | ||
| 62 | self.files = [] | ||
| 63 | self.dirty = {} | ||
| 64 | self.config = config | ||
| 65 | |||
| 66 | def has_key(self,key): | ||
| 67 | return key in self.files | ||
| 68 | |||
| 69 | def keys(self): | ||
| 70 | return self.files | ||
| 71 | |||
| 72 | def __setitem__(self, key, data): | ||
| 73 | """ | ||
| 74 | Add the key to the list of known files; the data | ||
| 75 | itself is not cached here. | ||
| 76 | """ | ||
| 77 | if key in self.files: | ||
| 78 | return | ||
| 79 | |||
| 80 | self.files.append(key) | ||
| 81 | |||
| 82 | def __getitem__(self, key): | ||
| 83 | if not key in self.files: | ||
| 84 | return None | ||
| 85 | |||
| 86 | # if it was marked dirty, return the modified in-memory copy | ||
| 87 | if key in self.dirty: | ||
| 88 | return self.dirty[key] | ||
| 89 | |||
| 90 | # not cached yet | ||
| 91 | return _dict_p_type(self.cache_dir, key,False,self.config) | ||
| 92 | |||
| 93 | def setDirty(self, bbfile, data): | ||
| 94 | """ | ||
| 95 | Only items that have already been added can be marked dirty. | ||
| 96 | """ | ||
| 97 | |||
| 98 | if not bbfile in self.files: | ||
| 99 | raise Exception("File %s was not in dictionary before" % bbfile) | ||
| 100 | |||
| 101 | self.dirty[bbfile] = data | ||
| 102 | |||
| 103 | |||
| 104 | |||
| 105 | def init(): | ||
| 106 | return _dict_type() | ||
| 107 | |||
| 108 | def init_db(cache,name,clean,parent = None): | ||
| 109 | return _dict_p_type(cache,name,clean,parent) | ||
| 110 | |||
| 111 | def init_db_mtime(cache,cache_bbfile): | ||
| 112 | return _dict_p_type.mtime(cache,cache_bbfile) | ||
| 113 | |||
| 114 | def pkgdata(use_cache, cache, config = None): | ||
| 115 | """ | ||
| 116 | Return a dictionary-like object used to look up parsed dictionaries | ||
| 117 | """ | ||
| 118 | if use_cache: | ||
| 119 | return DataDictCache(cache, config) | ||
| 120 | return DataDictFull() | ||
| 121 | |||
| 122 | def createCopy(source): | ||
| 123 | """Link the source set to the destination | ||
| 124 | If a value is not found in the destination set, the | ||
| 125 | search continues in the source set. | ||
| 126 | Values from the source are copy-on-write, i.e. any attempt to | ||
| 127 | modify one of them will end up putting the modified value | ||
| 128 | in the destination set. | ||
| 129 | """ | ||
| 130 | return source.createCopy() | ||
| 131 | |||
| 132 | def initVar(var, d): | ||
| 133 | """Non-destructive var init for data structure""" | ||
| 134 | d.initVar(var) | ||
| 135 | |||
| 136 | |||
| 137 | def setVar(var, value, d): | ||
| 138 | """Set a variable to a given value | ||
| 139 | |||
| 140 | Example: | ||
| 141 | >>> d = init() | ||
| 142 | >>> setVar('TEST', 'testcontents', d) | ||
| 143 | >>> print getVar('TEST', d) | ||
| 144 | testcontents | ||
| 145 | """ | ||
| 146 | d.setVar(var,value) | ||
| 147 | |||
| 148 | |||
| 149 | def getVar(var, d, exp = 0): | ||
| 150 | """Gets the value of a variable | ||
| 151 | |||
| 152 | Example: | ||
| 153 | >>> d = init() | ||
| 154 | >>> setVar('TEST', 'testcontents', d) | ||
| 155 | >>> print getVar('TEST', d) | ||
| 156 | testcontents | ||
| 157 | """ | ||
| 158 | return d.getVar(var,exp) | ||
| 159 | |||
| 160 | def delVar(var, d): | ||
| 161 | """Removes a variable from the data set | ||
| 162 | |||
| 163 | Example: | ||
| 164 | >>> d = init() | ||
| 165 | >>> setVar('TEST', 'testcontents', d) | ||
| 166 | >>> print getVar('TEST', d) | ||
| 167 | testcontents | ||
| 168 | >>> delVar('TEST', d) | ||
| 169 | >>> print getVar('TEST', d) | ||
| 170 | None | ||
| 171 | """ | ||
| 172 | d.delVar(var) | ||
| 173 | |||
| 174 | def setVarFlag(var, flag, flagvalue, d): | ||
| 175 | """Set a flag for a given variable to a given value | ||
| 176 | |||
| 177 | Example: | ||
| 178 | >>> d = init() | ||
| 179 | >>> setVarFlag('TEST', 'python', 1, d) | ||
| 180 | >>> print getVarFlag('TEST', 'python', d) | ||
| 181 | 1 | ||
| 182 | """ | ||
| 183 | d.setVarFlag(var,flag,flagvalue) | ||
| 184 | |||
| 185 | def getVarFlag(var, flag, d): | ||
| 186 | """Gets given flag from given var | ||
| 187 | |||
| 188 | Example: | ||
| 189 | >>> d = init() | ||
| 190 | >>> setVarFlag('TEST', 'python', 1, d) | ||
| 191 | >>> print getVarFlag('TEST', 'python', d) | ||
| 192 | 1 | ||
| 193 | """ | ||
| 194 | return d.getVarFlag(var,flag) | ||
| 195 | |||
| 196 | def delVarFlag(var, flag, d): | ||
| 197 | """Removes a given flag from the variable's flags | ||
| 198 | |||
| 199 | Example: | ||
| 200 | >>> d = init() | ||
| 201 | >>> setVarFlag('TEST', 'testflag', 1, d) | ||
| 202 | >>> print getVarFlag('TEST', 'testflag', d) | ||
| 203 | 1 | ||
| 204 | >>> delVarFlag('TEST', 'testflag', d) | ||
| 205 | >>> print getVarFlag('TEST', 'testflag', d) | ||
| 206 | None | ||
| 207 | |||
| 208 | """ | ||
| 209 | d.delVarFlag(var,flag) | ||
| 210 | |||
| 211 | def setVarFlags(var, flags, d): | ||
| 212 | """Set the flags for a given variable | ||
| 213 | |||
| 214 | Example: | ||
| 215 | >>> d = init() | ||
| 216 | >>> myflags = {} | ||
| 217 | >>> myflags['test'] = 'blah' | ||
| 218 | >>> setVarFlags('TEST', myflags, d) | ||
| 219 | >>> print getVarFlag('TEST', 'test', d) | ||
| 220 | blah | ||
| 221 | """ | ||
| 222 | d.setVarFlags(var,flags) | ||
| 223 | |||
| 224 | def getVarFlags(var, d): | ||
| 225 | """Gets a variable's flags | ||
| 226 | |||
| 227 | Example: | ||
| 228 | >>> d = init() | ||
| 229 | >>> setVarFlag('TEST', 'test', 'blah', d) | ||
| 230 | >>> print getVarFlags('TEST', d)['test'] | ||
| 231 | blah | ||
| 232 | """ | ||
| 233 | return d.getVarFlags(var) | ||
| 234 | |||
| 235 | def delVarFlags(var, d): | ||
| 236 | """Removes a variable's flags | ||
| 237 | |||
| 238 | Example: | ||
| 239 | >>> data = init() | ||
| 240 | >>> setVarFlag('TEST', 'testflag', 1, data) | ||
| 241 | >>> print getVarFlag('TEST', 'testflag', data) | ||
| 242 | 1 | ||
| 243 | >>> delVarFlags('TEST', data) | ||
| 244 | >>> print getVarFlags('TEST', data) | ||
| 245 | None | ||
| 246 | |||
| 247 | """ | ||
| 248 | d.delVarFlags(var) | ||
| 249 | |||
| 250 | def keys(d): | ||
| 251 | """Return a list of keys in d | ||
| 252 | |||
| 253 | Example: | ||
| 254 | >>> d = init() | ||
| 255 | >>> setVar('TEST', 1, d) | ||
| 256 | >>> setVar('MOO' , 2, d) | ||
| 257 | >>> setVarFlag('TEST', 'test', 1, d) | ||
| 258 | >>> keys(d) | ||
| 259 | ['TEST', 'MOO'] | ||
| 260 | """ | ||
| 261 | return d.keys() | ||
| 262 | |||
| 263 | def getData(d): | ||
| 264 | """Returns the data object used""" | ||
| 265 | return d | ||
| 266 | |||
| 267 | def setData(newData, d): | ||
| 268 | """Sets the data object to the supplied value""" | ||
| 269 | d = newData | ||
| 270 | |||
| 271 | __expand_var_regexp__ = re.compile(r"\${[^{}]+}") | ||
| 272 | __expand_python_regexp__ = re.compile(r"\${@.+?}") | ||
| 273 | |||
| 274 | def expand(s, d, varname = None): | ||
| 275 | """Variable expansion using the data store. | ||
| 276 | |||
| 277 | Example: | ||
| 278 | Standard expansion: | ||
| 279 | >>> d = init() | ||
| 280 | >>> setVar('A', 'sshd', d) | ||
| 281 | >>> print expand('/usr/bin/${A}', d) | ||
| 282 | /usr/bin/sshd | ||
| 283 | |||
| 284 | Python expansion: | ||
| 285 | >>> d = init() | ||
| 286 | >>> print expand('result: ${@37 * 72}', d) | ||
| 287 | result: 2664 | ||
| 288 | |||
| 289 | Shell expansion: | ||
| 290 | >>> d = init() | ||
| 291 | >>> print expand('${TARGET_MOO}', d) | ||
| 292 | ${TARGET_MOO} | ||
| 293 | >>> setVar('TARGET_MOO', 'yupp', d) | ||
| 294 | >>> print expand('${TARGET_MOO}',d) | ||
| 295 | yupp | ||
| 296 | >>> setVar('SRC_URI', 'http://somebug.${TARGET_MOO}', d) | ||
| 297 | >>> delVar('TARGET_MOO', d) | ||
| 298 | >>> print expand('${SRC_URI}', d) | ||
| 299 | http://somebug.${TARGET_MOO} | ||
| 300 | """ | ||
| 301 | def var_sub(match): | ||
| 302 | key = match.group()[2:-1] | ||
| 303 | if varname and key: | ||
| 304 | if varname == key: | ||
| 305 | raise Exception("variable %s references itself!" % varname) | ||
| 306 | var = getVar(key, d, 1) | ||
| 307 | if var is not None: | ||
| 308 | return var | ||
| 309 | else: | ||
| 310 | return match.group() | ||
| 311 | |||
| 312 | def python_sub(match): | ||
| 313 | import bb | ||
| 314 | code = match.group()[3:-1] | ||
| 315 | locals()['d'] = d | ||
| 316 | s = eval(code) | ||
| 317 | if type(s) == types.IntType: s = str(s) | ||
| 318 | return s | ||
| 319 | |||
| 320 | if type(s) is not types.StringType: # sanity check | ||
| 321 | return s | ||
| 322 | |||
| 323 | while s.find('$') != -1: | ||
| 324 | olds = s | ||
| 325 | try: | ||
| 326 | s = __expand_var_regexp__.sub(var_sub, s) | ||
| 327 | s = __expand_python_regexp__.sub(python_sub, s) | ||
| 328 | if s == olds: break | ||
| 329 | if type(s) is not types.StringType: # sanity check | ||
| 330 | import bb | ||
| 331 | bb.error('expansion of %s returned non-string %s' % (olds, s)) | ||
| 332 | except KeyboardInterrupt: | ||
| 333 | raise | ||
| 334 | except: | ||
| 335 | note("%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s)) | ||
| 336 | raise | ||
| 337 | return s | ||
| 338 | |||
| 339 | def expandKeys(alterdata, readdata = None): | ||
| 340 | if readdata == None: | ||
| 341 | readdata = alterdata | ||
| 342 | |||
| 343 | for key in keys(alterdata): | ||
| 344 | ekey = expand(key, readdata) | ||
| 345 | if key == ekey: | ||
| 346 | continue | ||
| 347 | val = getVar(key, alterdata) | ||
| 348 | if val is None: | ||
| 349 | continue | ||
| 350 | # import copy | ||
| 351 | # setVarFlags(ekey, copy.copy(getVarFlags(key, readdata)), alterdata) | ||
| 352 | setVar(ekey, val, alterdata) | ||
| 353 | |||
| 354 | for i in ('_append', '_prepend', '_delete'): | ||
| 355 | dest = getVarFlag(ekey, i, alterdata) or [] | ||
| 356 | src = getVarFlag(key, i, readdata) or [] | ||
| 357 | dest.extend(src) | ||
| 358 | setVarFlag(ekey, i, dest, alterdata) | ||
| 359 | |||
| 360 | delVar(key, alterdata) | ||
| 361 | |||
| 362 | def expandData(alterdata, readdata = None): | ||
| 363 | """For each variable in alterdata, expand it, and update the var contents. | ||
| 364 | Replacements use data from readdata. | ||
| 365 | |||
| 366 | Example: | ||
| 367 | >>> a=init() | ||
| 368 | >>> b=init() | ||
| 369 | >>> setVar("dlmsg", "dl_dir is ${DL_DIR}", a) | ||
| 370 | >>> setVar("DL_DIR", "/path/to/whatever", b) | ||
| 371 | >>> expandData(a, b) | ||
| 372 | >>> print getVar("dlmsg", a) | ||
| 373 | dl_dir is /path/to/whatever | ||
| 374 | """ | ||
| 375 | if readdata == None: | ||
| 376 | readdata = alterdata | ||
| 377 | |||
| 378 | for key in keys(alterdata): | ||
| 379 | val = getVar(key, alterdata) | ||
| 380 | if type(val) is not types.StringType: | ||
| 381 | continue | ||
| 382 | expanded = expand(val, readdata) | ||
| 383 | # print "key is %s, val is %s, expanded is %s" % (key, val, expanded) | ||
| 384 | if val != expanded: | ||
| 385 | setVar(key, expanded, alterdata) | ||
| 386 | |||
| 387 | import os | ||
| 388 | |||
| 389 | def inheritFromOS(d): | ||
| 390 | """Inherit variables from the environment.""" | ||
| 391 | # fakeroot needs to be able to set these | ||
| 392 | non_inherit_vars = [ "LD_LIBRARY_PATH", "LD_PRELOAD" ] | ||
| 393 | for s in os.environ.keys(): | ||
| 394 | if not s in non_inherit_vars: | ||
| 395 | try: | ||
| 396 | setVar(s, os.environ[s], d) | ||
| 397 | setVarFlag(s, 'matchesenv', '1', d) | ||
| 398 | except TypeError: | ||
| 399 | pass | ||
| 400 | |||
| 401 | import sys | ||
| 402 | |||
| 403 | def emit_var(var, o=sys.__stdout__, d = init(), all=False): | ||
| 404 | """Emit a variable to be sourced by a shell.""" | ||
| 405 | if getVarFlag(var, "python", d): | ||
| 406 | return 0 | ||
| 407 | |||
| 408 | try: | ||
| 409 | if all: | ||
| 410 | oval = getVar(var, d, 0) | ||
| 411 | val = getVar(var, d, 1) | ||
| 412 | except KeyboardInterrupt: | ||
| 413 | raise | ||
| 414 | except: | ||
| 415 | excname = str(sys.exc_info()[0]) | ||
| 416 | if excname == "bb.build.FuncFailed": | ||
| 417 | raise | ||
| 418 | o.write('# expansion of %s threw %s\n' % (var, excname)) | ||
| 419 | return 0 | ||
| 420 | |||
| 421 | if all: | ||
| 422 | o.write('# %s=%s\n' % (var, oval)) | ||
| 423 | |||
| 424 | if type(val) is not types.StringType: | ||
| 425 | return 0 | ||
| 426 | |||
| 427 | if getVarFlag(var, 'matchesenv', d): | ||
| 428 | return 0 | ||
| 429 | |||
| 430 | if (var.find("-") != -1 or var.find(".") != -1 or var.find('{') != -1 or var.find('}') != -1 or var.find('+') != -1) and not all: | ||
| 431 | return 0 | ||
| 432 | |||
| 433 | val = val.rstrip() | ||
| 434 | if not val: | ||
| 435 | return 0 | ||
| 436 | |||
| 437 | if getVarFlag(var, "func", d): | ||
| 438 | # NOTE: should probably check for unbalanced {} within the var | ||
| 439 | o.write("%s() {\n%s\n}\n" % (var, val)) | ||
| 440 | else: | ||
| 441 | if getVarFlag(var, "export", d): | ||
| 442 | o.write('export ') | ||
| 443 | else: | ||
| 444 | if not all: | ||
| 445 | return 0 | ||
| 446 | # if we're going to output this within doublequotes, | ||
| 447 | # to a shell, we need to escape the quotes in the var | ||
| 448 | alter = re.sub('"', '\\"', val.strip()) | ||
| 449 | o.write('%s="%s"\n' % (var, alter)) | ||
| 450 | return 1 | ||
| 451 | |||
| 452 | |||
| 453 | def emit_env(o=sys.__stdout__, d = init(), all=False): | ||
| 454 | """Emits all items in the data store in a format such that it can be sourced by a shell.""" | ||
| 455 | |||
| 456 | env = keys(d) | ||
| 457 | |||
| 458 | for e in env: | ||
| 459 | if getVarFlag(e, "func", d): | ||
| 460 | continue | ||
| 461 | emit_var(e, o, d, all) and o.write('\n') | ||
| 462 | |||
| 463 | for e in env: | ||
| 464 | if not getVarFlag(e, "func", d): | ||
| 465 | continue | ||
| 466 | emit_var(e, o, d) and o.write('\n') | ||
| 467 | |||
| 468 | def update_data(d): | ||
| 469 | """Modifies the environment vars according to local overrides and commands. | ||
| 470 | Examples: | ||
| 471 | Appending to a variable: | ||
| 472 | >>> d = init() | ||
| 473 | >>> setVar('TEST', 'this is a', d) | ||
| 474 | >>> setVar('TEST_append', ' test', d) | ||
| 475 | >>> setVar('TEST_append', ' of the emergency broadcast system.', d) | ||
| 476 | >>> update_data(d) | ||
| 477 | >>> print getVar('TEST', d) | ||
| 478 | this is a test of the emergency broadcast system. | ||
| 479 | |||
| 480 | Prepending to a variable: | ||
| 481 | >>> setVar('TEST', 'virtual/libc', d) | ||
| 482 | >>> setVar('TEST_prepend', 'virtual/tmake ', d) | ||
| 483 | >>> setVar('TEST_prepend', 'virtual/patcher ', d) | ||
| 484 | >>> update_data(d) | ||
| 485 | >>> print getVar('TEST', d) | ||
| 486 | virtual/patcher virtual/tmake virtual/libc | ||
| 487 | |||
| 488 | Overrides: | ||
| 489 | >>> setVar('TEST_arm', 'target', d) | ||
| 490 | >>> setVar('TEST_ramses', 'machine', d) | ||
| 491 | >>> setVar('TEST_local', 'local', d) | ||
| 492 | >>> setVar('OVERRIDES', 'arm', d) | ||
| 493 | |||
| 494 | >>> setVar('TEST', 'original', d) | ||
| 495 | >>> update_data(d) | ||
| 496 | >>> print getVar('TEST', d) | ||
| 497 | target | ||
| 498 | |||
| 499 | >>> setVar('OVERRIDES', 'arm:ramses:local', d) | ||
| 500 | >>> setVar('TEST', 'original', d) | ||
| 501 | >>> update_data(d) | ||
| 502 | >>> print getVar('TEST', d) | ||
| 503 | local | ||
| 504 | """ | ||
| 505 | |||
| 506 | debug(2, "update_data()") | ||
| 507 | |||
| 508 | # can't do delete env[...] while iterating over the dictionary, so remember them | ||
| 509 | dodel = [] | ||
| 510 | overrides = (getVar('OVERRIDES', d, 1) or "").split(':') or [] | ||
| 511 | |||
| 512 | def applyOverrides(var, d): | ||
| 513 | if not overrides: | ||
| 514 | debug(1, "OVERRIDES not defined, nothing to do") | ||
| 515 | return | ||
| 516 | val = getVar(var, d) | ||
| 517 | for o in overrides: | ||
| 518 | if var.endswith("_" + o): | ||
| 519 | l = len(o)+1 | ||
| 520 | name = var[:-l] | ||
| 521 | d[name] = d[var] | ||
| 522 | |||
| 523 | for s in keys(d): | ||
| 524 | applyOverrides(s, d) | ||
| 525 | sval = getVar(s, d) or "" | ||
| 526 | |||
| 527 | # Handle line appends: | ||
| 528 | for (a, o) in getVarFlag(s, '_append', d) or []: | ||
| 529 | # maybe the OVERRIDE was not yet added so keep the append | ||
| 530 | if (o and o in overrides) or not o: | ||
| 531 | delVarFlag(s, '_append', d) | ||
| 532 | if o: | ||
| 533 | if not o in overrides: | ||
| 534 | continue | ||
| 535 | sval+=a | ||
| 536 | setVar(s, sval, d) | ||
| 537 | |||
| 538 | # Handle line prepends | ||
| 539 | for (a, o) in getVarFlag(s, '_prepend', d) or []: | ||
| 540 | # maybe the OVERRIDE was not yet added so keep the append | ||
| 541 | if (o and o in overrides) or not o: | ||
| 542 | delVarFlag(s, '_prepend', d) | ||
| 543 | if o: | ||
| 544 | if not o in overrides: | ||
| 545 | continue | ||
| 546 | sval=a+sval | ||
| 547 | setVar(s, sval, d) | ||
| 548 | |||
| 549 | # Handle line deletions | ||
| 550 | name = s + "_delete" | ||
| 551 | nameval = getVar(name, d) | ||
| 552 | if nameval: | ||
| 553 | sval = getVar(s, d) | ||
| 554 | if sval: | ||
| 555 | new = '' | ||
| 556 | pattern = nameval.replace('\n','').strip() | ||
| 557 | for line in sval.split('\n'): | ||
| 558 | if line.find(pattern) == -1: | ||
| 559 | new = new + '\n' + line | ||
| 560 | setVar(s, new, d) | ||
| 561 | dodel.append(name) | ||
| 562 | |||
| 563 | # delete all environment vars no longer needed | ||
| 564 | for s in dodel: | ||
| 565 | delVar(s, d) | ||
| 566 | |||
| 567 | def inherits_class(klass, d): | ||
| 568 | val = getVar('__inherit_cache', d) or "" | ||
| 569 | if os.path.join('classes', '%s.bbclass' % klass) in val.split(): | ||
| 570 | return True | ||
| 571 | return False | ||
| 572 | |||
| 573 | def _test(): | ||
| 574 | """Start a doctest run on this module""" | ||
| 575 | import doctest | ||
| 576 | from bb import data | ||
| 577 | doctest.testmod(data) | ||
| 578 | |||
| 579 | if __name__ == "__main__": | ||
| 580 | _test() | ||
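Most of the heavy lifting in this module happens in update_data(): entries in OVERRIDES turn VAR_<override> into VAR, and the _append/_prepend flags recorded by the data store are folded in afterwards. The following rough standalone illustration covers just the override-suffix step, using a plain dict instead of the data store; apply_overrides is a made-up helper, not part of the API.

def apply_overrides(vars, overrides):
    # for every VAR_<override> whose suffix appears in OVERRIDES,
    # copy its value over VAR; later overrides win
    result = dict(vars)
    for o in overrides:
        for name, value in vars.items():
            if name.endswith("_" + o):
                result[name[:-(len(o) + 1)]] = value
    return result

d = {"TEST": "original", "TEST_arm": "target", "TEST_local": "local"}
print(apply_overrides(d, ["arm", "local"])["TEST"])   # -> local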
diff --git a/bitbake/lib/bb/data.pyc b/bitbake/lib/bb/data.pyc new file mode 100644 index 0000000000..4b729e021f --- /dev/null +++ b/bitbake/lib/bb/data.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/data_dict.pyc b/bitbake/lib/bb/data_dict.pyc new file mode 100644 index 0000000000..16c6ea141e --- /dev/null +++ b/bitbake/lib/bb/data_dict.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/data_smart.py b/bitbake/lib/bb/data_smart.py new file mode 100644 index 0000000000..741790502f --- /dev/null +++ b/bitbake/lib/bb/data_smart.py | |||
| @@ -0,0 +1,351 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake Smart Dictionary Implementation | ||
| 5 | |||
| 6 | Functions for interacting with the data structure used by the | ||
| 7 | BitBake build tools. | ||
| 8 | |||
| 9 | Copyright (C) 2003, 2004 Chris Larson | ||
| 10 | Copyright (C) 2004, 2005 Seb Frankengul | ||
| 11 | Copyright (C) 2005 Holger Hans Peter Freyther | ||
| 12 | Copyright (C) 2005 Uli Luckas | ||
| 13 | Copyright (C) 2005 ROAD GmbH | ||
| 14 | |||
| 15 | This program is free software; you can redistribute it and/or modify it under | ||
| 16 | the terms of the GNU General Public License as published by the Free Software | ||
| 17 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 18 | version. | ||
| 19 | |||
| 20 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 21 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 22 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 23 | |||
| 24 | You should have received a copy of the GNU General Public License along with | ||
| 25 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 26 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 27 | |||
| 28 | Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 29 | """ | ||
| 30 | |||
| 31 | import copy, os, re, sys, time, types | ||
| 32 | from bb import note, debug, fatal | ||
| 33 | |||
| 34 | try: | ||
| 35 | import cPickle as pickle | ||
| 36 | except ImportError: | ||
| 37 | import pickle | ||
| 38 | print "NOTE: Importing cPickle failed. Falling back to a very slow implementation." | ||
| 39 | |||
| 40 | |||
| 41 | __setvar_keyword__ = ["_append","_prepend","_delete"] | ||
| 42 | __setvar_regexp__ = re.compile('(?P<base>.*?)(?P<keyword>_append|_prepend|_delete)(_(?P<add>.*))?') | ||
| 43 | __expand_var_regexp__ = re.compile(r"\${[^{}]+}") | ||
| 44 | __expand_python_regexp__ = re.compile(r"\${@.+?}") | ||
| 45 | |||
| 46 | |||
| 47 | class DataSmart: | ||
| 48 | def __init__(self): | ||
| 49 | self.dict = {} | ||
| 50 | |||
| 51 | def expand(self,s, varname): | ||
| 52 | def var_sub(match): | ||
| 53 | key = match.group()[2:-1] | ||
| 54 | if varname and key: | ||
| 55 | if varname == key: | ||
| 56 | raise Exception("variable %s references itself!" % varname) | ||
| 57 | var = self.getVar(key, 1) | ||
| 58 | if var is not None: | ||
| 59 | return var | ||
| 60 | else: | ||
| 61 | return match.group() | ||
| 62 | |||
| 63 | def python_sub(match): | ||
| 64 | import bb | ||
| 65 | code = match.group()[3:-1] | ||
| 66 | locals()['d'] = self | ||
| 67 | s = eval(code) | ||
| 68 | if type(s) == types.IntType: s = str(s) | ||
| 69 | return s | ||
| 70 | |||
| 71 | if type(s) is not types.StringType: # sanity check | ||
| 72 | return s | ||
| 73 | |||
| 74 | while s.find('$') != -1: | ||
| 75 | olds = s | ||
| 76 | try: | ||
| 77 | s = __expand_var_regexp__.sub(var_sub, s) | ||
| 78 | s = __expand_python_regexp__.sub(python_sub, s) | ||
| 79 | if s == olds: break | ||
| 80 | if type(s) is not types.StringType: # sanity check | ||
| 81 | import bb | ||
| 82 | bb.error('expansion of %s returned non-string %s' % (olds, s)) | ||
| 83 | except KeyboardInterrupt: | ||
| 84 | raise | ||
| 85 | except: | ||
| 86 | note("%s:%s while evaluating:\n%s" % (sys.exc_info()[0], sys.exc_info()[1], s)) | ||
| 87 | raise | ||
| 88 | return s | ||
| 89 | |||
| 90 | def initVar(self, var): | ||
| 91 | if not var in self.dict: | ||
| 92 | self.dict[var] = {} | ||
| 93 | |||
| 94 | def pickle_prep(self, cfg): | ||
| 95 | if "_data" in self.dict: | ||
| 96 | if self.dict["_data"] == cfg: | ||
| 97 | self.dict["_data"] = "cfg"; | ||
| 98 | else: # this is an unknown array for the moment | ||
| 99 | pass | ||
| 100 | |||
| 101 | def unpickle_prep(self, cfg): | ||
| 102 | if "_data" in self.dict: | ||
| 103 | if self.dict["_data"] == "cfg": | ||
| 104 | self.dict["_data"] = cfg; | ||
| 105 | |||
| 106 | def _findVar(self,var): | ||
| 107 | _dest = self.dict | ||
| 108 | |||
| 109 | while (_dest and var not in _dest): | ||
| 110 | if not "_data" in _dest: | ||
| 111 | _dest = None | ||
| 112 | break | ||
| 113 | _dest = _dest["_data"] | ||
| 114 | |||
| 115 | if _dest and var in _dest: | ||
| 116 | return _dest[var] | ||
| 117 | return None | ||
| 118 | |||
| 119 | def _copyVar(self,var,name): | ||
| 120 | local_var = self._findVar(var) | ||
| 121 | if local_var: | ||
| 122 | self.dict[name] = copy.copy(local_var) | ||
| 123 | else: | ||
| 124 | debug(1,"Warning, _copyVar %s to %s, %s does not exists" % (var,name,var)) | ||
| 125 | |||
| 126 | |||
| 127 | def _makeShadowCopy(self, var): | ||
| 128 | if var in self.dict: | ||
| 129 | return | ||
| 130 | |||
| 131 | local_var = self._findVar(var) | ||
| 132 | |||
| 133 | if local_var: | ||
| 134 | self.dict[var] = copy.copy(local_var) | ||
| 135 | else: | ||
| 136 | self.initVar(var) | ||
| 137 | |||
| 138 | def setVar(self,var,value): | ||
| 139 | match = __setvar_regexp__.match(var) | ||
| 140 | if match and match.group("keyword") in __setvar_keyword__: | ||
| 141 | base = match.group('base') | ||
| 142 | keyword = match.group("keyword") | ||
| 143 | override = match.group('add') | ||
| 144 | l = self.getVarFlag(base, keyword) or [] | ||
| 145 | if override == 'delete': | ||
| 146 | if l.count([value, None]): | ||
| 147 | del l[l.index([value, None])] | ||
| 148 | l.append([value, override]) | ||
| 149 | self.setVarFlag(base, match.group("keyword"), l) | ||
| 150 | return | ||
| 151 | |||
| 152 | if not var in self.dict: | ||
| 153 | self._makeShadowCopy(var) | ||
| 154 | if self.getVarFlag(var, 'matchesenv'): | ||
| 155 | self.delVarFlag(var, 'matchesenv') | ||
| 156 | self.setVarFlag(var, 'export', 1) | ||
| 157 | |||
| 158 | # setting var | ||
| 159 | self.dict[var]["content"] = value | ||
| 160 | |||
| 161 | def getVar(self,var,exp): | ||
| 162 | value = self.getVarFlag(var,"content") | ||
| 163 | |||
| 164 | if exp and value: | ||
| 165 | return self.expand(value,var) | ||
| 166 | return value | ||
| 167 | |||
| 168 | def delVar(self,var): | ||
| 169 | self.dict[var] = {} | ||
| 170 | |||
| 171 | def setVarFlag(self,var,flag,flagvalue): | ||
| 172 | if not var in self.dict: | ||
| 173 | self._makeShadowCopy(var) | ||
| 174 | self.dict[var][flag] = flagvalue | ||
| 175 | |||
| 176 | def getVarFlag(self,var,flag): | ||
| 177 | local_var = self._findVar(var) | ||
| 178 | if local_var: | ||
| 179 | if flag in local_var: | ||
| 180 | return copy.copy(local_var[flag]) | ||
| 181 | return None | ||
| 182 | |||
| 183 | def delVarFlag(self,var,flag): | ||
| 184 | local_var = self._findVar(var) | ||
| 185 | if not local_var: | ||
| 186 | return | ||
| 187 | if not var in self.dict: | ||
| 188 | self._makeShadowCopy(var) | ||
| 189 | |||
| 190 | if var in self.dict and flag in self.dict[var]: | ||
| 191 | del self.dict[var][flag] | ||
| 192 | |||
| 193 | def setVarFlags(self,var,flags): | ||
| 194 | if not var in self.dict: | ||
| 195 | self._makeShadowCopy(var) | ||
| 196 | |||
| 197 | for i in flags.keys(): | ||
| 198 | if i == "content": | ||
| 199 | continue | ||
| 200 | self.dict[var][i] = flags[i] | ||
| 201 | |||
| 202 | def getVarFlags(self,var): | ||
| 203 | local_var = self._findVar(var) | ||
| 204 | flags = {} | ||
| 205 | |||
| 206 | if local_var: | ||
| 207 | for i in local_var.keys(): | ||
| 208 | if i == "content": | ||
| 209 | continue | ||
| 210 | flags[i] = local_var[i] | ||
| 211 | |||
| 212 | if len(flags) == 0: | ||
| 213 | return None | ||
| 214 | return flags | ||
| 215 | |||
| 216 | |||
| 217 | def delVarFlags(self,var): | ||
| 218 | if not var in self.dict: | ||
| 219 | self._makeShadowCopy(var) | ||
| 220 | |||
| 221 | if var in self.dict: | ||
| 222 | content = None | ||
| 223 | |||
| 224 | # try to save the content | ||
| 225 | if "content" in self.dict[var]: | ||
| 226 | content = self.dict[var]["content"] | ||
| 227 | self.dict[var] = {} | ||
| 228 | self.dict[var]["content"] = content | ||
| 229 | else: | ||
| 230 | del self.dict[var] | ||
| 231 | |||
| 232 | |||
| 233 | def createCopy(self): | ||
| 234 | """ | ||
| 235 | Create a copy of self by setting _data to self | ||
| 236 | """ | ||
| 237 | # we really want this to be a DataSmart... | ||
| 238 | data = DataSmart() | ||
| 239 | data.dict["_data"] = self.dict | ||
| 240 | |||
| 241 | return data | ||
| 242 | |||
| 243 | # Dictionary Methods | ||
| 244 | def keys(self): | ||
| 245 | def _keys(d, mykey): | ||
| 246 | if "_data" in d: | ||
| 247 | _keys(d["_data"],mykey) | ||
| 248 | |||
| 249 | for key in d.keys(): | ||
| 250 | if key != "_data": | ||
| 251 | mykey[key] = None | ||
| 252 | keytab = {} | ||
| 253 | _keys(self.dict,keytab) | ||
| 254 | return keytab.keys() | ||
| 255 | |||
| 256 | def __getitem__(self,item): | ||
| 257 | start = self.dict | ||
| 258 | while start: | ||
| 259 | if item in start: | ||
| 260 | return start[item] | ||
| 261 | elif "_data" in start: | ||
| 262 | start = start["_data"] | ||
| 263 | else: | ||
| 264 | start = None | ||
| 265 | return None | ||
| 266 | |||
| 267 | def __setitem__(self,var,data): | ||
| 268 | self._makeShadowCopy(var) | ||
| 269 | self.dict[var] = data | ||
| 270 | |||
| 271 | |||
| 272 | class DataSmartPackage(DataSmart): | ||
| 273 | """ | ||
| 274 | Persistent Data Storage | ||
| 275 | """ | ||
| 276 | def sanitize_filename(bbfile): | ||
| 277 | return bbfile.replace( '/', '_' ) | ||
| 278 | sanitize_filename = staticmethod(sanitize_filename) | ||
| 279 | |||
| 280 | def unpickle(self): | ||
| 281 | """ | ||
| 282 | Restore the dict from memory | ||
| 283 | """ | ||
| 284 | cache_bbfile = self.sanitize_filename(self.bbfile) | ||
| 285 | p = pickle.Unpickler( file("%s/%s"%(self.cache,cache_bbfile),"rb")) | ||
| 286 | self.dict = p.load() | ||
| 287 | self.unpickle_prep() | ||
| 288 | funcstr = self.getVar('__functions__', 0) | ||
| 289 | if funcstr: | ||
| 290 | comp = compile(funcstr, "<pickled>", "exec") | ||
| 291 | exec comp in __builtins__ | ||
| 292 | |||
| 293 | def linkDataSet(self): | ||
| 294 | if not self.parent == None: | ||
| 295 | # assume parent is a DataSmart instance | ||
| 296 | self.dict["_data"] = self.parent.dict | ||
| 297 | |||
| 298 | |||
| 299 | def __init__(self,cache,name,clean,parent): | ||
| 300 | """ | ||
| 301 | Construct a persistent data instance | ||
| 302 | """ | ||
| 303 | #Initialize the dictionary | ||
| 304 | DataSmart.__init__(self) | ||
| 305 | |||
| 306 | self.cache = cache | ||
| 307 | self.bbfile = os.path.abspath( name ) | ||
| 308 | self.parent = parent | ||
| 309 | |||
| 310 | # Either unpickle the data or do copy on write | ||
| 311 | if clean: | ||
| 312 | self.linkDataSet() | ||
| 313 | else: | ||
| 314 | self.unpickle() | ||
| 315 | |||
| 316 | def commit(self, mtime): | ||
| 317 | """ | ||
| 318 | Save the package to a permanent storage | ||
| 319 | """ | ||
| 320 | self.pickle_prep() | ||
| 321 | |||
| 322 | cache_bbfile = self.sanitize_filename(self.bbfile) | ||
| 323 | p = pickle.Pickler(file("%s/%s" %(self.cache,cache_bbfile), "wb" ), -1 ) | ||
| 324 | p.dump( self.dict ) | ||
| 325 | |||
| 326 | self.unpickle_prep() | ||
| 327 | |||
| 328 | def mtime(cache,bbfile): | ||
| 329 | cache_bbfile = DataSmartPackage.sanitize_filename(bbfile) | ||
| 330 | try: | ||
| 331 | return os.stat( "%s/%s" % (cache,cache_bbfile) )[8] | ||
| 332 | except OSError: | ||
| 333 | return 0 | ||
| 334 | mtime = staticmethod(mtime) | ||
| 335 | |||
| 336 | def pickle_prep(self): | ||
| 337 | """ | ||
| 338 | If self.dict contains a _data key and it is a configuration | ||
| 339 | we will remember we had a configuration instance attached | ||
| 340 | """ | ||
| 341 | if "_data" in self.dict: | ||
| 342 | if self.dict["_data"] == self.parent: | ||
| 343 | dest["_data"] = "cfg" | ||
| 344 | |||
| 345 | def unpickle_prep(self): | ||
| 346 | """ | ||
| 347 | If we had a configuration instance attached, we will reattach it | ||
| 348 | """ | ||
| 349 | if "_data" in self.dict: | ||
| 350 | if self.dict["_data"] == "cfg": | ||
| 351 | self.dict["_data"] = self.parent | ||
diff --git a/bitbake/lib/bb/data_smart.pyc b/bitbake/lib/bb/data_smart.pyc new file mode 100644 index 0000000000..0f6996a301 --- /dev/null +++ b/bitbake/lib/bb/data_smart.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/event.py b/bitbake/lib/bb/event.py new file mode 100644 index 0000000000..c4e88fa35d --- /dev/null +++ b/bitbake/lib/bb/event.py | |||
| @@ -0,0 +1,210 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | """ | ||
| 5 | BitBake 'Event' implementation | ||
| 6 | |||
| 7 | Classes and functions for manipulating 'events' in the | ||
| 8 | BitBake build tools. | ||
| 9 | |||
| 10 | Copyright (C) 2003, 2004 Chris Larson | ||
| 11 | |||
| 12 | This program is free software; you can redistribute it and/or modify it under | ||
| 13 | the terms of the GNU General Public License as published by the Free Software | ||
| 14 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 15 | version. | ||
| 16 | |||
| 17 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 18 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 19 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 20 | |||
| 21 | You should have received a copy of the GNU General Public License along with | ||
| 22 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 23 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 24 | """ | ||
| 25 | |||
| 26 | import os, re | ||
| 27 | import bb.data | ||
| 28 | |||
| 29 | class Event: | ||
| 30 | """Base class for events""" | ||
| 31 | type = "Event" | ||
| 32 | |||
| 33 | def __init__(self, d = bb.data.init()): | ||
| 34 | self._data = d | ||
| 35 | |||
| 36 | def getData(self): | ||
| 37 | return self._data | ||
| 38 | |||
| 39 | def setData(self, data): | ||
| 40 | self._data = data | ||
| 41 | |||
| 42 | data = property(getData, setData, None, "data property") | ||
| 43 | |||
| 44 | NotHandled = 0 | ||
| 45 | Handled = 1 | ||
| 46 | handlers = [] | ||
| 47 | |||
| 48 | def tmpHandler(event): | ||
| 49 | """Default handler for code events""" | ||
| 50 | return NotHandled | ||
| 51 | |||
| 52 | def defaultTmpHandler(): | ||
| 53 | tmp = "def tmpHandler(e):\n\t\"\"\"heh\"\"\"\n\treturn 0" | ||
| 54 | comp = compile(tmp, "tmpHandler(e)", "exec") | ||
| 55 | return comp | ||
| 56 | |||
| 57 | def fire(event): | ||
| 58 | """Fire off an Event""" | ||
| 59 | for h in handlers: | ||
| 60 | if type(h).__name__ == "code": | ||
| 61 | exec(h) | ||
| 62 | if tmpHandler(event) == Handled: | ||
| 63 | return Handled | ||
| 64 | else: | ||
| 65 | if h(event) == Handled: | ||
| 66 | return Handled | ||
| 67 | return NotHandled | ||
| 68 | |||
| 69 | def register(handler): | ||
| 70 | """Register an Event handler""" | ||
| 71 | if handler is not None: | ||
| 72 | # handle string containing python code | ||
| 73 | if type(handler).__name__ == "str": | ||
| 74 | return registerCode(handler) | ||
| 75 | # prevent duplicate registration | ||
| 76 | if not handler in handlers: | ||
| 77 | handlers.append(handler) | ||
| 78 | |||
| 79 | def registerCode(handlerStr): | ||
| 80 | """Register a 'code' Event. | ||
| 81 | Deprecated interface; call register instead. | ||
| 82 | |||
| 83 | Expects to be passed python code as a string, which will | ||
| 84 | be passed in turn to compile() and then exec(). Note that | ||
| 85 | the code will be within a function, so should have had | ||
| 86 | appropriate tabbing put in place.""" | ||
| 87 | tmp = "def tmpHandler(e):\n%s" % handlerStr | ||
| 88 | comp = compile(tmp, "tmpHandler(e)", "exec") | ||
| 89 | # prevent duplicate registration | ||
| 90 | if not comp in handlers: | ||
| 91 | handlers.append(comp) | ||
| 92 | |||
| 93 | def remove(handler): | ||
| 94 | """Remove an Event handler""" | ||
| 95 | for h in handlers: | ||
| 96 | if type(handler).__name__ == "str": | ||
| 97 | return removeCode(handler) | ||
| 98 | |||
| 99 | if handler is h: | ||
| 100 | handlers.remove(handler) | ||
| 101 | |||
| 102 | def removeCode(handlerStr): | ||
| 103 | """Remove a 'code' Event handler | ||
| 104 | Deprecated interface; call remove instead.""" | ||
| 105 | tmp = "def tmpHandler(e):\n%s" % handlerStr | ||
| 106 | comp = compile(tmp, "tmpHandler(e)", "exec") | ||
| 107 | handlers.remove(comp) | ||
| 108 | |||
| 109 | def getName(e): | ||
| 110 | """Returns the name of a class or class instance""" | ||
| 111 | if getattr(e, "__name__", None) == None: | ||
| 112 | return e.__class__.__name__ | ||
| 113 | else: | ||
| 114 | return e.__name__ | ||
| 115 | |||
| 116 | |||
| 117 | class PkgBase(Event): | ||
| 118 | """Base class for package events""" | ||
| 119 | |||
| 120 | def __init__(self, t, d = {}): | ||
| 121 | self._pkg = t | ||
| 122 | Event.__init__(self, d) | ||
| 123 | |||
| 124 | def getPkg(self): | ||
| 125 | return self._pkg | ||
| 126 | |||
| 127 | def setPkg(self, pkg): | ||
| 128 | self._pkg = pkg | ||
| 129 | |||
| 130 | pkg = property(getPkg, setPkg, None, "pkg property") | ||
| 131 | |||
| 132 | |||
| 133 | class BuildBase(Event): | ||
| 134 | """Base class for bbmake run events""" | ||
| 135 | |||
| 136 | def __init__(self, n, p, c): | ||
| 137 | self._name = n | ||
| 138 | self._pkgs = p | ||
| 139 | Event.__init__(self, c) | ||
| 140 | |||
| 141 | def getPkgs(self): | ||
| 142 | return self._pkgs | ||
| 143 | |||
| 144 | def setPkgs(self, pkgs): | ||
| 145 | self._pkgs = pkgs | ||
| 146 | |||
| 147 | def getName(self): | ||
| 148 | return self._name | ||
| 149 | |||
| 150 | def setName(self, name): | ||
| 151 | self._name = name | ||
| 152 | |||
| 153 | def getCfg(self): | ||
| 154 | return self.data | ||
| 155 | |||
| 156 | def setCfg(self, cfg): | ||
| 157 | self.data = cfg | ||
| 158 | |||
| 159 | pkgs = property(getPkgs, setPkgs, None, "pkgs property") | ||
| 160 | name = property(getName, setName, None, "name property") | ||
| 161 | cfg = property(getCfg, setCfg, None, "cfg property") | ||
| 162 | |||
| 163 | |||
| 164 | class DepBase(PkgBase): | ||
| 165 | """Base class for dependency events""" | ||
| 166 | |||
| 167 | def __init__(self, t, data, d): | ||
| 168 | self._dep = d | ||
| 169 | PkgBase.__init__(self, t, data) | ||
| 170 | |||
| 171 | def getDep(self): | ||
| 172 | return self._dep | ||
| 173 | |||
| 174 | def setDep(self, dep): | ||
| 175 | self._dep = dep | ||
| 176 | |||
| 177 | dep = property(getDep, setDep, None, "dep property") | ||
| 178 | |||
| 179 | |||
| 180 | class PkgStarted(PkgBase): | ||
| 181 | """Package build started""" | ||
| 182 | |||
| 183 | |||
| 184 | class PkgFailed(PkgBase): | ||
| 185 | """Package build failed""" | ||
| 186 | |||
| 187 | |||
| 188 | class PkgSucceeded(PkgBase): | ||
| 189 | """Package build completed""" | ||
| 190 | |||
| 191 | |||
| 192 | class BuildStarted(BuildBase): | ||
| 193 | """bbmake build run started""" | ||
| 194 | |||
| 195 | |||
| 196 | class BuildCompleted(BuildBase): | ||
| 197 | """bbmake build run completed""" | ||
| 198 | |||
| 199 | |||
| 200 | class UnsatisfiedDep(DepBase): | ||
| 201 | """Unsatisfied Dependency""" | ||
| 202 | |||
| 203 | |||
| 204 | class RecursiveDep(DepBase): | ||
| 205 | """Recursive Dependency""" | ||
| 206 | |||
| 207 | |||
| 208 | class MultipleProviders(PkgBase): | ||
| 209 | """Multiple Providers""" | ||
| 210 | |||
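Handlers registered with register() are simply tried in order by fire() until one of them returns Handled. A small usage sketch of the API above follows; it assumes bitbake's lib directory is on sys.path and, given the code's vintage, a Python 2 interpreter.

import bb.data, bb.event

def on_pkg_started(e):
    # react only to PkgStarted events, pass on everything else
    if bb.event.getName(e) == "PkgStarted":
        print("started building %s" % e.pkg)
        return bb.event.Handled
    return bb.event.NotHandled

bb.event.register(on_pkg_started)
d = bb.data.init()
bb.event.fire(bb.event.PkgStarted("busybox", d))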
diff --git a/bitbake/lib/bb/event.pyc b/bitbake/lib/bb/event.pyc new file mode 100644 index 0000000000..b6ed391811 --- /dev/null +++ b/bitbake/lib/bb/event.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/fetch.py b/bitbake/lib/bb/fetch.py new file mode 100644 index 0000000000..982ab51b76 --- /dev/null +++ b/bitbake/lib/bb/fetch.py | |||
| @@ -0,0 +1,656 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | """ | ||
| 5 | BitBake 'Fetch' implementations | ||
| 6 | |||
| 7 | Classes for obtaining upstream sources for the | ||
| 8 | BitBake build tools. | ||
| 9 | |||
| 10 | Copyright (C) 2003, 2004 Chris Larson | ||
| 11 | |||
| 12 | This program is free software; you can redistribute it and/or modify it under | ||
| 13 | the terms of the GNU General Public License as published by the Free Software | ||
| 14 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 15 | version. | ||
| 16 | |||
| 17 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 18 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 19 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 20 | |||
| 21 | You should have received a copy of the GNU General Public License along with | ||
| 22 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 23 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 24 | |||
| 25 | Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 26 | """ | ||
| 27 | |||
| 28 | import os, re | ||
| 29 | import bb | ||
| 30 | from bb import data | ||
| 31 | |||
| 32 | class FetchError(Exception): | ||
| 33 | """Exception raised when a download fails""" | ||
| 34 | |||
| 35 | class NoMethodError(Exception): | ||
| 36 | """Exception raised when there is no method to obtain a supplied url or set of urls""" | ||
| 37 | |||
| 38 | class MissingParameterError(Exception): | ||
| 39 | """Exception raised when a fetch method is missing a critical parameter in the url""" | ||
| 40 | |||
| 41 | class MD5SumError(Exception): | ||
| 42 | """Exception raised when a MD5SUM of a file does not match the expected one""" | ||
| 43 | |||
| 44 | def uri_replace(uri, uri_find, uri_replace, d): | ||
| 45 | # bb.note("uri_replace: operating on %s" % uri) | ||
| 46 | if not uri or not uri_find or not uri_replace: | ||
| 47 | bb.debug(1, "uri_replace: passed an undefined value, not replacing") | ||
| 48 | uri_decoded = list(bb.decodeurl(uri)) | ||
| 49 | uri_find_decoded = list(bb.decodeurl(uri_find)) | ||
| 50 | uri_replace_decoded = list(bb.decodeurl(uri_replace)) | ||
| 51 | result_decoded = ['','','','','',{}] | ||
| 52 | for i in uri_find_decoded: | ||
| 53 | loc = uri_find_decoded.index(i) | ||
| 54 | result_decoded[loc] = uri_decoded[loc] | ||
| 55 | import types | ||
| 56 | if type(i) == types.StringType: | ||
| 57 | import re | ||
| 58 | if (re.match(i, uri_decoded[loc])): | ||
| 59 | result_decoded[loc] = re.sub(i, uri_replace_decoded[loc], uri_decoded[loc]) | ||
| 60 | if uri_find_decoded.index(i) == 2: | ||
| 61 | if d: | ||
| 62 | localfn = bb.fetch.localpath(uri, d) | ||
| 63 | if localfn: | ||
| 64 | result_decoded[loc] = os.path.dirname(result_decoded[loc]) + "/" + os.path.basename(bb.fetch.localpath(uri, d)) | ||
| 65 | # bb.note("uri_replace: matching %s against %s and replacing with %s" % (i, uri_decoded[loc], uri_replace_decoded[loc])) | ||
| 66 | else: | ||
| 67 | # bb.note("uri_replace: no match") | ||
| 68 | return uri | ||
| 69 | # else: | ||
| 70 | # for j in i.keys(): | ||
| 71 | # FIXME: apply replacements against options | ||
| 72 | return bb.encodeurl(result_decoded) | ||
| 73 | |||
| 74 | methods = [] | ||
| 75 | |||
| 76 | def init(urls = [], d = None): | ||
| 77 | if d == None: | ||
| 78 | bb.debug(2,"BUG init called with None as data object!!!") | ||
| 79 | return | ||
| 80 | |||
| 81 | for m in methods: | ||
| 82 | m.urls = [] | ||
| 83 | |||
| 84 | for u in urls: | ||
| 85 | for m in methods: | ||
| 86 | m.data = d | ||
| 87 | if m.supports(u, d): | ||
| 88 | m.urls.append(u) | ||
| 89 | |||
| 90 | def go(d): | ||
| 91 | """Fetch all urls""" | ||
| 92 | for m in methods: | ||
| 93 | if m.urls: | ||
| 94 | m.go(d) | ||
| 95 | |||
| 96 | def localpaths(d): | ||
| 97 | """Return a list of the local filenames, assuming successful fetch""" | ||
| 98 | local = [] | ||
| 99 | for m in methods: | ||
| 100 | for u in m.urls: | ||
| 101 | local.append(m.localpath(u, d)) | ||
| 102 | return local | ||
| 103 | |||
| 104 | def localpath(url, d): | ||
| 105 | for m in methods: | ||
| 106 | if m.supports(url, d): | ||
| 107 | return m.localpath(url, d) | ||
| 108 | return url | ||
| 109 | |||
| 110 | class Fetch(object): | ||
| 111 | """Base class for 'fetch'ing data""" | ||
| 112 | |||
| 113 | def __init__(self, urls = [], d = None): | ||
| 114 | self.urls = [] | ||
| 115 | for url in urls: | ||
| 116 | if self.supports(bb.decodeurl(url), d) == 1: | ||
| 117 | self.urls.append(url) | ||
| 118 | |||
| 119 | def supports(url, d): | ||
| 120 | """Check to see if this fetch class supports a given url. | ||
| 121 | Expects supplied url in list form, as outputted by bb.decodeurl(). | ||
| 122 | """ | ||
| 123 | return 0 | ||
| 124 | supports = staticmethod(supports) | ||
| 125 | |||
| 126 | def localpath(url, d): | ||
| 127 | """Return the local filename of a given url assuming a successful fetch. | ||
| 128 | """ | ||
| 129 | return url | ||
| 130 | localpath = staticmethod(localpath) | ||
| 131 | |||
| 132 | def setUrls(self, urls): | ||
| 133 | self.__urls = urls | ||
| 134 | |||
| 135 | def getUrls(self): | ||
| 136 | return self.__urls | ||
| 137 | |||
| 138 | urls = property(getUrls, setUrls, None, "Urls property") | ||
| 139 | |||
| 140 | def setData(self, data): | ||
| 141 | self.__data = data | ||
| 142 | |||
| 143 | def getData(self): | ||
| 144 | return self.__data | ||
| 145 | |||
| 146 | data = property(getData, setData, None, "Data property") | ||
| 147 | |||
| 148 | def go(self, urls = []): | ||
| 149 | """Fetch urls""" | ||
| 150 | raise NoMethodError("Missing implementation for url") | ||
| 151 | |||
| 152 | class Wget(Fetch): | ||
| 153 | """Class to fetch urls via 'wget'""" | ||
| 154 | def supports(url, d): | ||
| 155 | """Check to see if a given url can be fetched using wget. | ||
| 156 | Expects supplied url in list form, as outputted by bb.decodeurl(). | ||
| 157 | """ | ||
| 158 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
| 159 | return type in ['http','https','ftp'] | ||
| 160 | supports = staticmethod(supports) | ||
| 161 | |||
| 162 | def localpath(url, d): | ||
| 163 | # strip off parameters | ||
| 164 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
| 165 | if "localpath" in parm: | ||
| 166 | # if user overrides local path, use it. | ||
| 167 | return parm["localpath"] | ||
| 168 | url = bb.encodeurl([type, host, path, user, pswd, {}]) | ||
| 169 | |||
| 170 | return os.path.join(data.getVar("DL_DIR", d), os.path.basename(url)) | ||
| 171 | localpath = staticmethod(localpath) | ||
| 172 | |||
| 173 | def go(self, d, urls = []): | ||
| 174 | """Fetch urls""" | ||
| 175 | |||
| 176 | def md5_sum(basename, data): | ||
| 177 | """ | ||
| 178 | Fast and incomplete OVERRIDE implementation for MD5SUM handling | ||
| 179 | MD5SUM_basename = "SUM" and fallback to MD5SUM_basename | ||
| 180 | """ | ||
| 181 | var = "MD5SUM_%s" % basename | ||
| 182 | return bb.data.getVar(var, data) or bb.data.getVar("MD5SUM", data) | ||
| 183 | |||
| 184 | |||
| 185 | def fetch_uri(uri, basename, dl, md5, parm, d): | ||
| 186 | if os.path.exists(dl): | ||
| 187 | # file exists, but we didn't complete it, so try again | ||
| 188 | fetchcmd = data.getVar("RESUMECOMMAND", d, 1) | ||
| 189 | else: | ||
| 190 | fetchcmd = data.getVar("FETCHCOMMAND", d, 1) | ||
| 191 | |||
| 192 | bb.note("fetch " + uri) | ||
| 193 | fetchcmd = fetchcmd.replace("${URI}", uri) | ||
| 194 | fetchcmd = fetchcmd.replace("${FILE}", basename) | ||
| 195 | bb.debug(2, "executing " + fetchcmd) | ||
| 196 | ret = os.system(fetchcmd) | ||
| 197 | if ret != 0: | ||
| 198 | return False | ||
| 199 | |||
| 200 | # check if sourceforge sent us to the mirror page | ||
| 201 | dl_dir = data.getVar("DL_DIR", d, True) | ||
| 202 | if not os.path.exists(dl): | ||
| 203 | os.system("rm %s*" % dl) # FIXME shell quote it | ||
| 204 | bb.debug(2,"sourceforge.net send us to the mirror on %s" % basename) | ||
| 205 | return False | ||
| 206 | |||
| 207 | # supposedly complete.. write out md5sum | ||
| 208 | if bb.which(data.getVar('PATH', d), 'md5sum'): | ||
| 209 | try: | ||
| 210 | md5pipe = os.popen('md5sum ' + dl) | ||
| 211 | md5data = (md5pipe.readline().split() or [ "" ])[0] | ||
| 212 | md5pipe.close() | ||
| 213 | except OSError: | ||
| 214 | md5data = "" | ||
| 215 | md5out = file(md5, 'w') | ||
| 216 | md5out.write(md5data) | ||
| 217 | md5out.close() | ||
| 218 | else: | ||
| 219 | md5out = file(md5, 'w') | ||
| 220 | md5out.write("") | ||
| 221 | md5out.close() | ||
| 222 | return True | ||
| 223 | |||
| 224 | if not urls: | ||
| 225 | urls = self.urls | ||
| 226 | |||
| 227 | localdata = data.createCopy(d) | ||
| 228 | data.setVar('OVERRIDES', "wget:" + data.getVar('OVERRIDES', localdata), localdata) | ||
| 229 | data.update_data(localdata) | ||
| 230 | |||
| 231 | for uri in urls: | ||
| 232 | completed = 0 | ||
| 233 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(uri, localdata)) | ||
| 234 | basename = os.path.basename(path) | ||
| 235 | dl = self.localpath(uri, d) | ||
| 236 | dl = data.expand(dl, localdata) | ||
| 237 | md5 = dl + '.md5' | ||
| 238 | |||
| 239 | if os.path.exists(md5): | ||
| 240 | # complete, nothing to see here.. | ||
| 241 | continue | ||
| 242 | |||
| 243 | premirrors = [ i.split() for i in (data.getVar('PREMIRRORS', localdata, 1) or "").split('\n') if i ] | ||
| 244 | for (find, replace) in premirrors: | ||
| 245 | newuri = uri_replace(uri, find, replace, d) | ||
| 246 | if newuri != uri: | ||
| 247 | if fetch_uri(newuri, basename, dl, md5, parm, localdata): | ||
| 248 | completed = 1 | ||
| 249 | break | ||
| 250 | |||
| 251 | if completed: | ||
| 252 | continue | ||
| 253 | |||
| 254 | if fetch_uri(uri, basename, dl, md5, parm, localdata): | ||
| 255 | continue | ||
| 256 | |||
| 257 | # try mirrors | ||
| 258 | mirrors = [ i.split() for i in (data.getVar('MIRRORS', localdata, 1) or "").split('\n') if i ] | ||
| 259 | for (find, replace) in mirrors: | ||
| 260 | newuri = uri_replace(uri, find, replace, d) | ||
| 261 | if newuri != uri: | ||
| 262 | if fetch_uri(newuri, basename, dl, md5, parm, localdata): | ||
| 263 | completed = 1 | ||
| 264 | break | ||
| 265 | |||
| 266 | if not completed: | ||
| 267 | raise FetchError(uri) | ||
| 268 | |||
| 269 | del localdata | ||
| 270 | |||
| 271 | |||
| 272 | methods.append(Wget()) | ||
| 273 | |||
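Wget.go() above tries each URI against PREMIRRORS first, then the URI itself, then MIRRORS, rewriting the URI with uri_replace() at every step and stopping at the first successful download. The condensed sketch below shows that fallback order only; fetch_with_mirrors, fetch_one, and the plain string replacement are stand-ins for the real fetch_uri()/uri_replace() machinery.

def fetch_with_mirrors(uri, premirrors, mirrors, fetch_one):
    # order mirrors the logic in Wget.go(): premirrors, original uri, mirrors
    for find, replace in premirrors:
        newuri = uri.replace(find, replace)
        if newuri != uri and fetch_one(newuri):
            return newuri
    if fetch_one(uri):
        return uri
    for find, replace in mirrors:
        newuri = uri.replace(find, replace)
        if newuri != uri and fetch_one(newuri):
            return newuri
    raise Exception("could not fetch %s" % uri)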
| 274 | class Cvs(Fetch): | ||
| 275 | """Class to fetch a module or modules from cvs repositories""" | ||
| 276 | def supports(url, d): | ||
| 277 | """Check to see if a given url can be fetched with cvs. | ||
| 278 | Expects supplied url in list form, as outputted by bb.decodeurl(). | ||
| 279 | """ | ||
| 280 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
| 281 | return type in ['cvs', 'pserver'] | ||
| 282 | supports = staticmethod(supports) | ||
| 283 | |||
| 284 | def localpath(url, d): | ||
| 285 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
| 286 | if "localpath" in parm: | ||
| 287 | # if user overrides local path, use it. | ||
| 288 | return parm["localpath"] | ||
| 289 | |||
| 290 | if not "module" in parm: | ||
| 291 | raise MissingParameterError("cvs method needs a 'module' parameter") | ||
| 292 | else: | ||
| 293 | module = parm["module"] | ||
| 294 | if 'tag' in parm: | ||
| 295 | tag = parm['tag'] | ||
| 296 | else: | ||
| 297 | tag = "" | ||
| 298 | if 'date' in parm: | ||
| 299 | date = parm['date'] | ||
| 300 | else: | ||
| 301 | if not tag: | ||
| 302 | date = data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) | ||
| 303 | else: | ||
| 304 | date = "" | ||
| 305 | |||
| 306 | return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, tag, date), d)) | ||
| 307 | localpath = staticmethod(localpath) | ||
| 308 | |||
| 309 | def go(self, d, urls = []): | ||
| 310 | """Fetch urls""" | ||
| 311 | if not urls: | ||
| 312 | urls = self.urls | ||
| 313 | |||
| 314 | localdata = data.createCopy(d) | ||
| 315 | data.setVar('OVERRIDES', "cvs:%s" % data.getVar('OVERRIDES', localdata), localdata) | ||
| 316 | data.update_data(localdata) | ||
| 317 | |||
| 318 | for loc in urls: | ||
| 319 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) | ||
| 320 | if not "module" in parm: | ||
| 321 | raise MissingParameterError("cvs method needs a 'module' parameter") | ||
| 322 | else: | ||
| 323 | module = parm["module"] | ||
| 324 | |||
| 325 | dlfile = self.localpath(loc, localdata) | ||
| 326 | dldir = data.getVar('DL_DIR', localdata, 1) | ||
| 327 | # if local path contains the cvs | ||
| 328 | # module, consider the dir above it to be the | ||
| 329 | # download directory | ||
| 330 | # pos = dlfile.find(module) | ||
| 331 | # if pos: | ||
| 332 | # dldir = dlfile[:pos] | ||
| 333 | # else: | ||
| 334 | # dldir = os.path.dirname(dlfile) | ||
| 335 | |||
| 336 | # setup cvs options | ||
| 337 | options = [] | ||
| 338 | if 'tag' in parm: | ||
| 339 | tag = parm['tag'] | ||
| 340 | else: | ||
| 341 | tag = "" | ||
| 342 | |||
| 343 | if 'date' in parm: | ||
| 344 | date = parm['date'] | ||
| 345 | else: | ||
| 346 | if not tag: | ||
| 347 | date = data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) | ||
| 348 | else: | ||
| 349 | date = "" | ||
| 350 | |||
| 351 | if "method" in parm: | ||
| 352 | method = parm["method"] | ||
| 353 | else: | ||
| 354 | method = "pserver" | ||
| 355 | |||
| 356 | if "localdir" in parm: | ||
| 357 | localdir = parm["localdir"] | ||
| 358 | else: | ||
| 359 | localdir = module | ||
| 360 | |||
| 361 | cvs_rsh = None | ||
| 362 | if method == "ext": | ||
| 363 | if "rsh" in parm: | ||
| 364 | cvs_rsh = parm["rsh"] | ||
| 365 | |||
| 366 | tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date), localdata) | ||
| 367 | data.setVar('TARFILES', dlfile, localdata) | ||
| 368 | data.setVar('TARFN', tarfn, localdata) | ||
| 369 | |||
| 370 | dl = os.path.join(dldir, tarfn) | ||
| 371 | if os.access(dl, os.R_OK): | ||
| 372 | bb.debug(1, "%s already exists, skipping cvs checkout." % tarfn) | ||
| 373 | continue | ||
| 374 | |||
| 375 | pn = data.getVar('PN', d, 1) | ||
| 376 | cvs_tarball_stash = None | ||
| 377 | if pn: | ||
| 378 | cvs_tarball_stash = data.getVar('CVS_TARBALL_STASH_%s' % pn, d, 1) | ||
| 379 | if cvs_tarball_stash == None: | ||
| 380 | cvs_tarball_stash = data.getVar('CVS_TARBALL_STASH', d, 1) | ||
| 381 | if cvs_tarball_stash: | ||
| 382 | fetchcmd = data.getVar("FETCHCOMMAND_wget", d, 1) | ||
| 383 | uri = cvs_tarball_stash + tarfn | ||
| 384 | bb.note("fetch " + uri) | ||
| 385 | fetchcmd = fetchcmd.replace("${URI}", uri) | ||
| 386 | ret = os.system(fetchcmd) | ||
| 387 | if ret == 0: | ||
| 388 | bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn) | ||
| 389 | continue | ||
| 390 | |||
| 391 | if date: | ||
| 392 | options.append("-D %s" % date) | ||
| 393 | if tag: | ||
| 394 | options.append("-r %s" % tag) | ||
| 395 | |||
| 396 | olddir = os.path.abspath(os.getcwd()) | ||
| 397 | os.chdir(data.expand(dldir, localdata)) | ||
| 398 | |||
| 399 | # setup cvsroot | ||
| 400 | if method == "dir": | ||
| 401 | cvsroot = path | ||
| 402 | else: | ||
| 403 | cvsroot = ":" + method + ":" + user | ||
| 404 | if pswd: | ||
| 405 | cvsroot += ":" + pswd | ||
| 406 | cvsroot += "@" + host + ":" + path | ||
| 407 | |||
| 408 | data.setVar('CVSROOT', cvsroot, localdata) | ||
| 409 | data.setVar('CVSCOOPTS', " ".join(options), localdata) | ||
| 410 | data.setVar('CVSMODULE', module, localdata) | ||
| 411 | cvscmd = data.getVar('FETCHCOMMAND', localdata, 1) | ||
| 412 | cvsupdatecmd = data.getVar('UPDATECOMMAND', localdata, 1) | ||
| 413 | |||
| 414 | if cvs_rsh: | ||
| 415 | cvscmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvscmd) | ||
| 416 | cvsupdatecmd = "CVS_RSH=\"%s\" %s" % (cvs_rsh, cvsupdatecmd) | ||
| 417 | |||
| 418 | # create module directory | ||
| 419 | bb.debug(2, "Fetch: checking for module directory") | ||
| 420 | pkg=data.expand('${PN}', d) | ||
| 421 | pkgdir=os.path.join(data.expand('${CVSDIR}', localdata), pkg) | ||
| 422 | moddir=os.path.join(pkgdir,localdir) | ||
| 423 | if os.access(os.path.join(moddir,'CVS'), os.R_OK): | ||
| 424 | bb.note("Update " + loc) | ||
| 425 | # update sources there | ||
| 426 | os.chdir(moddir) | ||
| 427 | myret = os.system(cvsupdatecmd) | ||
| 428 | else: | ||
| 429 | bb.note("Fetch " + loc) | ||
| 430 | # check out sources there | ||
| 431 | bb.mkdirhier(pkgdir) | ||
| 432 | os.chdir(pkgdir) | ||
| 433 | bb.debug(1, "Running %s" % cvscmd) | ||
| 434 | myret = os.system(cvscmd) | ||
| 435 | |||
| 436 | if myret != 0: | ||
| 437 | try: | ||
| 438 | os.rmdir(moddir) | ||
| 439 | except OSError: | ||
| 440 | pass | ||
| 441 | raise FetchError(module) | ||
| 442 | |||
| 443 | os.chdir(moddir) | ||
| 444 | os.chdir('..') | ||
| 445 | # tar them up to a defined filename | ||
| 446 | myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(moddir))) | ||
| 447 | if myret != 0: | ||
| 448 | try: | ||
| 449 | os.unlink(tarfn) | ||
| 450 | except OSError: | ||
| 451 | pass | ||
| 452 | os.chdir(olddir) | ||
| 453 | del localdata | ||
| 454 | |||
| 455 | methods.append(Cvs()) | ||
| 456 | |||
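Editor's note: the Cvs fetcher above builds a CVSROOT of the form :method:user[:password]@host:path (or uses the path directly for the "dir" method) and names the resulting tarball module_host_tag_date.tar.gz. A small standalone sketch of that construction, for illustration only (host and path values are made up):

    def build_cvsroot(method, user, pswd, host, path):
        # mirrors the CVSROOT construction in Cvs.go(): 'dir' uses the path
        # directly, everything else builds :method:user[:pswd]@host:path
        if method == "dir":
            return path
        root = ":" + method + ":" + user
        if pswd:
            root += ":" + pswd
        return root + "@" + host + ":" + path

    def cvs_tarball_name(module, host, tag="", date=""):
        return '%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, tag, date)

    # build_cvsroot("pserver", "anonymous", "", "cvs.example.org", "/cvsroot/project")
    #   -> ':pserver:anonymous@cvs.example.org:/cvsroot/project'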
| 457 | class Bk(Fetch): | ||
| 458 | def supports(url, d): | ||
| 459 | """Check to see if a given url can be fetched via bitkeeper. | ||
| 460 | Expects the supplied url as a string; it is decoded here with bb.decodeurl(). | ||
| 461 | """ | ||
| 462 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
| 463 | return type in ['bk'] | ||
| 464 | supports = staticmethod(supports) | ||
| 465 | |||
| 466 | methods.append(Bk()) | ||
| 467 | |||
| 468 | class Local(Fetch): | ||
| 469 | def supports(url, d): | ||
| 470 | """Check to see if a given url can be fetched in the local filesystem. | ||
| 471 | Expects the supplied url as a string; it is decoded here with bb.decodeurl(). | ||
| 472 | """ | ||
| 473 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
| 474 | return type in ['file','patch'] | ||
| 475 | supports = staticmethod(supports) | ||
| 476 | |||
| 477 | def localpath(url, d): | ||
| 478 | """Return the local filename of a given url assuming a successful fetch. | ||
| 479 | """ | ||
| 480 | path = url.split("://")[1] | ||
| 481 | newpath = path | ||
| 482 | if path[0] != "/": | ||
| 483 | filespath = data.getVar('FILESPATH', d, 1) | ||
| 484 | if filespath: | ||
| 485 | newpath = bb.which(filespath, path) | ||
| 486 | if not newpath: | ||
| 487 | filesdir = data.getVar('FILESDIR', d, 1) | ||
| 488 | if filesdir: | ||
| 489 | newpath = os.path.join(filesdir, path) | ||
| 490 | return newpath | ||
| 491 | localpath = staticmethod(localpath) | ||
| 492 | |||
| 493 | def go(self, urls = []): | ||
| 494 | """Fetch urls (no-op for Local method)""" | ||
| 495 | # no need to fetch local files, we'll deal with them in place. | ||
| 496 | return 1 | ||
| 497 | |||
| 498 | methods.append(Local()) | ||
| 499 | |||
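Editor's note: Local.localpath() above resolves a relative file:// or patch:// path by searching the colon-separated FILESPATH directories with bb.which() and falling back to FILESDIR. The same lookup order can be illustrated with a plain path search; the directory names in the usage comment are invented for the example.

    import os

    def resolve_local(path, filespath=None, filesdir=None):
        # search FILESPATH entries first, then fall back to FILESDIR,
        # mirroring Local.localpath() for relative paths
        if os.path.isabs(path):
            return path
        if filespath:
            for d in filespath.split(':'):
                candidate = os.path.join(d, path)
                if os.path.exists(candidate):
                    return candidate
        if filesdir:
            return os.path.join(filesdir, path)
        return path

    # resolve_local("defconfig", filespath="/meta/recipes/files:/meta/files")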
| 500 | class Svn(Fetch): | ||
| 501 | """Class to fetch a module or modules from svn repositories""" | ||
| 502 | def supports(url, d): | ||
| 503 | """Check to see if a given url can be fetched with svn. | ||
| 504 | Expects the supplied url as a string; it is decoded here with bb.decodeurl(). | ||
| 505 | """ | ||
| 506 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
| 507 | return type in ['svn'] | ||
| 508 | supports = staticmethod(supports) | ||
| 509 | |||
| 510 | def localpath(url, d): | ||
| 511 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(url, d)) | ||
| 512 | if "localpath" in parm: | ||
| 513 | # if user overrides local path, use it. | ||
| 514 | return parm["localpath"] | ||
| 515 | |||
| 516 | if not "module" in parm: | ||
| 517 | raise MissingParameterError("svn method needs a 'module' parameter") | ||
| 518 | else: | ||
| 519 | module = parm["module"] | ||
| 520 | if 'rev' in parm: | ||
| 521 | revision = parm['rev'] | ||
| 522 | else: | ||
| 523 | revision = "" | ||
| 524 | |||
| 525 | date = data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) | ||
| 526 | |||
| 527 | return os.path.join(data.getVar("DL_DIR", d, 1),data.expand('%s_%s_%s_%s.tar.gz' % ( module.replace('/', '.'), host, revision, date), d)) | ||
| 528 | localpath = staticmethod(localpath) | ||
| 529 | |||
| 530 | def go(self, d, urls = []): | ||
| 531 | """Fetch urls""" | ||
| 532 | if not urls: | ||
| 533 | urls = self.urls | ||
| 534 | |||
| 535 | localdata = data.createCopy(d) | ||
| 536 | data.setVar('OVERRIDES', "svn:%s" % data.getVar('OVERRIDES', localdata), localdata) | ||
| 537 | data.update_data(localdata) | ||
| 538 | |||
| 539 | for loc in urls: | ||
| 540 | (type, host, path, user, pswd, parm) = bb.decodeurl(data.expand(loc, localdata)) | ||
| 541 | if not "module" in parm: | ||
| 542 | raise MissingParameterError("svn method needs a 'module' parameter") | ||
| 543 | else: | ||
| 544 | module = parm["module"] | ||
| 545 | |||
| 546 | dlfile = self.localpath(loc, localdata) | ||
| 547 | dldir = data.getVar('DL_DIR', localdata, 1) | ||
| 548 | # if local path contains the svn | ||
| 549 | # module, consider the dir above it to be the | ||
| 550 | # download directory | ||
| 551 | # pos = dlfile.find(module) | ||
| 552 | # if pos: | ||
| 553 | # dldir = dlfile[:pos] | ||
| 554 | # else: | ||
| 555 | # dldir = os.path.dirname(dlfile) | ||
| 556 | |||
| 557 | # setup svn options | ||
| 558 | options = [] | ||
| 559 | if 'rev' in parm: | ||
| 560 | revision = parm['rev'] | ||
| 561 | else: | ||
| 562 | revision = "" | ||
| 563 | |||
| 564 | date = data.getVar("CVSDATE", d, 1) or data.getVar("DATE", d, 1) | ||
| 565 | |||
| 566 | if "method" in parm: | ||
| 567 | method = parm["method"] | ||
| 568 | else: | ||
| 569 | method = "pserver" | ||
| 570 | |||
| 571 | if "proto" in parm: | ||
| 572 | proto = parm["proto"] | ||
| 573 | else: | ||
| 574 | proto = "svn" | ||
| 575 | |||
| 576 | svn_rsh = None | ||
| 577 | if method == "ext": | ||
| 578 | if "rsh" in parm: | ||
| 579 | svn_rsh = parm["rsh"] | ||
| 580 | |||
| 581 | tarfn = data.expand('%s_%s_%s_%s.tar.gz' % (module.replace('/', '.'), host, revision, date), localdata) | ||
| 582 | data.setVar('TARFILES', dlfile, localdata) | ||
| 583 | data.setVar('TARFN', tarfn, localdata) | ||
| 584 | |||
| 585 | dl = os.path.join(dldir, tarfn) | ||
| 586 | if os.access(dl, os.R_OK): | ||
| 587 | bb.debug(1, "%s already exists, skipping svn checkout." % tarfn) | ||
| 588 | continue | ||
| 589 | |||
| 590 | svn_tarball_stash = data.getVar('CVS_TARBALL_STASH', d, 1) | ||
| 591 | if svn_tarball_stash: | ||
| 592 | fetchcmd = data.getVar("FETCHCOMMAND_wget", d, 1) | ||
| 593 | uri = svn_tarball_stash + tarfn | ||
| 594 | bb.note("fetch " + uri) | ||
| 595 | fetchcmd = fetchcmd.replace("${URI}", uri) | ||
| 596 | ret = os.system(fetchcmd) | ||
| 597 | if ret == 0: | ||
| 598 | bb.note("Fetched %s from tarball stash, skipping checkout" % tarfn) | ||
| 599 | continue | ||
| 600 | |||
| 601 | olddir = os.path.abspath(os.getcwd()) | ||
| 602 | os.chdir(data.expand(dldir, localdata)) | ||
| 603 | |||
| 604 | # setup svnroot | ||
| 605 | # svnroot = ":" + method + ":" + user | ||
| 606 | # if pswd: | ||
| 607 | # svnroot += ":" + pswd | ||
| 608 | svnroot = host + path | ||
| 609 | |||
| 610 | data.setVar('SVNROOT', svnroot, localdata) | ||
| 611 | data.setVar('SVNCOOPTS', " ".join(options), localdata) | ||
| 612 | data.setVar('SVNMODULE', module, localdata) | ||
| 613 | svncmd = data.getVar('FETCHCOMMAND', localdata, 1) | ||
| 614 | svncmd = "svn co %s://%s/%s" % (proto, svnroot, module) | ||
| 615 | |||
| 616 | if revision: | ||
| 617 | svncmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module) | ||
| 618 | if svn_rsh: | ||
| 619 | svncmd = "SVN_RSH=\"%s\" %s" % (svn_rsh, svncmd) | ||
| 620 | |||
| 621 | # create temp directory | ||
| 622 | bb.debug(2, "Fetch: creating temporary directory") | ||
| 623 | bb.mkdirhier(data.expand('${WORKDIR}', localdata)) | ||
| 624 | data.setVar('TMPBASE', data.expand('${WORKDIR}/oesvn.XXXXXX', localdata), localdata) | ||
| 625 | tmppipe = os.popen(data.getVar('MKTEMPDIRCMD', localdata, 1) or "false") | ||
| 626 | tmpfile = tmppipe.readline().strip() | ||
| 627 | if not tmpfile: | ||
| 628 | bb.error("Fetch: unable to create temporary directory.. make sure 'mktemp' is in the PATH.") | ||
| 629 | raise FetchError(module) | ||
| 630 | |||
| 631 | # check out sources there | ||
| 632 | os.chdir(tmpfile) | ||
| 633 | bb.note("Fetch " + loc) | ||
| 634 | bb.debug(1, "Running %s" % svncmd) | ||
| 635 | myret = os.system(svncmd) | ||
| 636 | if myret != 0: | ||
| 637 | try: | ||
| 638 | os.rmdir(tmpfile) | ||
| 639 | except OSError: | ||
| 640 | pass | ||
| 641 | raise FetchError(module) | ||
| 642 | |||
| 643 | os.chdir(os.path.join(tmpfile, os.path.dirname(module))) | ||
| 644 | # tar them up to a defined filename | ||
| 645 | myret = os.system("tar -czf %s %s" % (os.path.join(dldir,tarfn), os.path.basename(module))) | ||
| 646 | if myret != 0: | ||
| 647 | try: | ||
| 648 | os.unlink(tarfn) | ||
| 649 | except OSError: | ||
| 650 | pass | ||
| 651 | # cleanup | ||
| 652 | os.system('rm -rf %s' % tmpfile) | ||
| 653 | os.chdir(olddir) | ||
| 654 | del localdata | ||
| 655 | |||
| 656 | methods.append(Svn()) | ||
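Editor's note: the Svn fetcher above ends up running a checkout of the form `svn co [-r REV] proto://host/path/module` from a temporary directory and then tars the result up as module_host_rev_date.tar.gz. For illustration, the command construction can be reduced to the sketch below; host, path and revision values in the usage comment are made up.

    def svn_checkout_command(proto, host, path, module, revision="", rsh=None):
        # mirrors the command built in Svn.go(); the rsh environment prefix,
        # like CVS_RSH in the Cvs fetcher, is only set for the 'ext' method
        svnroot = host + path
        cmd = "svn co %s://%s/%s" % (proto, svnroot, module)
        if revision:
            cmd = "svn co -r %s %s://%s/%s" % (revision, proto, svnroot, module)
        if rsh:
            cmd = 'SVN_RSH="%s" %s' % (rsh, cmd)
        return cmd

    # svn_checkout_command("svn", "svn.example.org", "/repos", "trunk/foo", "42")
    #   -> 'svn co -r 42 svn://svn.example.org/repos/trunk/foo'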
diff --git a/bitbake/lib/bb/fetch.pyc b/bitbake/lib/bb/fetch.pyc new file mode 100644 index 0000000000..d2e33835b5 --- /dev/null +++ b/bitbake/lib/bb/fetch.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/make.pyc b/bitbake/lib/bb/make.pyc new file mode 100644 index 0000000000..f6fabc07a2 --- /dev/null +++ b/bitbake/lib/bb/make.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/manifest.py b/bitbake/lib/bb/manifest.py new file mode 100644 index 0000000000..30bb454724 --- /dev/null +++ b/bitbake/lib/bb/manifest.py | |||
| @@ -0,0 +1,144 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | # | ||
| 4 | # Copyright (C) 2003, 2004 Chris Larson | ||
| 5 | # | ||
| 6 | # This program is free software; you can redistribute it and/or modify it under | ||
| 7 | # the terms of the GNU General Public License as published by the Free Software | ||
| 8 | # Foundation; either version 2 of the License, or (at your option) any later | ||
| 9 | # version. | ||
| 10 | # | ||
| 11 | # This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 12 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 13 | # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 14 | # | ||
| 15 | # You should have received a copy of the GNU General Public License along with | ||
| 16 | # this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 17 | # Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 18 | |||
| 19 | import os, sys | ||
| 20 | import bb, bb.data | ||
| 21 | |||
| 22 | def getfields(line): | ||
| 23 | fields = {} | ||
| 24 | fieldmap = ( "pkg", "src", "dest", "type", "mode", "uid", "gid", "major", "minor", "start", "inc", "count" ) | ||
| 25 | for f in xrange(len(fieldmap)): | ||
| 26 | fields[fieldmap[f]] = None | ||
| 27 | |||
| 28 | if not line: | ||
| 29 | return None | ||
| 30 | |||
| 31 | splitline = line.split() | ||
| 32 | if not len(splitline): | ||
| 33 | return None | ||
| 34 | |||
| 35 | try: | ||
| 36 | for f in xrange(len(fieldmap)): | ||
| 37 | if splitline[f] == '-': | ||
| 38 | continue | ||
| 39 | fields[fieldmap[f]] = splitline[f] | ||
| 40 | except IndexError: | ||
| 41 | pass | ||
| 42 | return fields | ||
| 43 | |||
| 44 | def parse (mfile, d): | ||
| 45 | manifest = [] | ||
| 46 | while 1: | ||
| 47 | line = mfile.readline() | ||
| 48 | if not line: | ||
| 49 | break | ||
| 50 | if line.startswith("#"): | ||
| 51 | continue | ||
| 52 | fields = getfields(line) | ||
| 53 | if not fields: | ||
| 54 | continue | ||
| 55 | manifest.append(fields) | ||
| 56 | return manifest | ||
| 57 | |||
| 58 | def emit (func, manifest, d): | ||
| 59 | #str = "%s () {\n" % func | ||
| 60 | str = "" | ||
| 61 | for line in manifest: | ||
| 62 | emittedline = emit_line(func, line, d) | ||
| 63 | if not emittedline: | ||
| 64 | continue | ||
| 65 | str += emittedline + "\n" | ||
| 66 | # str += "}\n" | ||
| 67 | return str | ||
| 68 | |||
| 69 | def mangle (func, line, d): | ||
| 70 | import copy | ||
| 71 | newline = copy.copy(line) | ||
| 72 | src = bb.data.expand(newline["src"], d) | ||
| 73 | |||
| 74 | if src: | ||
| 75 | if not os.path.isabs(src): | ||
| 76 | src = "${WORKDIR}/" + src | ||
| 77 | |||
| 78 | dest = newline["dest"] | ||
| 79 | if not dest: | ||
| 80 | return | ||
| 81 | |||
| 82 | if dest.startswith("/"): | ||
| 83 | dest = dest[1:] | ||
| 84 | |||
| 85 | if func == "do_install": | ||
| 86 | dest = "${D}/" + dest | ||
| 87 | |||
| 88 | elif func == "do_populate": | ||
| 89 | dest = "${WORKDIR}/install/" + newline["pkg"] + "/" + dest | ||
| 90 | |||
| 91 | elif func == "do_stage": | ||
| 92 | varmap = {} | ||
| 93 | varmap["${bindir}"] = "${STAGING_DIR}/${HOST_SYS}/bin" | ||
| 94 | varmap["${libdir}"] = "${STAGING_DIR}/${HOST_SYS}/lib" | ||
| 95 | varmap["${includedir}"] = "${STAGING_DIR}/${HOST_SYS}/include" | ||
| 96 | varmap["${datadir}"] = "${STAGING_DATADIR}" | ||
| 97 | |||
| 98 | matched = 0 | ||
| 99 | for key in varmap.keys(): | ||
| 100 | if dest.startswith(key): | ||
| 101 | dest = varmap[key] + "/" + dest[len(key):] | ||
| 102 | matched = 1 | ||
| 103 | if not matched: | ||
| 104 | newline = None | ||
| 105 | return | ||
| 106 | else: | ||
| 107 | newline = None | ||
| 108 | return | ||
| 109 | |||
| 110 | newline["src"] = src | ||
| 111 | newline["dest"] = dest | ||
| 112 | return newline | ||
| 113 | |||
| 114 | def emit_line (func, line, d): | ||
| 115 | import copy | ||
| 116 | newline = copy.deepcopy(line) | ||
| 117 | newline = mangle(func, newline, d) | ||
| 118 | if not newline: | ||
| 119 | return None | ||
| 120 | |||
| 121 | str = "" | ||
| 122 | type = newline["type"] | ||
| 123 | mode = newline["mode"] | ||
| 124 | src = newline["src"] | ||
| 125 | dest = newline["dest"] | ||
| 126 | if type == "d": | ||
| 127 | str = "install -d " | ||
| 128 | if mode: | ||
| 129 | str += "-m %s " % mode | ||
| 130 | str += dest | ||
| 131 | elif type == "f": | ||
| 132 | if not src: | ||
| 133 | return None | ||
| 134 | if dest.endswith("/"): | ||
| 135 | str = "install -d " | ||
| 136 | str += dest + "\n" | ||
| 137 | str += "install " | ||
| 138 | else: | ||
| 139 | str = "install -D " | ||
| 140 | if mode: | ||
| 141 | str += "-m %s " % mode | ||
| 142 | str += src + " " + dest | ||
| 143 | del newline | ||
| 144 | return str | ||
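Editor's note: a manifest line is a whitespace-separated record whose columns map, in order, to pkg, src, dest, type, mode, uid, gid, major, minor, start, inc and count; a '-' leaves a field unset and trailing columns may be omitted. emit_line()/mangle() then turn 'd' and 'f' entries into install commands. A short, self-contained illustration of the field mapping (independent of BitBake's data store, with an invented sample line):

    FIELDMAP = ("pkg", "src", "dest", "type", "mode", "uid", "gid",
                "major", "minor", "start", "inc", "count")

    def parse_manifest_line(line):
        # '-' means "unset"; missing trailing columns stay None
        fields = dict.fromkeys(FIELDMAP)
        for name, value in zip(FIELDMAP, line.split()):
            if value != '-':
                fields[name] = value
        return fields

    # parse_manifest_line("mypkg myfile.conf /etc/myfile.conf f 0644")
    #   -> {'pkg': 'mypkg', 'src': 'myfile.conf', 'dest': '/etc/myfile.conf',
    #       'type': 'f', 'mode': '0644', 'uid': None, ...}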
diff --git a/bitbake/lib/bb/manifest.pyc b/bitbake/lib/bb/manifest.pyc new file mode 100644 index 0000000000..02fb18ae4a --- /dev/null +++ b/bitbake/lib/bb/manifest.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/parse/BBHandler.pyc b/bitbake/lib/bb/parse/BBHandler.pyc new file mode 100644 index 0000000000..047a6853a3 --- /dev/null +++ b/bitbake/lib/bb/parse/BBHandler.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/parse/ConfHandler.pyc b/bitbake/lib/bb/parse/ConfHandler.pyc new file mode 100644 index 0000000000..620af52942 --- /dev/null +++ b/bitbake/lib/bb/parse/ConfHandler.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/parse/__init__.py b/bitbake/lib/bb/parse/__init__.py new file mode 100644 index 0000000000..b8839c09fd --- /dev/null +++ b/bitbake/lib/bb/parse/__init__.py | |||
| @@ -0,0 +1,70 @@ | |||
| 1 | """ | ||
| 2 | BitBake Parsers | ||
| 3 | |||
| 4 | File parsers for the BitBake build tools. | ||
| 5 | |||
| 6 | Copyright (C) 2003, 2004 Chris Larson | ||
| 7 | Copyright (C) 2003, 2004 Phil Blundell | ||
| 8 | |||
| 9 | This program is free software; you can redistribute it and/or modify it under | ||
| 10 | the terms of the GNU General Public License as published by the Free Software | ||
| 11 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 12 | version. | ||
| 13 | |||
| 14 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 15 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 16 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 17 | |||
| 18 | You should have received a copy of the GNU General Public License along with | ||
| 19 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 20 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 21 | |||
| 22 | Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 23 | """ | ||
| 24 | |||
| 25 | __all__ = [ 'ParseError', 'SkipPackage', 'cached_mtime', 'mark_dependency', | ||
| 26 | 'supports', 'handle', 'init' ] | ||
| 27 | handlers = [] | ||
| 28 | |||
| 29 | class ParseError(Exception): | ||
| 30 | """Exception raised when parsing fails""" | ||
| 31 | |||
| 32 | class SkipPackage(Exception): | ||
| 33 | """Exception raised to skip this package""" | ||
| 34 | |||
| 35 | __mtime_cache = {} | ||
| 36 | def cached_mtime(f): | ||
| 37 | import os | ||
| 38 | if not __mtime_cache.has_key(f): | ||
| 39 | __mtime_cache[f] = os.stat(f)[8] | ||
| 40 | return __mtime_cache[f] | ||
| 41 | |||
| 42 | def mark_dependency(d, f): | ||
| 43 | import bb, os | ||
| 44 | if f.startswith('./'): | ||
| 45 | f = "%s/%s" % (os.getcwd(), f[2:]) | ||
| 46 | deps = (bb.data.getVar('__depends', d) or "").split() | ||
| 47 | deps.append("%s@%s" % (f, cached_mtime(f))) | ||
| 48 | bb.data.setVar('__depends', " ".join(deps), d) | ||
| 49 | |||
| 50 | def supports(fn, data): | ||
| 51 | """Returns true if we have a handler for this file, false otherwise""" | ||
| 52 | for h in handlers: | ||
| 53 | if h['supports'](fn, data): | ||
| 54 | return 1 | ||
| 55 | return 0 | ||
| 56 | |||
| 57 | def handle(fn, data, include = 0): | ||
| 58 | """Call the handler that is appropriate for this file""" | ||
| 59 | for h in handlers: | ||
| 60 | if h['supports'](fn, data): | ||
| 61 | return h['handle'](fn, data, include) | ||
| 62 | raise ParseError("%s is not a BitBake file" % fn) | ||
| 63 | |||
| 64 | def init(fn, data): | ||
| 65 | for h in handlers: | ||
| 66 | if h['supports'](fn): | ||
| 67 | return h['init'](data) | ||
| 68 | |||
| 69 | |||
| 70 | from parse_py import __version__, ConfHandler, BBHandler | ||
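Editor's note: supports(), handle() and init() above dispatch over the module-level handlers list, where each entry is a dictionary with 'supports', 'handle' and 'init' callables; this is how the parse_py handlers imported on the last line register themselves. A hedged sketch of what such a registration looks like for an imaginary file format (".example" is invented for the illustration):

    # Sketch of a minimal parser backend registering itself with bb.parse.
    # The dictionary keys match the lookups done by supports()/handle()/init().
    from bb.parse import handlers

    def supports(fn, data):
        return fn.endswith(".example")

    def handle(fn, data, include=0):
        # a real handler would parse the file and populate 'data' here
        return data

    def init(data):
        pass

    handlers.append({'supports': supports, 'handle': handle, 'init': init})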
diff --git a/bitbake/lib/bb/parse/__init__.pyc b/bitbake/lib/bb/parse/__init__.pyc new file mode 100644 index 0000000000..a2c2d6ae54 --- /dev/null +++ b/bitbake/lib/bb/parse/__init__.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/parse/parse_c/bitbakeparser.l b/bitbake/lib/bb/parse/parse_c/bitbakeparser.l new file mode 100644 index 0000000000..ee4ce14839 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_c/bitbakeparser.l | |||
| @@ -0,0 +1,288 @@ | |||
| 1 | /* bbf.flex | ||
| 2 | |||
| 3 | written by Marc Singer | ||
| 4 | 6 January 2005 | ||
| 5 | |||
| 6 | This program is free software; you can redistribute it and/or | ||
| 7 | modify it under the terms of the GNU General Public License as | ||
| 8 | published by the Free Software Foundation; either version 2 of the | ||
| 9 | License, or (at your option) any later version. | ||
| 10 | |||
| 11 | This program is distributed in the hope that it will be useful, but | ||
| 12 | WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 14 | General Public License for more details. | ||
| 15 | |||
| 16 | You should have received a copy of the GNU General Public License | ||
| 17 | along with this program; if not, write to the Free Software | ||
| 18 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 | ||
| 19 | USA. | ||
| 20 | |||
| 21 | DESCRIPTION | ||
| 22 | ----------- | ||
| 23 | |||
| 24 | flex lexer specification for a BitBake input file parser. | ||
| 25 | |||
| 26 | Unfortunately, flex doesn't welcome comments within the rule sets. | ||
| 27 | I say unfortunately because this lexer is unreasonably complex and | ||
| 28 | comments would make the code much easier to comprehend. | ||
| 29 | |||
| 30 | The BitBake grammar is not regular. In order to interpret all | ||
| 31 | of the available input files, the lexer maintains much state as it | ||
| 32 | parses. There are places where this lexer will emit tokens that | ||
| 33 | are invalid. The parser will tend to catch these. | ||
| 34 | |||
| 35 | The lexer requires C++ at the moment. The only reason for this has | ||
| 36 | to do with a very small amount of managed state. Producing a C | ||
| 37 | lexer should be a reasonably easy task as long as the %reentrant | ||
| 38 | option is used. | ||
| 39 | |||
| 40 | |||
| 41 | NOTES | ||
| 42 | ----- | ||
| 43 | |||
| 44 | o RVALUES. There are three kinds of RVALUES. There are unquoted | ||
| 45 | values, double quote enclosed strings, and single quote | ||
| 46 | strings. Quoted strings may contain unescaped quotes (of either | ||
| 47 | type), *and* any type may span more than one line by using a | ||
| 48 | continuation '\' at the end of the line. This requires us to | ||
| 49 | recognize all types of values with a single expression. | ||
| 50 | Moreover, the only reason to quote a value is to include | ||
| 51 | trailing or leading whitespace. Whitespace within a value is | ||
| 52 | preserved, ugh. | ||
| 53 | |||
| 54 | o CLASSES. C_ patterns define classes. Classes ought not include | ||
| 55 | a repetition operator, instead letting the reference to the class | ||
| 56 | define the repetition count. | ||
| 57 | |||
| 58 | C_SS - symbol start | ||
| 59 | C_SB - symbol body | ||
| 60 | C_SP - whitespace | ||
| 61 | |||
| 62 | */ | ||
| 63 | |||
| 64 | %option never-interactive | ||
| 65 | %option yylineno | ||
| 66 | %option noyywrap | ||
| 67 | %option reentrant stack | ||
| 68 | |||
| 69 | |||
| 70 | %{ | ||
| 71 | |||
| 72 | #include "token.h" | ||
| 73 | #include "lexer.h" | ||
| 74 | #include <ctype.h> | ||
| 75 | |||
| 76 | extern void *bbparseAlloc(void *(*mallocProc)(size_t)); | ||
| 77 | extern void bbparseFree(void *p, void (*freeProc)(void*)); | ||
| 78 | extern void *bbparseAlloc(void *(*mallocProc)(size_t)); | ||
| 79 | extern void *bbparse(void*, int, token_t, lex_t*); | ||
| 80 | extern void bbparseTrace(FILE *TraceFILE, char *zTracePrompt); | ||
| 81 | |||
| 82 | //static const char* rgbInput; | ||
| 83 | //static size_t cbInput; | ||
| 84 | |||
| 85 | |||
| 86 | int lineError; | ||
| 87 | int errorParse; | ||
| 88 | |||
| 89 | enum { | ||
| 90 | errorNone = 0, | ||
| 91 | errorUnexpectedInput, | ||
| 92 | errorUnsupportedFeature, | ||
| 93 | }; | ||
| 94 | |||
| 95 | #define YY_EXTRA_TYPE lex_t* | ||
| 96 | |||
| 97 | /* Read from buffer */ | ||
| 98 | #define YY_INPUT(buf,result,max_size) \ | ||
| 99 | { yyextra->input(buf, &result, max_size); } | ||
| 100 | |||
| 101 | //#define YY_DECL static size_t yylex () | ||
| 102 | |||
| 103 | #define ERROR(e) \ | ||
| 104 | do { lineError = yylineno; errorParse = e; yyterminate (); } while (0) | ||
| 105 | |||
| 106 | static const char* fixup_escapes (const char* sz); | ||
| 107 | |||
| 108 | %} | ||
| 109 | |||
| 110 | |||
| 111 | C_SP [ \t] | ||
| 112 | COMMENT #.*\n | ||
| 113 | OP_ASSIGN "=" | ||
| 114 | OP_IMMEDIATE ":=" | ||
| 115 | OP_PREPEND "=+" | ||
| 116 | OP_APPEND "+=" | ||
| 117 | OP_COND "?=" | ||
| 118 | B_OPEN "{" | ||
| 119 | B_CLOSE "}" | ||
| 120 | |||
| 121 | K_ADDTASK "addtask" | ||
| 122 | K_ADDHANDLER "addhandler" | ||
| 123 | K_AFTER "after" | ||
| 124 | K_BEFORE "before" | ||
| 125 | K_DEF "def" | ||
| 126 | K_INCLUDE "include" | ||
| 127 | K_INHERIT "inherit" | ||
| 128 | K_PYTHON "python" | ||
| 129 | K_FAKEROOT "fakeroot" | ||
| 130 | K_EXPORT "export" | ||
| 131 | K_EXPORT_FUNC "EXPORT_FUNCTIONS" | ||
| 132 | |||
| 133 | STRING \"([^\n\r]|"\\\n")*\" | ||
| 134 | SSTRING \'([^\n\r]|"\\\n")*\' | ||
| 135 | VALUE ([^'" \t\n])|([^'" \t\n]([^\n]|(\\\n))*[^'" \t\n]) | ||
| 136 | |||
| 137 | C_SS [a-zA-Z_] | ||
| 138 | C_SB [a-zA-Z0-9_+-.] | ||
| 139 | REF $\{{C_SS}{C_SB}*\} | ||
| 140 | SYMBOL {C_SS}{C_SB}* | ||
| 141 | VARIABLE $?{C_SS}({C_SB}*|{REF})*(\[[a-zA-Z0-9_]*\])? | ||
| 142 | FILENAME ([a-zA-Z_./]|{REF})(([-+a-zA-Z0-9_./]*)|{REF})* | ||
| 143 | |||
| 144 | PROC \({C_SP}*\) | ||
| 145 | |||
| 146 | %s S_DEF | ||
| 147 | %s S_DEF_ARGS | ||
| 148 | %s S_DEF_BODY | ||
| 149 | %s S_FUNC | ||
| 150 | %s S_INCLUDE | ||
| 151 | %s S_INHERIT | ||
| 152 | %s S_PROC | ||
| 153 | %s S_RVALUE | ||
| 154 | %s S_TASK | ||
| 155 | |||
| 156 | %% | ||
| 157 | |||
| 158 | {OP_APPEND} { BEGIN S_RVALUE; | ||
| 159 | yyextra->accept (T_OP_APPEND); } | ||
| 160 | {OP_PREPEND} { BEGIN S_RVALUE; | ||
| 161 | yyextra->accept (T_OP_PREPEND); } | ||
| 162 | {OP_IMMEDIATE} { BEGIN S_RVALUE; | ||
| 163 | yyextra->accept (T_OP_IMMEDIATE); } | ||
| 164 | {OP_ASSIGN} { BEGIN S_RVALUE; | ||
| 165 | yyextra->accept (T_OP_ASSIGN); } | ||
| 166 | {OP_COND} { BEGIN S_RVALUE; | ||
| 167 | yyextra->accept (T_OP_COND); } | ||
| 168 | |||
| 169 | <S_RVALUE>\\\n{C_SP}* { } | ||
| 170 | <S_RVALUE>{STRING} { BEGIN INITIAL; | ||
| 171 | size_t cb = yyleng; | ||
| 172 | while (cb && isspace (yytext[cb - 1])) | ||
| 173 | --cb; | ||
| 174 | yytext[cb - 1] = 0; | ||
| 175 | yyextra->accept (T_STRING, yytext + 1); } | ||
| 176 | <S_RVALUE>{SSTRING} { BEGIN INITIAL; | ||
| 177 | size_t cb = yyleng; | ||
| 178 | while (cb && isspace (yytext[cb - 1])) | ||
| 179 | --cb; | ||
| 180 | yytext[cb - 1] = 0; | ||
| 181 | yyextra->accept (T_STRING, yytext + 1); } | ||
| 182 | |||
| 183 | <S_RVALUE>{VALUE} { ERROR (errorUnexpectedInput); } | ||
| 184 | <S_RVALUE>{C_SP}*\n+ { BEGIN INITIAL; | ||
| 185 | yyextra->accept (T_STRING, NULL); } | ||
| 186 | |||
| 187 | {K_INCLUDE} { BEGIN S_INCLUDE; | ||
| 188 | yyextra->accept (T_INCLUDE); } | ||
| 189 | {K_INHERIT} { BEGIN S_INHERIT; | ||
| 190 | yyextra->accept (T_INHERIT); } | ||
| 191 | {K_ADDTASK} { BEGIN S_TASK; | ||
| 192 | yyextra->accept (T_ADDTASK); } | ||
| 193 | {K_ADDHANDLER} { yyextra->accept (T_ADDHANDLER); } | ||
| 194 | {K_EXPORT_FUNC} { BEGIN S_FUNC; | ||
| 195 | yyextra->accept (T_EXPORT_FUNC); } | ||
| 196 | <S_TASK>{K_BEFORE} { yyextra->accept (T_BEFORE); } | ||
| 197 | <S_TASK>{K_AFTER} { yyextra->accept (T_AFTER); } | ||
| 198 | <INITIAL>{K_EXPORT} { yyextra->accept (T_EXPORT); } | ||
| 199 | |||
| 200 | <INITIAL>{K_FAKEROOT} { yyextra->accept (T_FAKEROOT); } | ||
| 201 | <INITIAL>{K_PYTHON} { yyextra->accept (T_PYTHON); } | ||
| 202 | {PROC}{C_SP}*{B_OPEN}{C_SP}*\n* { BEGIN S_PROC; | ||
| 203 | yyextra->accept (T_PROC_OPEN); } | ||
| 204 | <S_PROC>{B_CLOSE}{C_SP}*\n* { BEGIN INITIAL; | ||
| 205 | yyextra->accept (T_PROC_CLOSE); } | ||
| 206 | <S_PROC>([^}][^\n]*)?\n* { yyextra->accept (T_PROC_BODY, yytext); } | ||
| 207 | |||
| 208 | {K_DEF} { BEGIN S_DEF; } | ||
| 209 | <S_DEF>{SYMBOL} { BEGIN S_DEF_ARGS; | ||
| 210 | yyextra->accept (T_SYMBOL, yytext); } | ||
| 211 | <S_DEF_ARGS>[^\n:]*: { yyextra->accept (T_DEF_ARGS, yytext); } | ||
| 212 | <S_DEF_ARGS>{C_SP}*\n { BEGIN S_DEF_BODY; } | ||
| 213 | <S_DEF_BODY>{C_SP}+[^\n]*\n { yyextra->accept (T_DEF_BODY, yytext); } | ||
| 214 | <S_DEF_BODY>\n { yyextra->accept (T_DEF_BODY, yytext); } | ||
| 215 | <S_DEF_BODY>. { BEGIN INITIAL; unput (yytext[0]); } | ||
| 216 | |||
| 217 | {COMMENT} { } | ||
| 218 | |||
| 219 | <INITIAL>{SYMBOL} { yyextra->accept (T_SYMBOL, yytext); } | ||
| 220 | <INITIAL>{VARIABLE} { yyextra->accept (T_VARIABLE, yytext); } | ||
| 221 | |||
| 222 | <S_TASK>{SYMBOL} { yyextra->accept (T_TSYMBOL, yytext); } | ||
| 223 | <S_FUNC>{SYMBOL} { yyextra->accept (T_FSYMBOL, yytext); } | ||
| 224 | <S_INHERIT>{SYMBOL} { yyextra->accept (T_ISYMBOL, yytext); } | ||
| 225 | <S_INCLUDE>{FILENAME} { BEGIN INITIAL; | ||
| 226 | yyextra->accept (T_ISYMBOL, yytext); } | ||
| 227 | |||
| 228 | <S_TASK>\n { BEGIN INITIAL; } | ||
| 229 | <S_FUNC>\n { BEGIN INITIAL; } | ||
| 230 | <S_INHERIT>\n { BEGIN INITIAL; } | ||
| 231 | |||
| 232 | [ \t\r\n] /* Insignificant whitespace */ | ||
| 233 | |||
| 234 | . { ERROR (errorUnexpectedInput); } | ||
| 235 | |||
| 236 | /* Check for premature termination */ | ||
| 237 | <<EOF>> { return T_EOF; } | ||
| 238 | |||
| 239 | %% | ||
| 240 | |||
| 241 | void lex_t::accept (int token, const char* sz) | ||
| 242 | { | ||
| 243 | token_t t; | ||
| 244 | memset (&t, 0, sizeof (t)); | ||
| 245 | t.copyString(sz); | ||
| 246 | |||
| 247 | /* tell lemon to parse the token */ | ||
| 248 | parse (parser, token, t, this); | ||
| 249 | } | ||
| 250 | |||
| 251 | int lex_t::line ()const | ||
| 252 | { | ||
| 253 | return yyget_lineno (scanner); | ||
| 254 | } | ||
| 255 | |||
| 256 | const char* lex_t::filename ()const | ||
| 257 | { | ||
| 258 | return m_fileName; | ||
| 259 | } | ||
| 260 | |||
| 261 | void parse (MappedFile* mf) | ||
| 262 | { | ||
| 263 | void* parser = bbparseAlloc (malloc); | ||
| 264 | yyscan_t scanner; | ||
| 265 | lex_t lex; | ||
| 266 | |||
| 267 | yylex_init (&scanner); | ||
| 268 | |||
| 269 | lex.parser = parser; | ||
| 270 | lex.scanner = scanner; | ||
| 271 | lex.mf = mf; | ||
| 272 | lex.rgbInput = mf->m_rgb; | ||
| 273 | lex.cbInput = mf->m_cb; | ||
| 274 | lex.parse = bbparse; | ||
| 275 | yyset_extra (&lex, scanner); | ||
| 276 | |||
| 277 | |||
| 278 | int result = yylex (scanner); | ||
| 279 | |||
| 280 | lex.accept (0); | ||
| 281 | bbparseTrace (NULL, NULL); | ||
| 282 | |||
| 283 | if (result != T_EOF) | ||
| 284 | WARNING ("premature end of file\n"); | ||
| 285 | |||
| 286 | yylex_destroy (scanner); | ||
| 287 | bbparseFree (parser, free); | ||
| 288 | } | ||
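Editor's note: the NOTES at the top of this lexer point out that an RVALUE may span several physical lines through a trailing backslash, and the `<S_RVALUE>\\\n{C_SP}*` rule simply swallows the continuation and the indentation that follows it. The effect on a value, expressed in Python purely for illustration, is roughly:

    import re

    def join_continuations(value):
        # drop a backslash-newline plus the following indentation,
        # concatenating the pieces of a multi-line value (a rough model
        # of the <S_RVALUE>\\\n{C_SP}* lexer rule)
        return re.sub(r'\\\n[ \t]*', '', value)

    # join_continuations('file://a \\\n    file://b')
    #   -> 'file://a file://b'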
diff --git a/bitbake/lib/bb/parse/parse_c/bitbakeparser.py b/bitbake/lib/bb/parse/parse_c/bitbakeparser.py new file mode 100644 index 0000000000..ed7b13eef9 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_c/bitbakeparser.py | |||
| @@ -0,0 +1,133 @@ | |||
| 1 | """ | ||
| 2 | |||
| 3 | BitBake C Parser Python Code | ||
| 4 | |||
| 5 | Copyright (C) 2005 Holger Hans Peter Freyther | ||
| 6 | |||
| 7 | Permission is hereby granted, free of charge, to any person obtaining a copy | ||
| 8 | of this software and associated documentation files (the "Software"), to deal | ||
| 9 | in the Software without restriction, including without limitation the rights | ||
| 10 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
| 11 | copies of the Software, and to permit persons to whom the Software is | ||
| 12 | furnished to do so, subject to the following conditions: | ||
| 13 | |||
| 14 | The above copyright notice and this permission notice shall be included in all | ||
| 15 | copies or substantial portions of the Software. | ||
| 16 | |||
| 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
| 18 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
| 19 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT | ||
| 20 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | ||
| 21 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | ||
| 22 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR | ||
| 23 | THE USE OR OTHER DEALINGS IN THE SOFTWARE. | ||
| 24 | """ | ||
| 25 | |||
| 26 | __version__ = "0xdeadbeef" | ||
| 27 | |||
| 28 | class CParser: | ||
| 29 | """ | ||
| 30 | The C-based Parser for Bitbake | ||
| 31 | """ | ||
| 32 | def __init__(self, data, type): | ||
| 33 | """ | ||
| 34 | Constructor | ||
| 35 | """ | ||
| 36 | self._data = data | ||
| 37 | |||
| 38 | def _syntax_error(self, file, line): | ||
| 39 | """ | ||
| 40 | lemon/flex reports a syntax error to us and we will | ||
| 41 | raise an exception | ||
| 42 | """ | ||
| 43 | pass | ||
| 44 | |||
| 45 | def _export(self, data): | ||
| 46 | """ | ||
| 47 | EXPORT VAR = "MOO" | ||
| 48 | we will now export VAR | ||
| 49 | """ | ||
| 50 | pass | ||
| 51 | |||
| 52 | def _assign(self, key, value): | ||
| 53 | """ | ||
| 54 | VAR = "MOO" | ||
| 55 | we will assign moo to VAR | ||
| 56 | """ | ||
| 57 | pass | ||
| 58 | |||
| 64 | def _append(self, key, value): | ||
| 65 | """ | ||
| 66 | VAR += "MOO" | ||
| 67 | we will append " MOO" to var | ||
| 68 | """ | ||
| 69 | pass | ||
| 70 | |||
| 71 | def _prepend(self, key, value): | ||
| 72 | """ | ||
| 73 | VAR =+ "MOO" | ||
| 74 | we will prepend "MOO " to var | ||
| 75 | """ | ||
| 76 | pass | ||
| 77 | |||
| 78 | def _immediate(self, key, value): | ||
| 79 | """ | ||
| 80 | VAR := "MOO ${CVSDATE}" | ||
| 81 | we will assign immediately and expand vars | ||
| 82 | """ | ||
| 83 | pass | ||
| 84 | |||
| 85 | def _conditional(self, key, value): | ||
| 86 | """ | ||
| 87 | """ | ||
| 88 | pass | ||
| 89 | |||
| 90 | def _add_task(self, task, before = None, after = None): | ||
| 91 | """ | ||
| 92 | """ | ||
| 93 | pass | ||
| 94 | |||
| 95 | def _include(self, file): | ||
| 96 | """ | ||
| 97 | """ | ||
| 98 | pass | ||
| 99 | |||
| 100 | def _inherit(self, file): | ||
| 101 | """ | ||
| 102 | """ | ||
| 103 | pass | ||
| 104 | |||
| 105 | def _shell_procedure(self, name, body): | ||
| 106 | """ | ||
| 107 | """ | ||
| 108 | pass | ||
| 109 | |||
| 110 | def _python_procedure(self, name, body): | ||
| 111 | """ | ||
| 112 | """ | ||
| 113 | pass | ||
| 114 | |||
| 115 | def _fakeroot_procedure(self, name, body): | ||
| 116 | """ | ||
| 117 | """ | ||
| 118 | pass | ||
| 119 | |||
| 120 | def _def_procedure(self, a, b, c): | ||
| 121 | """ | ||
| 122 | """ | ||
| 123 | pass | ||
| 124 | |||
| 125 | def _export_func(self, name): | ||
| 126 | """ | ||
| 127 | """ | ||
| 128 | pass | ||
| 129 | |||
| 130 | def _add_handler(self, handler): | ||
| 131 | """ | ||
| 132 | """ | ||
| 133 | pass | ||
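Editor's note: CParser above is only a skeleton; every callback the lemon parser would invoke is a pass stub. A hedged sketch of what filling in a couple of the assignment callbacks could look like, assuming the instance is handed a bb.data dictionary (that assumption, and the class name, are the editor's, not something the stub defines):

    from bb import data as bb_data

    class ExampleCParser:
        # illustrative only; mirrors the callback names of CParser above
        def __init__(self, d):
            self._data = d

        def _assign(self, key, value):
            # VAR = "MOO"
            bb_data.setVar(key, value, self._data)

        def _append(self, key, value):
            # VAR += "MOO" appends with a separating space
            current = bb_data.getVar(key, self._data) or ""
            bb_data.setVar(key, current + " " + value, self._data)

        def _export(self, key):
            # EXPORT VAR marks the variable for export
            bb_data.setVarFlag(key, "export", 1, self._data)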
diff --git a/bitbake/lib/bb/parse/parse_c/bitbakeparser.y b/bitbake/lib/bb/parse/parse_c/bitbakeparser.y new file mode 100644 index 0000000000..4bc81a913a --- /dev/null +++ b/bitbake/lib/bb/parse/parse_c/bitbakeparser.y | |||
| @@ -0,0 +1,161 @@ | |||
| 1 | /* bbp.lemon | ||
| 2 | |||
| 3 | written by Marc Singer | ||
| 4 | 6 January 2005 | ||
| 5 | |||
| 6 | This program is free software; you can redistribute it and/or | ||
| 7 | modify it under the terms of the GNU General Public License as | ||
| 8 | published by the Free Software Foundation; either version 2 of the | ||
| 9 | License, or (at your option) any later version. | ||
| 10 | |||
| 11 | This program is distributed in the hope that it will be useful, but | ||
| 12 | WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 13 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 14 | General Public License for more details. | ||
| 15 | |||
| 16 | You should have received a copy of the GNU General Public License | ||
| 17 | along with this program; if not, write to the Free Software | ||
| 18 | Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 | ||
| 19 | USA. | ||
| 20 | |||
| 21 | DESCRIPTION | ||
| 22 | ----------- | ||
| 23 | |||
| 24 | lemon parser specification file for a BitBake input file parser. | ||
| 25 | |||
| 26 | Most of the interesting shenanigans are done in the lexer. The | ||
| 27 | BitBake grammar is not regular. In order to emit tokens that | ||
| 28 | the parser can properly interpret in LALR fashion, the lexer | ||
| 29 | manages the interpretation state. This is why there are ISYMBOLs, | ||
| 30 | SYMBOLS, and TSYMBOLS. | ||
| 31 | |||
| 32 | This parser was developed by reading the limited available | ||
| 33 | documentation for BitBake and by analyzing the available BB files. | ||
| 34 | There is no assertion of correctness to be made about this parser. | ||
| 35 | |||
| 36 | */ | ||
| 37 | |||
| 38 | %token_type {token_t} | ||
| 39 | %name bbparse | ||
| 40 | %token_prefix T_ | ||
| 41 | %extra_argument {lex_t* lex} | ||
| 42 | |||
| 43 | %include { | ||
| 44 | #include "token.h" | ||
| 45 | } | ||
| 46 | |||
| 47 | |||
| 48 | %token_destructor { $$.release_this (); } | ||
| 49 | |||
| 50 | %syntax_error { printf ("%s:%d: syntax error\n", | ||
| 51 | lex->filename (), lex->line ()); } | ||
| 52 | |||
| 53 | program ::= statements. | ||
| 54 | |||
| 55 | statements ::= statements statement. | ||
| 56 | statements ::= . | ||
| 57 | |||
| 58 | variable(r) ::= SYMBOL(s). | ||
| 59 | { r.assignString( s.string() ); | ||
| 60 | s.assignString( 0 ); | ||
| 61 | s.release_this(); } | ||
| 62 | variable(r) ::= VARIABLE(v). | ||
| 63 | { | ||
| 64 | r.assignString( v.string() ); | ||
| 65 | v.assignString( 0 ); | ||
| 66 | v.release_this(); } | ||
| 67 | |||
| 68 | statement ::= EXPORT variable(s) OP_ASSIGN STRING(v). | ||
| 69 | { e_assign( s.string(), v.string() ); | ||
| 70 | e_export( s.string() ); | ||
| 71 | s.release_this(); v.release_this(); } | ||
| 72 | statement ::= EXPORT variable(s) OP_IMMEDIATE STRING(v). | ||
| 73 | { e_immediate (s.string(), v.string() ); | ||
| 74 | e_export( s.string() ); | ||
| 75 | s.release_this(); v.release_this(); } | ||
| 76 | statement ::= EXPORT variable(s) OP_COND STRING(v). | ||
| 77 | { e_cond( s.string(), v.string() ); | ||
| 78 | s.release_this(); v.release_this(); } | ||
| 79 | |||
| 80 | statement ::= variable(s) OP_ASSIGN STRING(v). | ||
| 81 | { e_assign( s.string(), v.string() ); | ||
| 82 | s.release_this(); v.release_this(); } | ||
| 83 | statement ::= variable(s) OP_PREPEND STRING(v). | ||
| 84 | { e_prepend( s.string(), v.string() ); | ||
| 85 | s.release_this(); v.release_this(); } | ||
| 86 | statement ::= variable(s) OP_APPEND STRING(v). | ||
| 87 | { e_append( s.string() , v.string() ); | ||
| 88 | s.release_this(); v.release_this(); } | ||
| 89 | statement ::= variable(s) OP_IMMEDIATE STRING(v). | ||
| 90 | { e_immediate( s.string(), v.string() ); | ||
| 91 | s.release_this(); v.release_this(); } | ||
| 92 | statement ::= variable(s) OP_COND STRING(v). | ||
| 93 | { e_cond( s.string(), v.string() ); | ||
| 94 | s.release_this(); v.release_this(); } | ||
| 95 | |||
| 96 | task ::= TSYMBOL(t) BEFORE TSYMBOL(b) AFTER TSYMBOL(a). | ||
| 97 | { e_addtask( t.string(), b.string(), a.string() ); | ||
| 98 | t.release_this(); b.release_this(); a.release_this(); } | ||
| 99 | task ::= TSYMBOL(t) AFTER TSYMBOL(a) BEFORE TSYMBOL(b). | ||
| 100 | { e_addtask( t.string(), b.string(), a.string()); | ||
| 101 | t.release_this(); a.release_this(); b.release_this(); } | ||
| 102 | task ::= TSYMBOL(t). | ||
| 103 | { e_addtask( t.string(), NULL, NULL); | ||
| 104 | t.release_this();} | ||
| 105 | task ::= TSYMBOL(t) BEFORE TSYMBOL(b). | ||
| 106 | { e_addtask( t.string(), b.string(), NULL); | ||
| 107 | t.release_this(); b.release_this(); } | ||
| 108 | task ::= TSYMBOL(t) AFTER TSYMBOL(a). | ||
| 109 | { e_addtask( t.string(), NULL, a.string()); | ||
| 110 | t.release_this(); a.release_this(); } | ||
| 111 | tasks ::= tasks task. | ||
| 112 | tasks ::= task. | ||
| 113 | statement ::= ADDTASK tasks. | ||
| 114 | |||
| 115 | statement ::= ADDHANDLER SYMBOL(s). | ||
| 116 | { e_addhandler( s.string()); s.release_this (); } | ||
| 117 | |||
| 118 | func ::= FSYMBOL(f). { e_export_func(f.string()); f.release_this(); } | ||
| 119 | funcs ::= funcs func. | ||
| 120 | funcs ::= func. | ||
| 121 | statement ::= EXPORT_FUNC funcs. | ||
| 122 | |||
| 123 | inherit ::= ISYMBOL(i). { e_inherit(i.string() ); i.release_this (); } | ||
| 124 | inherits ::= inherits inherit. | ||
| 125 | inherits ::= inherit. | ||
| 126 | statement ::= INHERIT inherits. | ||
| 127 | |||
| 128 | statement ::= INCLUDE ISYMBOL(i). | ||
| 129 | { e_include(i.string() ); i.release_this(); } | ||
| 130 | |||
| 131 | proc_body(r) ::= proc_body(l) PROC_BODY(b). | ||
| 132 | { /* concatenate body lines */ | ||
| 133 | r.assignString( token_t::concatString(l.string(), b.string()) ); | ||
| 134 | l.release_this (); | ||
| 135 | b.release_this (); | ||
| 136 | } | ||
| 137 | proc_body(b) ::= . { b.assignString(0); } | ||
| 138 | statement ::= variable(p) PROC_OPEN proc_body(b) PROC_CLOSE. | ||
| 139 | { e_proc( p.string(), b.string() ); | ||
| 140 | p.release_this(); b.release_this(); } | ||
| 141 | statement ::= PYTHON SYMBOL(p) PROC_OPEN proc_body(b) PROC_CLOSE. | ||
| 142 | { e_proc_python (p.string(), b.string() ); | ||
| 143 | p.release_this(); b.release_this(); } | ||
| 144 | statement ::= PYTHON PROC_OPEN proc_body(b) PROC_CLOSE. | ||
| 145 | { e_proc_python( NULL, b.string()); | ||
| 146 | b.release_this (); } | ||
| 147 | |||
| 148 | statement ::= FAKEROOT SYMBOL(p) PROC_OPEN proc_body(b) PROC_CLOSE. | ||
| 149 | { e_proc_fakeroot(p.string(), b.string() ); | ||
| 150 | p.release_this (); b.release_this (); } | ||
| 151 | |||
| 152 | def_body(r) ::= def_body(l) DEF_BODY(b). | ||
| 153 | { /* concatenate body lines */ | ||
| 154 | r.assignString( token_t::concatString(l.string(), b.string()) ); | ||
| 155 | l.release_this (); b.release_this (); | ||
| 156 | } | ||
| 157 | def_body(b) ::= . { b.assignString(0); } | ||
| 158 | statement ::= SYMBOL(p) DEF_ARGS(a) def_body(b). | ||
| 159 | { e_def( p.string(), a.string(), b.string()); | ||
| 160 | p.release_this(); a.release_this(); b.release_this(); } | ||
| 161 | |||
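Editor's note: the task rules above accept ADDTASK followed by a task symbol optionally qualified with BEFORE and/or AFTER symbols, and hand the three strings to e_addtask(). The intended dependency effect is sketched below in Python against a simple dict of task dependencies; the data layout is invented for the example and is not BitBake's.

    def e_addtask_sketch(taskgraph, task, before=None, after=None):
        # 'after' tasks become dependencies of 'task'; 'task' becomes a
        # dependency of every 'before' task
        deps = taskgraph.setdefault(task, [])
        if after:
            for t in after.split():
                if t not in deps:
                    deps.append(t)
        if before:
            for t in before.split():
                taskgraph.setdefault(t, [])
                if task not in taskgraph[t]:
                    taskgraph[t].append(task)

    # graph = {}
    # e_addtask_sketch(graph, "do_compile", before="do_install", after="do_unpack")
    # graph -> {'do_compile': ['do_unpack'], 'do_install': ['do_compile']}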
diff --git a/bitbake/lib/bb/parse/parse_c/lexer.h b/bitbake/lib/bb/parse/parse_c/lexer.h new file mode 100644 index 0000000000..1edf72dcf5 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_c/lexer.h | |||
| @@ -0,0 +1,41 @@ | |||
| 1 | /* | ||
| 2 | Copyright (C) 2005 Holger Hans Peter Freyther | ||
| 3 | |||
| 4 | Permission is hereby granted, free of charge, to any person obtaining a copy | ||
| 5 | of this software and associated documentation files (the "Software"), to deal | ||
| 6 | in the Software without restriction, including without limitation the rights | ||
| 7 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
| 8 | copies of the Software, and to permit persons to whom the Software is | ||
| 9 | furnished to do so, subject to the following conditions: | ||
| 10 | |||
| 11 | The above copyright notice and this permission notice shall be included in all | ||
| 12 | copies or substantial portions of the Software. | ||
| 13 | |||
| 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
| 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
| 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT | ||
| 17 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | ||
| 18 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | ||
| 19 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR | ||
| 20 | THE USE OR OTHER DEALINGS IN THE SOFTWARE. | ||
| 21 | |||
| 22 | */ | ||
| 23 | |||
| 24 | #ifndef LEXER_H | ||
| 25 | #define LEXER_H | ||
| 26 | |||
| 27 | struct lex_t { | ||
| 28 | void *parser; | ||
| 29 | void *scanner; | ||
| 30 | void* (*parse)(void*, int, token_t, lex_t*); | ||
| 31 | |||
| 32 | void accept(int token, const char* string = 0); | ||
| 33 | void input(char *buf, int *result, int max_size); | ||
| 34 | int line()const; | ||
| 35 | const char* filename()const; | ||
| 36 | private: | ||
| 37 | const char* m_fileName; | ||
| 38 | }; | ||
| 39 | |||
| 40 | |||
| 41 | #endif | ||
diff --git a/bitbake/lib/bb/parse/parse_c/token.h b/bitbake/lib/bb/parse/parse_c/token.h new file mode 100644 index 0000000000..2351fda6b5 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_c/token.h | |||
| @@ -0,0 +1,83 @@ | |||
| 1 | /* | ||
| 2 | Copyright (C) 2005 Holger Hans Peter Freyther | ||
| 3 | |||
| 4 | Permission is hereby granted, free of charge, to any person obtaining a copy | ||
| 5 | of this software and associated documentation files (the "Software"), to deal | ||
| 6 | in the Software without restriction, including without limitation the rights | ||
| 7 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | ||
| 8 | copies of the Software, and to permit persons to whom the Software is | ||
| 9 | furnished to do so, subject to the following conditions: | ||
| 10 | |||
| 11 | The above copyright notice and this permission notice shall be included in all | ||
| 12 | copies or substantial portions of the Software. | ||
| 13 | |||
| 14 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | ||
| 15 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | ||
| 16 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT | ||
| 17 | SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, | ||
| 18 | DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR | ||
| 19 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR | ||
| 20 | THE USE OR OTHER DEALINGS IN THE SOFTWARE. | ||
| 21 | |||
| 22 | */ | ||
| 23 | |||
| 24 | #ifndef TOKEN_H | ||
| 25 | #define TOKEN_H | ||
| 26 | |||
| 27 | #define PURE_METHOD | ||
| 28 | |||
| 29 | struct token_t { | ||
| 30 | const char* string()const PURE_METHOD; | ||
| 31 | |||
| 32 | static char* concatString(const char* l, const char* r); | ||
| 33 | void assignString(const char* str); | ||
| 34 | void copyString(const char* str); | ||
| 35 | |||
| 36 | void release_this(); | ||
| 37 | |||
| 38 | private: | ||
| 39 | char *m_string; | ||
| 40 | size_t m_stringLen; | ||
| 41 | }; | ||
| 42 | |||
| 43 | inline const char* token_t::string()const | ||
| 44 | { | ||
| 45 | return m_string; | ||
| 46 | } | ||
| 47 | |||
| 48 | /* | ||
| 49 | * concatenate two strings into a newly allocated buffer | ||
| 50 | */ | ||
| 51 | inline char* token_t::concatString(const char* l, const char* r) | ||
| 52 | { | ||
| 53 | size_t cb = (l ? strlen (l) : 0) + strlen (r) + 1; | ||
| 54 | char* r_sz = new char[cb]; | ||
| 55 | *r_sz = 0; | ||
| 56 | if (l) strcat (r_sz, l); | ||
| 57 | strcat (r_sz, r); | ||
| 58 | |||
| 59 | return r_sz; | ||
| 60 | } | ||
| 61 | |||
| 62 | inline void token_t::assignString(const char* str) | ||
| 63 | { | ||
| 64 | m_string = const_cast<char*>(str); | ||
| 65 | m_stringLen = str ? strlen(str) : 0; | ||
| 66 | } | ||
| 67 | |||
| 68 | inline void token_t::copyString(const char* str) | ||
| 69 | { | ||
| 70 | if( str ) { | ||
| 71 | m_stringLen = strlen(str); | ||
| 72 | m_string = new char[m_stringLen+1]; | ||
| 73 | strcpy(m_string, str); | ||
| 74 | } | ||
| 75 | } | ||
| 76 | |||
| 77 | inline void token_t::release_this() | ||
| 78 | { | ||
| 79 | delete [] m_string; | ||
| 80 | m_string = 0; | ||
| 81 | } | ||
| 82 | |||
| 83 | #endif | ||
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.py b/bitbake/lib/bb/parse/parse_py/BBHandler.py new file mode 100644 index 0000000000..fac3e85b36 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.py | |||
| @@ -0,0 +1,378 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | """class for handling .bb files | ||
| 5 | |||
| 6 | Reads a .bb file and obtains its metadata | ||
| 7 | |||
| 8 | Copyright (C) 2003, 2004 Chris Larson | ||
| 9 | Copyright (C) 2003, 2004 Phil Blundell | ||
| 10 | |||
| 11 | This program is free software; you can redistribute it and/or modify it under | ||
| 12 | the terms of the GNU General Public License as published by the Free Software | ||
| 13 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 14 | version. | ||
| 15 | |||
| 16 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 17 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 18 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 19 | |||
| 20 | You should have received a copy of the GNU General Public License along with | ||
| 21 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 22 | Place, Suite 330, Boston, MA 02111-1307 USA.""" | ||
| 23 | |||
| 24 | import re, bb, os, sys | ||
| 25 | import bb.fetch, bb.build | ||
| 26 | from bb import debug, data, fetch, fatal | ||
| 27 | |||
| 28 | from ConfHandler import include, localpath, obtain, init | ||
| 29 | from bb.parse import ParseError | ||
| 30 | |||
| 31 | __func_start_regexp__ = re.compile( r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$" ) | ||
| 32 | __inherit_regexp__ = re.compile( r"inherit\s+(.+)" ) | ||
| 33 | __export_func_regexp__ = re.compile( r"EXPORT_FUNCTIONS\s+(.+)" ) | ||
| 34 | __addtask_regexp__ = re.compile("addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*") | ||
| 35 | __addhandler_regexp__ = re.compile( r"addhandler\s+(.+)" ) | ||
| 36 | __def_regexp__ = re.compile( r"def\s+(\w+).*:" ) | ||
| 37 | __python_func_regexp__ = re.compile( r"(\s+.*)|(^$)" ) | ||
| 38 | __word__ = re.compile(r"\S+") | ||
| 39 | |||
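Editor's note: the regular expressions above drive feeder(): __func_start_regexp__ recognises shell and python function headers, __addtask_regexp__ picks the task name and its optional before/after clauses out of an addtask line, and so on. For instance, matching a typical addtask line yields named groups (the sample line is for illustration only):

    import re

    # same pattern as __addtask_regexp__ above
    addtask_re = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")

    m = addtask_re.match("addtask compile after do_unpack before do_install")
    # m.group("func")   -> 'compile'
    # m.group("after")  -> 'do_unpack '  (runs up to the 'before' keyword)
    # m.group("before") -> 'do_install'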
| 40 | __infunc__ = "" | ||
| 41 | __inpython__ = False | ||
| 42 | __body__ = [] | ||
| 43 | __bbpath_found__ = 0 | ||
| 44 | __classname__ = "" | ||
| 45 | classes = [ None, ] | ||
| 46 | |||
| 47 | def supports(fn, d): | ||
| 48 | localfn = localpath(fn, d) | ||
| 49 | return localfn[-3:] == ".bb" or localfn[-8:] == ".bbclass" or localfn[-4:] == ".inc" | ||
| 50 | |||
| 51 | def inherit(files, d): | ||
| 52 | __inherit_cache = data.getVar('__inherit_cache', d) or "" | ||
| 53 | fn = "" | ||
| 54 | lineno = 0 | ||
| 55 | for f in files: | ||
| 56 | file = data.expand(f, d) | ||
| 57 | if file[0] != "/" and file[-8:] != ".bbclass": | ||
| 58 | file = os.path.join('classes', '%s.bbclass' % file) | ||
| 59 | |||
| 60 | if not file in __inherit_cache.split(): | ||
| 61 | debug(2, "BB %s:%d: inheriting %s" % (fn, lineno, file)) | ||
| 62 | __inherit_cache += " %s" % file | ||
| 63 | include(fn, file, d) | ||
| 64 | data.setVar('__inherit_cache', __inherit_cache, d) | ||
| 65 | |||
| 66 | |||
| 67 | def handle(fn, d, include = 0): | ||
| 68 | global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __infunc__, __body__, __bbpath_found__, __residue__ | ||
| 69 | __body__ = [] | ||
| 70 | __bbpath_found__ = 0 | ||
| 71 | __infunc__ = "" | ||
| 72 | __classname__ = "" | ||
| 73 | __residue__ = [] | ||
| 74 | |||
| 75 | if include == 0: | ||
| 76 | debug(2, "BB " + fn + ": handle(data)") | ||
| 77 | else: | ||
| 78 | debug(2, "BB " + fn + ": handle(data, include)") | ||
| 79 | |||
| 80 | (root, ext) = os.path.splitext(os.path.basename(fn)) | ||
| 81 | init(d) | ||
| 82 | |||
| 83 | if ext == ".bbclass": | ||
| 84 | __classname__ = root | ||
| 85 | classes.append(__classname__) | ||
| 86 | |||
| 87 | if include != 0: | ||
| 88 | oldfile = data.getVar('FILE', d) | ||
| 89 | else: | ||
| 90 | oldfile = None | ||
| 91 | |||
| 92 | fn = obtain(fn, d) | ||
| 93 | bbpath = (data.getVar('BBPATH', d, 1) or '').split(':') | ||
| 94 | if not os.path.isabs(fn): | ||
| 95 | f = None | ||
| 96 | for p in bbpath: | ||
| 97 | p = data.expand(p, d) | ||
| 98 | j = os.path.join(p, fn) | ||
| 99 | if os.access(j, os.R_OK): | ||
| 100 | abs_fn = j | ||
| 101 | f = open(j, 'r') | ||
| 102 | break | ||
| 103 | if f is None: | ||
| 104 | raise IOError("file not found") | ||
| 105 | else: | ||
| 106 | f = open(fn,'r') | ||
| 107 | abs_fn = fn | ||
| 108 | |||
| 109 | if ext != ".bbclass": | ||
| 110 | bbpath.insert(0, os.path.dirname(abs_fn)) | ||
| 111 | data.setVar('BBPATH', ":".join(bbpath), d) | ||
| 112 | |||
| 113 | if include: | ||
| 114 | bb.parse.mark_dependency(d, abs_fn) | ||
| 115 | |||
| 116 | if ext != ".bbclass": | ||
| 117 | data.setVar('FILE', fn, d) | ||
| 118 | i = (data.getVar("INHERIT", d, 1) or "").split() | ||
| 119 | if not "base" in i and __classname__ != "base": | ||
| 120 | i[0:0] = ["base"] | ||
| 121 | inherit(i, d) | ||
| 122 | |||
| 123 | lineno = 0 | ||
| 124 | while 1: | ||
| 125 | lineno = lineno + 1 | ||
| 126 | s = f.readline() | ||
| 127 | if not s: break | ||
| 128 | s = s.rstrip() | ||
| 129 | feeder(lineno, s, fn, d) | ||
| 130 | if __inpython__: | ||
| 131 | # add a blank line to close out any python definition | ||
| 132 | feeder(lineno + 1, "", fn, d) | ||
| 133 | if ext == ".bbclass": | ||
| 134 | classes.remove(__classname__) | ||
| 135 | else: | ||
| 136 | if include == 0: | ||
| 137 | data.expandKeys(d) | ||
| 138 | data.update_data(d) | ||
| 139 | anonqueue = data.getVar("__anonqueue", d, 1) or [] | ||
| 140 | for anon in anonqueue: | ||
| 141 | data.setVar("__anonfunc", anon["content"], d) | ||
| 142 | data.setVarFlags("__anonfunc", anon["flags"], d) | ||
| 143 | from bb import build | ||
| 144 | try: | ||
| 145 | t = data.getVar('T', d) | ||
| 146 | data.setVar('T', '${TMPDIR}/', d) | ||
| 147 | build.exec_func("__anonfunc", d) | ||
| 148 | data.delVar('T', d) | ||
| 149 | if t: | ||
| 150 | data.setVar('T', t, d) | ||
| 151 | except Exception, e: | ||
| 152 | bb.debug(1, "executing anonymous function: %s" % e) | ||
| 153 | raise | ||
| 154 | data.delVar("__anonqueue", d) | ||
| 155 | data.delVar("__anonfunc", d) | ||
| 156 | set_additional_vars(fn, d, include) | ||
| 157 | data.update_data(d) | ||
| 158 | |||
| 159 | for var in data.keys(d): | ||
| 160 | if data.getVarFlag(var, 'handler', d): | ||
| 161 | bb.event.register(data.getVar(var, d)) | ||
| 162 | continue | ||
| 163 | |||
| 164 | if not data.getVarFlag(var, 'task', d): | ||
| 165 | continue | ||
| 166 | |||
| 167 | deps = data.getVarFlag(var, 'deps', d) or [] | ||
| 168 | postdeps = data.getVarFlag(var, 'postdeps', d) or [] | ||
| 169 | bb.build.add_task(var, deps, d) | ||
| 170 | for p in postdeps: | ||
| 171 | pdeps = data.getVarFlag(p, 'deps', d) or [] | ||
| 172 | pdeps.append(var) | ||
| 173 | data.setVarFlag(p, 'deps', pdeps, d) | ||
| 174 | bb.build.add_task(p, pdeps, d) | ||
| 175 | bbpath.pop(0) | ||
| 176 | if oldfile: | ||
| 177 | bb.data.setVar("FILE", oldfile, d) | ||
| 178 | return d | ||
| 179 | |||
| 180 | def feeder(lineno, s, fn, d): | ||
| 181 | global __func_start_regexp__, __inherit_regexp__, __export_func_regexp__, __addtask_regexp__, __addhandler_regexp__, __def_regexp__, __python_func_regexp__, __inpython__,__infunc__, __body__, __bbpath_found__, classes, bb, __residue__ | ||
| 182 | if __infunc__: | ||
| 183 | if s == '}': | ||
| 184 | __body__.append('') | ||
| 185 | data.setVar(__infunc__, '\n'.join(__body__), d) | ||
| 186 | data.setVarFlag(__infunc__, "func", 1, d) | ||
| 187 | if __infunc__ == "__anonymous": | ||
| 188 | anonqueue = bb.data.getVar("__anonqueue", d) or [] | ||
| 189 | anonitem = {} | ||
| 190 | anonitem["content"] = bb.data.getVar("__anonymous", d) | ||
| 191 | anonitem["flags"] = bb.data.getVarFlags("__anonymous", d) | ||
| 192 | anonqueue.append(anonitem) | ||
| 193 | bb.data.setVar("__anonqueue", anonqueue, d) | ||
| 194 | bb.data.delVarFlags("__anonymous", d) | ||
| 195 | bb.data.delVar("__anonymous", d) | ||
| 196 | __infunc__ = "" | ||
| 197 | __body__ = [] | ||
| 198 | else: | ||
| 199 | __body__.append(s) | ||
| 200 | return | ||
| 201 | |||
| 202 | if __inpython__: | ||
| 203 | m = __python_func_regexp__.match(s) | ||
| 204 | if m: | ||
| 205 | __body__.append(s) | ||
| 206 | return | ||
| 207 | else: | ||
| 208 | text = '\n'.join(__body__) | ||
| 209 | comp = compile(text, "<bb>", "exec") | ||
| 210 | exec comp in __builtins__ | ||
| 211 | __body__ = [] | ||
| 212 | __inpython__ = False | ||
| 213 | funcs = data.getVar('__functions__', d) or "" | ||
| 214 | data.setVar('__functions__', "%s\n%s" % (funcs, text), d) | ||
| 215 | # fall through | ||
| 216 | |||
| 217 | if s == '' or s[0] == '#': return # skip comments and empty lines | ||
| 218 | |||
| 219 | if s[-1] == '\\': | ||
| 220 | __residue__.append(s[:-1]) | ||
| 221 | return | ||
| 222 | |||
| 223 | s = "".join(__residue__) + s | ||
| 224 | __residue__ = [] | ||
| 225 | |||
| 226 | m = __func_start_regexp__.match(s) | ||
| 227 | if m: | ||
| 228 | __infunc__ = m.group("func") or "__anonymous" | ||
| 229 | key = __infunc__ | ||
| 230 | if data.getVar(key, d): | ||
| 231 | # clean up old version of this piece of metadata, as its | ||
| 232 | # flags could cause problems | ||
| 233 | data.setVarFlag(key, 'python', None, d) | ||
| 234 | data.setVarFlag(key, 'fakeroot', None, d) | ||
| 235 | if m.group("py") is not None: | ||
| 236 | data.setVarFlag(key, "python", "1", d) | ||
| 237 | else: | ||
| 238 | data.delVarFlag(key, "python", d) | ||
| 239 | if m.group("fr") is not None: | ||
| 240 | data.setVarFlag(key, "fakeroot", "1", d) | ||
| 241 | else: | ||
| 242 | data.delVarFlag(key, "fakeroot", d) | ||
| 243 | return | ||
| 244 | |||
| 245 | m = __def_regexp__.match(s) | ||
| 246 | if m: | ||
| 247 | __body__.append(s) | ||
| 248 | __inpython__ = True | ||
| 249 | return | ||
| 250 | |||
| 251 | m = __export_func_regexp__.match(s) | ||
| 252 | if m: | ||
| 253 | fns = m.group(1) | ||
| 254 | n = __word__.findall(fns) | ||
| 255 | for f in n: | ||
| 256 | allvars = [] | ||
| 257 | allvars.append(f) | ||
| 258 | allvars.append(classes[-1] + "_" + f) | ||
| 259 | |||
| 260 | vars = [[ allvars[0], allvars[1] ]] | ||
| 261 | if len(classes) > 1 and classes[-2] is not None: | ||
| 262 | allvars.append(classes[-2] + "_" + f) | ||
| 263 | vars = [] | ||
| 264 | vars.append([allvars[2], allvars[1]]) | ||
| 265 | vars.append([allvars[0], allvars[2]]) | ||
| 266 | |||
| 267 | for (var, calledvar) in vars: | ||
| 268 | if data.getVar(var, d) and not data.getVarFlag(var, 'export_func', d): | ||
| 269 | continue | ||
| 270 | |||
| 271 | if data.getVar(var, d): | ||
| 272 | data.setVarFlag(var, 'python', None, d) | ||
| 273 | data.setVarFlag(var, 'func', None, d) | ||
| 274 | |||
| 275 | for flag in [ "func", "python" ]: | ||
| 276 | if data.getVarFlag(calledvar, flag, d): | ||
| 277 | data.setVarFlag(var, flag, data.getVarFlag(calledvar, flag, d), d) | ||
| 278 | for flag in [ "dirs" ]: | ||
| 279 | if data.getVarFlag(var, flag, d): | ||
| 280 | data.setVarFlag(calledvar, flag, data.getVarFlag(var, flag, d), d) | ||
| 281 | |||
| 282 | if data.getVarFlag(calledvar, "python", d): | ||
| 283 | data.setVar(var, "\tbb.build.exec_func('" + calledvar + "', d)\n", d) | ||
| 284 | else: | ||
| 285 | data.setVar(var, "\t" + calledvar + "\n", d) | ||
| 286 | data.setVarFlag(var, 'export_func', '1', d) | ||
| 287 | |||
| 288 | return | ||
| 289 | |||
| 290 | m = __addtask_regexp__.match(s) | ||
| 291 | if m: | ||
| 292 | func = m.group("func") | ||
| 293 | before = m.group("before") | ||
| 294 | after = m.group("after") | ||
| 295 | if func is None: | ||
| 296 | return | ||
| 297 | var = "do_" + func | ||
| 298 | |||
| 299 | data.setVarFlag(var, "task", 1, d) | ||
| 300 | |||
| 301 | if after is not None: | ||
| 302 | # set up deps for function | ||
| 303 | data.setVarFlag(var, "deps", after.split(), d) | ||
| 304 | if before is not None: | ||
| 305 | # set up things that depend on this func | ||
| 306 | data.setVarFlag(var, "postdeps", before.split(), d) | ||
| 307 | return | ||
| 308 | |||
| 309 | m = __addhandler_regexp__.match(s) | ||
| 310 | if m: | ||
| 311 | fns = m.group(1) | ||
| 312 | hs = __word__.findall(fns) | ||
| 313 | for h in hs: | ||
| 314 | data.setVarFlag(h, "handler", 1, d) | ||
| 315 | return | ||
| 316 | |||
| 317 | m = __inherit_regexp__.match(s) | ||
| 318 | if m: | ||
| 319 | |||
| 320 | files = m.group(1) | ||
| 321 | n = __word__.findall(files) | ||
| 322 | inherit(n, d) | ||
| 323 | return | ||
| 324 | |||
| 325 | from bb.parse import ConfHandler | ||
| 326 | return ConfHandler.feeder(lineno, s, fn, d) | ||
| 327 | |||
| 328 | __pkgsplit_cache__={} | ||
| 329 | def vars_from_file(mypkg, d): | ||
| 330 | if not mypkg: | ||
| 331 | return (None, None, None) | ||
| 332 | if mypkg in __pkgsplit_cache__: | ||
| 333 | return __pkgsplit_cache__[mypkg] | ||
| 334 | |||
| 335 | myfile = os.path.splitext(os.path.basename(mypkg)) | ||
| 336 | parts = myfile[0].split('_') | ||
| 337 | __pkgsplit_cache__[mypkg] = parts | ||
| 338 | exp = 3 - len(parts) | ||
| 339 | tmplist = [] | ||
| 340 | while exp != 0: | ||
| 341 | exp -= 1 | ||
| 342 | tmplist.append(None) | ||
| 343 | parts.extend(tmplist) | ||
| 344 | return parts | ||
| 345 | |||
| 346 | def set_additional_vars(file, d, include): | ||
| 347 | """Deduce rest of variables, e.g. ${A} out of ${SRC_URI}""" | ||
| 348 | |||
| 349 | debug(2,"BB %s: set_additional_vars" % file) | ||
| 350 | |||
| 351 | src_uri = data.getVar('SRC_URI', d) | ||
| 352 | if not src_uri: | ||
| 353 | return | ||
| 354 | src_uri = data.expand(src_uri, d) | ||
| 355 | |||
| 356 | a = data.getVar('A', d) | ||
| 357 | if a: | ||
| 358 | a = data.expand(a, d).split() | ||
| 359 | else: | ||
| 360 | a = [] | ||
| 361 | |||
| 362 | from bb import fetch | ||
| 363 | try: | ||
| 364 | fetch.init(src_uri.split(), d) | ||
| 365 | except fetch.NoMethodError: | ||
| 366 | pass | ||
| 367 | except bb.MalformedUrl,e: | ||
| 368 | raise ParseError("Unable to generate local paths for SRC_URI due to malformed uri: %s" % e) | ||
| 369 | |||
| 370 | a += fetch.localpaths(d) | ||
| 371 | del fetch | ||
| 372 | data.setVar('A', " ".join(a), d) | ||
| 373 | |||
| 374 | |||
| 375 | # Add us to the handlers list | ||
| 376 | from bb.parse import handlers | ||
| 377 | handlers.append({'supports': supports, 'handle': handle, 'init': init}) | ||
| 378 | del handlers | ||
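For reference, a minimal standalone sketch (Python 2, to match the code above) of what the module-level regexps near the top of BBHandler.py classify. The two patterns are copied from the file; the sample lines ("python do_fetch () {", "addtask compile ...") are purely illustrative and not taken from any particular recipe.

    import re

    # copies of two of the patterns defined near the top of BBHandler.py
    func_start = re.compile(r"(((?P<py>python)|(?P<fr>fakeroot))\s*)*(?P<func>[\w\.\-\+\{\}\$]+)?\s*\(\s*\)\s*{$")
    addtask = re.compile(r"addtask\s+(?P<func>\w+)\s*((before\s*(?P<before>((.*(?=after))|(.*))))|(after\s*(?P<after>((.*(?=before))|(.*)))))*")

    m = func_start.match("python do_fetch () {")
    print m.group("py"), m.group("func")    # python do_fetch   (the "fr" group stays None)

    m = addtask.match("addtask compile after do_patch before do_install")
    print m.group("func"), m.group("after"), m.group("before")
    # compile / do_patch / do_install (modulo surrounding whitespace)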
diff --git a/bitbake/lib/bb/parse/parse_py/BBHandler.pyc b/bitbake/lib/bb/parse/parse_py/BBHandler.pyc new file mode 100644 index 0000000000..bfaa4c6004 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_py/BBHandler.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.py b/bitbake/lib/bb/parse/parse_py/ConfHandler.py new file mode 100644 index 0000000000..41ef96d557 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.py | |||
| @@ -0,0 +1,199 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | """class for handling configuration data files | ||
| 5 | |||
| 6 | Reads a .conf file and obtains its metadata | ||
| 7 | |||
| 8 | Copyright (C) 2003, 2004 Chris Larson | ||
| 9 | Copyright (C) 2003, 2004 Phil Blundell | ||
| 10 | |||
| 11 | This program is free software; you can redistribute it and/or modify it under | ||
| 12 | the terms of the GNU General Public License as published by the Free Software | ||
| 13 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 14 | version. | ||
| 15 | |||
| 16 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 17 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 18 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 19 | |||
| 20 | You should have received a copy of the GNU General Public License along with | ||
| 21 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 22 | Place, Suite 330, Boston, MA 02111-1307 USA.""" | ||
| 23 | |||
| 24 | import re, bb.data, os, sys | ||
| 25 | from bb import debug, fatal | ||
| 26 | from bb.parse import ParseError | ||
| 27 | |||
| 28 | #__config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}]+)\s*(?P<colon>:)?(?P<ques>\?)?=\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$") | ||
| 29 | __config_regexp__ = re.compile( r"(?P<exp>export\s*)?(?P<var>[a-zA-Z0-9\-_+.${}/]+)(\[(?P<flag>[a-zA-Z0-9\-_+.]+)\])?\s*((?P<colon>:=)|(?P<ques>\?=)|(?P<append>\+=)|(?P<prepend>=\+)|(?P<predot>=\.)|(?P<postdot>\.=)|=)\s*(?P<apo>['\"]?)(?P<value>.*)(?P=apo)$") | ||
| 30 | __include_regexp__ = re.compile( r"include\s+(.+)" ) | ||
| 31 | |||
| 32 | def init(data): | ||
| 33 | if not bb.data.getVar('TOPDIR', data): | ||
| 34 | bb.data.setVar('TOPDIR', os.getcwd(), data) | ||
| 35 | if not bb.data.getVar('BBPATH', data): | ||
| 36 | bb.data.setVar('BBPATH', os.path.join(sys.prefix, 'share', 'bitbake'), data) | ||
| 37 | |||
| 38 | def supports(fn, d): | ||
| 39 | return localpath(fn, d)[-5:] == ".conf" | ||
| 40 | |||
| 41 | def localpath(fn, d): | ||
| 42 | if os.path.exists(fn): | ||
| 43 | return fn | ||
| 44 | |||
| 45 | localfn = None | ||
| 46 | try: | ||
| 47 | localfn = bb.fetch.localpath(fn, d) | ||
| 48 | except bb.MalformedUrl: | ||
| 49 | pass | ||
| 50 | |||
| 51 | if not localfn: | ||
| 52 | localfn = fn | ||
| 53 | return localfn | ||
| 54 | |||
| 55 | def obtain(fn, data = bb.data.init()): | ||
| 56 | import sys, bb | ||
| 57 | fn = bb.data.expand(fn, data) | ||
| 58 | localfn = bb.data.expand(localpath(fn, data), data) | ||
| 59 | |||
| 60 | if localfn != fn: | ||
| 61 | dldir = bb.data.getVar('DL_DIR', data, 1) | ||
| 62 | if not dldir: | ||
| 63 | debug(1, "obtain: DL_DIR not defined") | ||
| 64 | return localfn | ||
| 65 | bb.mkdirhier(dldir) | ||
| 66 | try: | ||
| 67 | bb.fetch.init([fn]) | ||
| 68 | except bb.fetch.NoMethodError: | ||
| 69 | (type, value, traceback) = sys.exc_info() | ||
| 70 | debug(1, "obtain: no method: %s" % value) | ||
| 71 | return localfn | ||
| 72 | |||
| 73 | try: | ||
| 74 | bb.fetch.go(data) | ||
| 75 | except bb.fetch.MissingParameterError: | ||
| 76 | (type, value, traceback) = sys.exc_info() | ||
| 77 | debug(1, "obtain: missing parameters: %s" % value) | ||
| 78 | return localfn | ||
| 79 | except bb.fetch.FetchError: | ||
| 80 | (type, value, traceback) = sys.exc_info() | ||
| 81 | debug(1, "obtain: failed: %s" % value) | ||
| 82 | return localfn | ||
| 83 | return localfn | ||
| 84 | |||
| 85 | |||
| 86 | def include(oldfn, fn, data = bb.data.init()): | ||
| 87 | if oldfn == fn: # prevent infinite recursion | ||
| 88 | return None | ||
| 89 | |||
| 90 | import bb | ||
| 91 | fn = bb.data.expand(fn, data) | ||
| 92 | oldfn = bb.data.expand(oldfn, data) | ||
| 93 | |||
| 94 | from bb.parse import handle | ||
| 95 | try: | ||
| 96 | ret = handle(fn, data, 1) | ||
| 97 | except IOError: | ||
| 98 | debug(2, "CONF file '%s' not found" % fn) | ||
| 99 | |||
| 100 | def handle(fn, data = bb.data.init(), include = 0): | ||
| 101 | if include: | ||
| 102 | inc_string = "including" | ||
| 103 | else: | ||
| 104 | inc_string = "reading" | ||
| 105 | init(data) | ||
| 106 | |||
| 107 | if include == 0: | ||
| 108 | bb.data.inheritFromOS(data) | ||
| 109 | oldfile = None | ||
| 110 | else: | ||
| 111 | oldfile = bb.data.getVar('FILE', data) | ||
| 112 | |||
| 113 | fn = obtain(fn, data) | ||
| 114 | bbpath = [] | ||
| 115 | if not os.path.isabs(fn): | ||
| 116 | f = None | ||
| 117 | vbbpath = bb.data.getVar("BBPATH", data) | ||
| 118 | if vbbpath: | ||
| 119 | bbpath += vbbpath.split(":") | ||
| 120 | for p in bbpath: | ||
| 121 | currname = os.path.join(bb.data.expand(p, data), fn) | ||
| 122 | if os.access(currname, os.R_OK): | ||
| 123 | f = open(currname, 'r') | ||
| 124 | abs_fn = currname | ||
| 125 | debug(1, "CONF %s %s" % (inc_string, currname)) | ||
| 126 | break | ||
| 127 | if f is None: | ||
| 128 | raise IOError("file not found") | ||
| 129 | else: | ||
| 130 | f = open(fn,'r') | ||
| 131 | debug(1, "CONF %s %s" % (inc_string,fn)) | ||
| 132 | abs_fn = fn | ||
| 133 | |||
| 134 | if include: | ||
| 135 | bb.parse.mark_dependency(data, abs_fn) | ||
| 136 | |||
| 137 | lineno = 0 | ||
| 138 | bb.data.setVar('FILE', fn, data) | ||
| 139 | while 1: | ||
| 140 | lineno = lineno + 1 | ||
| 141 | s = f.readline() | ||
| 142 | if not s: break | ||
| 143 | w = s.strip() | ||
| 144 | if not w: continue # skip empty lines | ||
| 145 | s = s.rstrip() | ||
| 146 | if s[0] == '#': continue # skip comments | ||
| 147 | while s[-1] == '\\': | ||
| 148 | s2 = f.readline()[:-1].strip() | ||
| 149 | lineno = lineno + 1 | ||
| 150 | s = s[:-1] + s2 | ||
| 151 | feeder(lineno, s, fn, data) | ||
| 152 | |||
| 153 | if oldfile: | ||
| 154 | bb.data.setVar('FILE', oldfile, data) | ||
| 155 | return data | ||
| 156 | |||
| 157 | def feeder(lineno, s, fn, data = bb.data.init()): | ||
| 158 | m = __config_regexp__.match(s) | ||
| 159 | if m: | ||
| 160 | groupd = m.groupdict() | ||
| 161 | key = groupd["var"] | ||
| 162 | if "exp" in groupd and groupd["exp"] != None: | ||
| 163 | bb.data.setVarFlag(key, "export", 1, data) | ||
| 164 | if "ques" in groupd and groupd["ques"] != None: | ||
| 165 | val = bb.data.getVar(key, data) | ||
| 166 | if val == None: | ||
| 167 | val = groupd["value"] | ||
| 168 | elif "colon" in groupd and groupd["colon"] != None: | ||
| 169 | val = bb.data.expand(groupd["value"], data) | ||
| 170 | elif "append" in groupd and groupd["append"] != None: | ||
| 171 | val = "%s %s" % ((bb.data.getVar(key, data) or ""), groupd["value"]) | ||
| 172 | elif "prepend" in groupd and groupd["prepend"] != None: | ||
| 173 | val = "%s %s" % (groupd["value"], (bb.data.getVar(key, data) or "")) | ||
| 174 | elif "postdot" in groupd and groupd["postdot"] != None: | ||
| 175 | val = "%s%s" % ((bb.data.getVar(key, data) or ""), groupd["value"]) | ||
| 176 | elif "predot" in groupd and groupd["predot"] != None: | ||
| 177 | val = "%s%s" % (groupd["value"], (bb.data.getVar(key, data) or "")) | ||
| 178 | else: | ||
| 179 | val = groupd["value"] | ||
| 180 | if 'flag' in groupd and groupd['flag'] != None: | ||
| 181 | # bb.note("setVarFlag(%s, %s, %s, data)" % (key, groupd['flag'], val)) | ||
| 182 | bb.data.setVarFlag(key, groupd['flag'], val, data) | ||
| 183 | else: | ||
| 184 | bb.data.setVar(key, val, data) | ||
| 185 | return | ||
| 186 | |||
| 187 | m = __include_regexp__.match(s) | ||
| 188 | if m: | ||
| 189 | s = bb.data.expand(m.group(1), data) | ||
| 190 | # debug(2, "CONF %s:%d: including %s" % (fn, lineno, s)) | ||
| 191 | include(fn, s, data) | ||
| 192 | return | ||
| 193 | |||
| 194 | raise ParseError("%s:%d: unparsed line: '%s'" % (fn, lineno, s)); | ||
| 195 | |||
| 196 | # Add us to the handlers list | ||
| 197 | from bb.parse import handlers | ||
| 198 | handlers.append({'supports': supports, 'handle': handle, 'init': init}) | ||
| 199 | del handlers | ||
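The feeder() above distinguishes seven assignment forms in .conf files. As a rough illustration, here is a simplified model of just the operator semantics, using a plain dict instead of bb.data, so variable expansion, export and [flag] handling are deliberately left out:

    d = {}

    def assign(d, key, op, value):
        """Toy model of the .conf assignment operators handled above."""
        old = d.get(key, "")
        if op == "?=":                                     # default, only if unset
            if key not in d:
                d[key] = value
        elif op == "+=": d[key] = "%s %s" % (old, value)   # append with a space
        elif op == "=+": d[key] = "%s %s" % (value, old)   # prepend with a space
        elif op == ".=": d[key] = "%s%s" % (old, value)    # append, no space
        elif op == "=.": d[key] = "%s%s" % (value, old)    # prepend, no space
        else:            d[key] = value                    # "=" (":=" also expands immediately)

    assign(d, "CFLAGS", "=", "-O2")
    assign(d, "CFLAGS", "+=", "-g")
    assign(d, "CFLAGS", "?=", "-Os")   # ignored, CFLAGS is already set
    print d["CFLAGS"]                  # -O2 -g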
diff --git a/bitbake/lib/bb/parse/parse_py/ConfHandler.pyc b/bitbake/lib/bb/parse/parse_py/ConfHandler.pyc new file mode 100644 index 0000000000..e0ec666ed1 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_py/ConfHandler.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/parse/parse_py/__init__.py b/bitbake/lib/bb/parse/parse_py/__init__.py new file mode 100644 index 0000000000..6a2ce4059d --- /dev/null +++ b/bitbake/lib/bb/parse/parse_py/__init__.py | |||
| @@ -0,0 +1,32 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | """ | ||
| 5 | BitBake Parsers | ||
| 6 | |||
| 7 | File parsers for the BitBake build tools. | ||
| 8 | |||
| 9 | Copyright (C) 2003, 2004 Chris Larson | ||
| 10 | Copyright (C) 2003, 2004 Phil Blundell | ||
| 11 | |||
| 12 | This program is free software; you can redistribute it and/or modify it under | ||
| 13 | the terms of the GNU General Public License as published by the Free Software | ||
| 14 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 15 | version. | ||
| 16 | |||
| 17 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 18 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 19 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 20 | |||
| 21 | You should have received a copy of the GNU General Public License along with | ||
| 22 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 23 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 24 | |||
| 25 | Based on functions from the base bb module, Copyright 2003 Holger Schurig | ||
| 26 | """ | ||
| 27 | __version__ = '1.0' | ||
| 28 | |||
| 29 | __all__ = [ 'ConfHandler', 'BBHandler'] | ||
| 30 | |||
| 31 | import ConfHandler | ||
| 32 | import BBHandler | ||
diff --git a/bitbake/lib/bb/parse/parse_py/__init__.pyc b/bitbake/lib/bb/parse/parse_py/__init__.pyc new file mode 100644 index 0000000000..c081e02727 --- /dev/null +++ b/bitbake/lib/bb/parse/parse_py/__init__.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/shell.py b/bitbake/lib/bb/shell.py new file mode 100644 index 0000000000..97e61e1169 --- /dev/null +++ b/bitbake/lib/bb/shell.py | |||
| @@ -0,0 +1,779 @@ | |||
| 1 | #!/usr/bin/env python | ||
| 2 | # ex:ts=4:sw=4:sts=4:et | ||
| 3 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 4 | ########################################################################## | ||
| 5 | # | ||
| 6 | # Copyright (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de>, Vanille Media | ||
| 7 | # | ||
| 8 | # This program is free software; you can redistribute it and/or modify it under | ||
| 9 | # the terms of the GNU General Public License as published by the Free Software | ||
| 10 | # Foundation; version 2 of the License. | ||
| 11 | # | ||
| 12 | # This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 13 | # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 14 | # FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 15 | # | ||
| 16 | # You should have received a copy of the GNU General Public License along with | ||
| 17 | # this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 18 | # Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 19 | # | ||
| 20 | ########################################################################## | ||
| 21 | |||
| 22 | """ | ||
| 23 | BitBake Shell | ||
| 24 | |||
| 25 | IDEAS: | ||
| 26 | * list defined tasks per package | ||
| 27 | * list classes | ||
| 28 | * toggle force | ||
| 29 | * command to reparse just one (or more) bbfile(s) | ||
| 30 | * automatic check if reparsing is necessary (inotify?) | ||
| 31 | * frontend for bb file manipulation | ||
| 32 | * more shell-like features: | ||
| 33 | - output control, i.e. pipe output into grep, sort, etc. | ||
| 34 | - job control, i.e. bring running commands into background and foreground | ||
| 35 | * start parsing in background right after startup | ||
| 36 | * ncurses interface | ||
| 37 | |||
| 38 | PROBLEMS: | ||
| 39 | * force doesn't always work | ||
| 40 | * readline completion for commands with more than one parameter | ||
| 41 | |||
| 42 | """ | ||
| 43 | |||
| 44 | ########################################################################## | ||
| 45 | # Import and setup global variables | ||
| 46 | ########################################################################## | ||
| 47 | |||
| 48 | try: | ||
| 49 | set | ||
| 50 | except NameError: | ||
| 51 | from sets import Set as set | ||
| 52 | import sys, os, imp, readline, socket, httplib, urllib, commands, popen2, copy, shlex, Queue, fnmatch | ||
| 53 | imp.load_source( "bitbake", os.path.dirname( sys.argv[0] )+"/bitbake" ) | ||
| 54 | from bb import data, parse, build, fatal | ||
| 55 | |||
| 56 | __version__ = "0.5.2" | ||
| 57 | __credits__ = """BitBake Shell Version %s (C) 2005 Michael 'Mickey' Lauer <mickey@Vanille.de> | ||
| 58 | Type 'help' for more information, press CTRL-D to exit.""" % __version__ | ||
| 59 | |||
| 60 | cmds = {} | ||
| 61 | leave_mainloop = False | ||
| 62 | last_exception = None | ||
| 63 | cooker = None | ||
| 64 | parsed = False | ||
| 65 | initdata = None | ||
| 66 | debug = os.environ.get( "BBSHELL_DEBUG", "" ) | ||
| 67 | |||
| 68 | ########################################################################## | ||
| 69 | # Class BitBakeShellCommands | ||
| 70 | ########################################################################## | ||
| 71 | |||
| 72 | class BitBakeShellCommands: | ||
| 73 | """This class contains the valid commands for the shell""" | ||
| 74 | |||
| 75 | def __init__( self, shell ): | ||
| 76 | """Register all the commands""" | ||
| 77 | self._shell = shell | ||
| 78 | for attr in BitBakeShellCommands.__dict__: | ||
| 79 | if not attr.startswith( "_" ): | ||
| 80 | if attr.endswith( "_" ): | ||
| 81 | command = attr[:-1].lower() | ||
| 82 | else: | ||
| 83 | command = attr[:].lower() | ||
| 84 | method = getattr( BitBakeShellCommands, attr ) | ||
| 85 | debugOut( "registering command '%s'" % command ) | ||
| 86 | # scan number of arguments | ||
| 87 | usage = getattr( method, "usage", "" ) | ||
| 88 | if usage != "<...>": | ||
| 89 | numArgs = len( usage.split() ) | ||
| 90 | else: | ||
| 91 | numArgs = -1 | ||
| 92 | shell.registerCommand( command, method, numArgs, "%s %s" % ( command, usage ), method.__doc__ ) | ||
| 93 | |||
| 94 | def _checkParsed( self ): | ||
| 95 | if not parsed: | ||
| 96 | print "SHELL: This command needs to parse bbfiles..." | ||
| 97 | self.parse( None ) | ||
| 98 | |||
| 99 | def _findProvider( self, item ): | ||
| 100 | self._checkParsed() | ||
| 101 | preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 ) | ||
| 102 | if not preferred: preferred = item | ||
| 103 | try: | ||
| 104 | lv, lf, pv, pf = cooker.findBestProvider( preferred ) | ||
| 105 | except KeyError: | ||
| 106 | if item in cooker.status.providers: | ||
| 107 | pf = cooker.status.providers[item][0] | ||
| 108 | else: | ||
| 109 | pf = None | ||
| 110 | return pf | ||
| 111 | |||
| 112 | def alias( self, params ): | ||
| 113 | """Register a new name for a command""" | ||
| 114 | new, old = params | ||
| 115 | if not old in cmds: | ||
| 116 | print "ERROR: Command '%s' not known" % old | ||
| 117 | else: | ||
| 118 | cmds[new] = cmds[old] | ||
| 119 | print "OK" | ||
| 120 | alias.usage = "<alias> <command>" | ||
| 121 | |||
| 122 | def buffer( self, params ): | ||
| 123 | """Dump specified output buffer""" | ||
| 124 | index = params[0] | ||
| 125 | print self._shell.myout.buffer( int( index ) ) | ||
| 126 | buffer.usage = "<index>" | ||
| 127 | |||
| 128 | def buffers( self, params ): | ||
| 129 | """Show the available output buffers""" | ||
| 130 | commands = self._shell.myout.bufferedCommands() | ||
| 131 | if not commands: | ||
| 132 | print "SHELL: No buffered commands available yet. Start doing something." | ||
| 133 | else: | ||
| 134 | print "="*35, "Available Output Buffers", "="*27 | ||
| 135 | for index, cmd in enumerate( commands ): | ||
| 136 | print "| %s %s" % ( str( index ).ljust( 3 ), cmd ) | ||
| 137 | print "="*88 | ||
| 138 | |||
| 139 | def build( self, params, cmd = "build" ): | ||
| 140 | """Build a providee""" | ||
| 141 | globexpr = params[0] | ||
| 142 | self._checkParsed() | ||
| 143 | names = globfilter( cooker.status.pkg_pn.keys(), globexpr ) | ||
| 144 | if len( names ) == 0: names = [ globexpr ] | ||
| 145 | print "SHELL: Building %s" % ' '.join( names ) | ||
| 146 | |||
| 147 | oldcmd = cooker.configuration.cmd | ||
| 148 | cooker.configuration.cmd = cmd | ||
| 149 | cooker.build_cache = [] | ||
| 150 | cooker.build_cache_fail = [] | ||
| 151 | |||
| 152 | for name in names: | ||
| 153 | try: | ||
| 154 | cooker.buildProvider( name ) | ||
| 155 | except build.EventException, e: | ||
| 156 | print "ERROR: Couldn't build '%s'" % name | ||
| 157 | global last_exception | ||
| 158 | last_exception = e | ||
| 159 | break | ||
| 160 | |||
| 161 | cooker.configuration.cmd = oldcmd | ||
| 162 | |||
| 163 | build.usage = "<providee>" | ||
| 164 | |||
| 165 | def clean( self, params ): | ||
| 166 | """Clean a providee""" | ||
| 167 | self.build( params, "clean" ) | ||
| 168 | clean.usage = "<providee>" | ||
| 169 | |||
| 170 | def compile( self, params ): | ||
| 171 | """Execute 'compile' on a providee""" | ||
| 172 | self.build( params, "compile" ) | ||
| 173 | compile.usage = "<providee>" | ||
| 174 | |||
| 175 | def configure( self, params ): | ||
| 176 | """Execute 'configure' on a providee""" | ||
| 177 | self.build( params, "configure" ) | ||
| 178 | configure.usage = "<providee>" | ||
| 179 | |||
| 180 | def edit( self, params ): | ||
| 181 | """Call $EDITOR on a providee""" | ||
| 182 | name = params[0] | ||
| 183 | bbfile = self._findProvider( name ) | ||
| 184 | if bbfile is not None: | ||
| 185 | os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), bbfile ) ) | ||
| 186 | else: | ||
| 187 | print "ERROR: Nothing provides '%s'" % name | ||
| 188 | edit.usage = "<providee>" | ||
| 189 | |||
| 190 | def environment( self, params ): | ||
| 191 | """Dump out the outer BitBake environment (see bbread)""" | ||
| 192 | data.emit_env(sys.__stdout__, cooker.configuration.data, True) | ||
| 193 | |||
| 194 | def exit_( self, params ): | ||
| 195 | """Leave the BitBake Shell""" | ||
| 196 | debugOut( "setting leave_mainloop to true" ) | ||
| 197 | global leave_mainloop | ||
| 198 | leave_mainloop = True | ||
| 199 | |||
| 200 | def fetch( self, params ): | ||
| 201 | """Fetch a providee""" | ||
| 202 | self.build( params, "fetch" ) | ||
| 203 | fetch.usage = "<providee>" | ||
| 204 | |||
| 205 | def fileBuild( self, params, cmd = "build" ): | ||
| 206 | """Parse and build a .bb file""" | ||
| 207 | name = params[0] | ||
| 208 | bf = completeFilePath( name ) | ||
| 209 | print "SHELL: Calling '%s' on '%s'" % ( cmd, bf ) | ||
| 210 | |||
| 211 | oldcmd = cooker.configuration.cmd | ||
| 212 | cooker.configuration.cmd = cmd | ||
| 213 | cooker.build_cache = [] | ||
| 214 | cooker.build_cache_fail = [] | ||
| 215 | |||
| 216 | thisdata = copy.deepcopy( initdata ) | ||
| 217 | # Caution: parse.handle modifies thisdata, hence it would | ||
| 218 | # pollute cooker.configuration.data, which is | ||
| 219 | # why we use a safe copy obtained from cooker right after | ||
| 220 | # parsing the initial *.conf files | ||
| 221 | try: | ||
| 222 | bbfile_data = parse.handle( bf, thisdata ) | ||
| 223 | except parse.ParseError: | ||
| 224 | print "ERROR: Unable to open or parse '%s'" % bf | ||
| 225 | else: | ||
| 226 | item = data.getVar('PN', bbfile_data, 1) | ||
| 227 | data.setVar( "_task_cache", [], bbfile_data ) # force | ||
| 228 | try: | ||
| 229 | cooker.tryBuildPackage( os.path.abspath( bf ), item, bbfile_data ) | ||
| 230 | except build.EventException, e: | ||
| 231 | print "ERROR: Couldn't build '%s'" % name | ||
| 232 | global last_exception | ||
| 233 | last_exception = e | ||
| 234 | |||
| 235 | cooker.configuration.cmd = oldcmd | ||
| 236 | fileBuild.usage = "<bbfile>" | ||
| 237 | |||
| 238 | def fileClean( self, params ): | ||
| 239 | """Clean a .bb file""" | ||
| 240 | self.fileBuild( params, "clean" ) | ||
| 241 | fileClean.usage = "<bbfile>" | ||
| 242 | |||
| 243 | def fileEdit( self, params ): | ||
| 244 | """Call $EDITOR on a .bb file""" | ||
| 245 | name = params[0] | ||
| 246 | os.system( "%s %s" % ( os.environ.get( "EDITOR", "vi" ), completeFilePath( name ) ) ) | ||
| 247 | fileEdit.usage = "<bbfile>" | ||
| 248 | |||
| 249 | def fileRebuild( self, params ): | ||
| 250 | """Rebuild (clean & build) a .bb file""" | ||
| 251 | self.fileClean( params ) | ||
| 252 | self.fileBuild( params ) | ||
| 253 | fileRebuild.usage = "<bbfile>" | ||
| 254 | |||
| 255 | def force( self, params ): | ||
| 256 | """Toggle force task execution flag (see bitbake -f)""" | ||
| 257 | cooker.configuration.force = not cooker.configuration.force | ||
| 258 | print "SHELL: Force Flag is now '%s'" % repr( cooker.configuration.force ) | ||
| 259 | |||
| 260 | def help( self, params ): | ||
| 261 | """Show a comprehensive list of commands and their purpose""" | ||
| 262 | print "="*30, "Available Commands", "="*30 | ||
| 263 | allcmds = cmds.keys() | ||
| 264 | allcmds.sort() | ||
| 265 | for cmd in allcmds: | ||
| 266 | function,numparams,usage,helptext = cmds[cmd] | ||
| 267 | print "| %s | %s" % (usage.ljust(30), helptext) | ||
| 268 | print "="*78 | ||
| 269 | |||
| 270 | def lastError( self, params ): | ||
| 271 | """Show the reason or log that was produced by the last BitBake event exception""" | ||
| 272 | if last_exception is None: | ||
| 273 | print "SHELL: No Errors yet (Phew)..." | ||
| 274 | else: | ||
| 275 | reason, event = last_exception.args | ||
| 276 | print "SHELL: Reason for the last error: '%s'" % reason | ||
| 277 | if ':' in reason: | ||
| 278 | msg, filename = reason.split( ':' ) | ||
| 279 | filename = filename.strip() | ||
| 280 | print "SHELL: Dumping log file for last error:" | ||
| 281 | try: | ||
| 282 | print open( filename ).read() | ||
| 283 | except IOError: | ||
| 284 | print "ERROR: Couldn't open '%s'" % filename | ||
| 285 | |||
| 286 | def match( self, params ): | ||
| 287 | """Dump all files or providers matching a glob expression""" | ||
| 288 | what, globexpr = params | ||
| 289 | if what == "files": | ||
| 290 | self._checkParsed() | ||
| 291 | for key in globfilter( cooker.pkgdata.keys(), globexpr ): print key | ||
| 292 | elif what == "providers": | ||
| 293 | self._checkParsed() | ||
| 294 | for key in globfilter( cooker.status.pkg_pn.keys(), globexpr ): print key | ||
| 295 | else: | ||
| 296 | print "Usage: match %s" % self.print_.usage | ||
| 297 | match.usage = "<files|providers> <glob>" | ||
| 298 | |||
| 299 | def new( self, params ): | ||
| 300 | """Create a new .bb file and open the editor""" | ||
| 301 | dirname, filename = params | ||
| 302 | packages = '/'.join( data.getVar( "BBFILES", cooker.configuration.data, 1 ).split('/')[:-2] ) | ||
| 303 | fulldirname = "%s/%s" % ( packages, dirname ) | ||
| 304 | |||
| 305 | if not os.path.exists( fulldirname ): | ||
| 306 | print "SHELL: Creating '%s'" % fulldirname | ||
| 307 | os.mkdir( fulldirname ) | ||
| 308 | if os.path.exists( fulldirname ) and os.path.isdir( fulldirname ): | ||
| 309 | if os.path.exists( "%s/%s" % ( fulldirname, filename ) ): | ||
| 310 | print "SHELL: ERROR: %s/%s already exists" % ( fulldirname, filename ) | ||
| 311 | return False | ||
| 312 | print "SHELL: Creating '%s/%s'" % ( fulldirname, filename ) | ||
| 313 | newpackage = open( "%s/%s" % ( fulldirname, filename ), "w" ) | ||
| 314 | print >>newpackage,"""DESCRIPTION = "" | ||
| 315 | SECTION = "" | ||
| 316 | AUTHOR = "" | ||
| 317 | HOMEPAGE = "" | ||
| 318 | MAINTAINER = "" | ||
| 319 | LICENSE = "GPL" | ||
| 320 | PR = "r0" | ||
| 321 | |||
| 322 | SRC_URI = "" | ||
| 323 | |||
| 324 | #inherit base | ||
| 325 | |||
| 326 | #do_configure() { | ||
| 327 | # | ||
| 328 | #} | ||
| 329 | |||
| 330 | #do_compile() { | ||
| 331 | # | ||
| 332 | #} | ||
| 333 | |||
| 334 | #do_stage() { | ||
| 335 | # | ||
| 336 | #} | ||
| 337 | |||
| 338 | #do_install() { | ||
| 339 | # | ||
| 340 | #} | ||
| 341 | """ | ||
| 342 | newpackage.close() | ||
| 343 | os.system( "%s %s/%s" % ( os.environ.get( "EDITOR" ), fulldirname, filename ) ) | ||
| 344 | new.usage = "<directory> <filename>" | ||
| 345 | |||
| 346 | def pasteBin( self, params ): | ||
| 347 | """Send a command + output buffer to http://pastebin.com""" | ||
| 348 | index = params[0] | ||
| 349 | contents = self._shell.myout.buffer( int( index ) ) | ||
| 350 | status, error, location = sendToPastebin( contents ) | ||
| 351 | if status == 302: | ||
| 352 | print "SHELL: Pasted to %s" % location | ||
| 353 | else: | ||
| 354 | print "ERROR: %s %s" % ( status, error ) | ||
| 355 | pasteBin.usage = "<index>" | ||
| 356 | |||
| 357 | def pasteLog( self, params ): | ||
| 358 | """Send the last event exception error log (if there is one) to http://pastebin.com""" | ||
| 359 | if last_exception is None: | ||
| 360 | print "SHELL: No Errors yet (Phew)..." | ||
| 361 | else: | ||
| 362 | reason, event = last_exception.args | ||
| 363 | print "SHELL: Reason for the last error: '%s'" % reason | ||
| 364 | if ':' in reason: | ||
| 365 | msg, filename = reason.split( ':' ) | ||
| 366 | filename = filename.strip() | ||
| 367 | print "SHELL: Pasting log file to pastebin..." | ||
| 368 | |||
| 369 | status, error, location = sendToPastebin( open( filename ).read() ) | ||
| 370 | |||
| 371 | if status == 302: | ||
| 372 | print "SHELL: Pasted to %s" % location | ||
| 373 | else: | ||
| 374 | print "ERROR: %s %s" % ( status, error ) | ||
| 375 | |||
| 376 | def patch( self, params ): | ||
| 377 | """Execute 'patch' command on a providee""" | ||
| 378 | self.build( params, "patch" ) | ||
| 379 | patch.usage = "<providee>" | ||
| 380 | |||
| 381 | def parse( self, params ): | ||
| 382 | """(Re-)parse .bb files and calculate the dependency graph""" | ||
| 383 | cooker.status = cooker.ParsingStatus() | ||
| 384 | ignore = data.getVar("ASSUME_PROVIDED", cooker.configuration.data, 1) or "" | ||
| 385 | cooker.status.ignored_dependencies = set( ignore.split() ) | ||
| 386 | cooker.handleCollections( data.getVar("BBFILE_COLLECTIONS", cooker.configuration.data, 1) ) | ||
| 387 | |||
| 388 | cooker.collect_bbfiles( cooker.myProgressCallback ) | ||
| 389 | cooker.buildDepgraph() | ||
| 390 | global parsed | ||
| 391 | parsed = True | ||
| 392 | |||
| 393 | |||
| 394 | def getvar( self, params ): | ||
| 395 | """Dump the contents of an outer BitBake environment variable""" | ||
| 396 | var = params[0] | ||
| 397 | value = data.getVar( var, cooker.configuration.data, 1 ) | ||
| 398 | print value | ||
| 399 | getvar.usage = "<variable>" | ||
| 400 | |||
| 401 | def peek( self, params ): | ||
| 402 | """Dump contents of variable defined in providee's metadata""" | ||
| 403 | name, var = params | ||
| 404 | bbfile = self._findProvider( name ) | ||
| 405 | if bbfile is not None: | ||
| 406 | value = cooker.pkgdata[bbfile].getVar( var, 1 ) | ||
| 407 | print value | ||
| 408 | else: | ||
| 409 | print "ERROR: Nothing provides '%s'" % name | ||
| 410 | peek.usage = "<providee> <variable>" | ||
| 411 | |||
| 412 | def poke( self, params ): | ||
| 413 | """Set contents of variable defined in providee's metadata""" | ||
| 414 | name, var, value = params | ||
| 415 | bbfile = self._findProvider( name ) | ||
| 416 | if bbfile is not None: | ||
| 417 | d = cooker.pkgdata[bbfile] | ||
| 418 | data.setVar( var, value, d ) | ||
| 419 | |||
| 420 | # mark the change semi-persistent | ||
| 421 | cooker.pkgdata.setDirty(bbfile, d) | ||
| 422 | print "OK" | ||
| 423 | else: | ||
| 424 | print "ERROR: Nothing provides '%s'" % name | ||
| 425 | poke.usage = "<providee> <variable> <value>" | ||
| 426 | |||
| 427 | def print_( self, params ): | ||
| 428 | """Dump all files or providers""" | ||
| 429 | what = params[0] | ||
| 430 | if what == "files": | ||
| 431 | self._checkParsed() | ||
| 432 | for key in cooker.pkgdata.keys(): print key | ||
| 433 | elif what == "providers": | ||
| 434 | self._checkParsed() | ||
| 435 | for key in cooker.status.providers.keys(): print key | ||
| 436 | else: | ||
| 437 | print "Usage: print %s" % self.print_.usage | ||
| 438 | print_.usage = "<files|providers>" | ||
| 439 | |||
| 440 | def python( self, params ): | ||
| 441 | """Enter the expert mode - an interactive BitBake Python Interpreter""" | ||
| 442 | sys.ps1 = "EXPERT BB>>> " | ||
| 443 | sys.ps2 = "EXPERT BB... " | ||
| 444 | import code | ||
| 445 | interpreter = code.InteractiveConsole( dict( globals() ) ) | ||
| 446 | interpreter.interact( "SHELL: Expert Mode - BitBake Python %s\nType 'help' for more information, press CTRL-D to switch back to BBSHELL." % sys.version ) | ||
| 447 | |||
| 448 | def showdata( self, params ): | ||
| 449 | """Execute 'showdata' on a providee""" | ||
| 450 | self.build( params, "showdata" ) | ||
| 451 | showdata.usage = "<providee>" | ||
| 452 | |||
| 453 | def setVar( self, params ): | ||
| 454 | """Set an outer BitBake environment variable""" | ||
| 455 | var, value = params | ||
| 456 | data.setVar( var, value, cooker.configuration.data ) | ||
| 457 | print "OK" | ||
| 458 | setVar.usage = "<variable> <value>" | ||
| 459 | |||
| 460 | def rebuild( self, params ): | ||
| 461 | """Clean and rebuild a .bb file or a providee""" | ||
| 462 | self.build( params, "clean" ) | ||
| 463 | self.build( params, "build" ) | ||
| 464 | rebuild.usage = "<providee>" | ||
| 465 | |||
| 466 | def shell( self, params ): | ||
| 467 | """Execute a shell command and dump the output""" | ||
| 468 | if params != "": | ||
| 469 | print commands.getoutput( " ".join( params ) ) | ||
| 470 | shell.usage = "<...>" | ||
| 471 | |||
| 472 | def stage( self, params ): | ||
| 473 | """Execute 'stage' on a providee""" | ||
| 474 | self.build( params, "stage" ) | ||
| 475 | stage.usage = "<providee>" | ||
| 476 | |||
| 477 | def status( self, params ): | ||
| 478 | """<just for testing>""" | ||
| 479 | print "-" * 78 | ||
| 480 | print "build cache = '%s'" % cooker.build_cache | ||
| 481 | print "build cache fail = '%s'" % cooker.build_cache_fail | ||
| 482 | print "building list = '%s'" % cooker.building_list | ||
| 483 | print "build path = '%s'" % cooker.build_path | ||
| 484 | print "consider_msgs_cache = '%s'" % cooker.consider_msgs_cache | ||
| 485 | print "build stats = '%s'" % cooker.stats | ||
| 486 | if last_exception is not None: print "last_exception = '%s'" % repr( last_exception.args ) | ||
| 487 | print "memory output contents = '%s'" % self._shell.myout._buffer | ||
| 488 | |||
| 489 | def test( self, params ): | ||
| 490 | """<just for testing>""" | ||
| 491 | print "testCommand called with '%s'" % params | ||
| 492 | |||
| 493 | def unpack( self, params ): | ||
| 494 | """Execute 'unpack' on a providee""" | ||
| 495 | self.build( params, "unpack" ) | ||
| 496 | unpack.usage = "<providee>" | ||
| 497 | |||
| 498 | def which( self, params ): | ||
| 499 | """Computes the providers for a given providee""" | ||
| 500 | item = params[0] | ||
| 501 | |||
| 502 | self._checkParsed() | ||
| 503 | |||
| 504 | preferred = data.getVar( "PREFERRED_PROVIDER_%s" % item, cooker.configuration.data, 1 ) | ||
| 505 | if not preferred: preferred = item | ||
| 506 | |||
| 507 | try: | ||
| 508 | lv, lf, pv, pf = cooker.findBestProvider( preferred ) | ||
| 509 | except KeyError: | ||
| 510 | lv, lf, pv, pf = (None,)*4 | ||
| 511 | |||
| 512 | try: | ||
| 513 | providers = cooker.status.providers[item] | ||
| 514 | except KeyError: | ||
| 515 | print "SHELL: ERROR: Nothing provides", preferred | ||
| 516 | else: | ||
| 517 | for provider in providers: | ||
| 518 | if provider == pf: provider = " (***) %s" % provider | ||
| 519 | else: provider = " %s" % provider | ||
| 520 | print provider | ||
| 521 | which.usage = "<providee>" | ||
| 522 | |||
| 523 | ########################################################################## | ||
| 524 | # Common helper functions | ||
| 525 | ########################################################################## | ||
| 526 | |||
| 527 | def completeFilePath( bbfile ): | ||
| 528 | """Get the complete bbfile path""" | ||
| 529 | if not cooker.pkgdata: return bbfile | ||
| 530 | for key in cooker.pkgdata.keys(): | ||
| 531 | if key.endswith( bbfile ): | ||
| 532 | return key | ||
| 533 | return bbfile | ||
| 534 | |||
| 535 | def sendToPastebin( content ): | ||
| 536 | """Send content to http://www.pastebin.com""" | ||
| 537 | mydata = {} | ||
| 538 | mydata["parent_pid"] = "" | ||
| 539 | mydata["format"] = "bash" | ||
| 540 | mydata["code2"] = content | ||
| 541 | mydata["paste"] = "Send" | ||
| 542 | mydata["poster"] = "%s@%s" % ( os.environ.get( "USER", "unknown" ), socket.gethostname() or "unknown" ) | ||
| 543 | params = urllib.urlencode( mydata ) | ||
| 544 | headers = {"Content-type": "application/x-www-form-urlencoded","Accept": "text/plain"} | ||
| 545 | |||
| 546 | conn = httplib.HTTPConnection( "pastebin.com:80" ) | ||
| 547 | conn.request("POST", "/", params, headers ) | ||
| 548 | |||
| 549 | response = conn.getresponse() | ||
| 550 | conn.close() | ||
| 551 | |||
| 552 | return response.status, response.reason, response.getheader( "location" ) or "unknown" | ||
| 553 | |||
| 554 | def completer( text, state ): | ||
| 555 | """Return a possible readline completion""" | ||
| 556 | debugOut( "completer called with text='%s', state='%d'" % ( text, state ) ) | ||
| 557 | |||
| 558 | if state == 0: | ||
| 559 | line = readline.get_line_buffer() | ||
| 560 | if " " in line: | ||
| 561 | line = line.split() | ||
| 562 | # we are in second (or more) argument | ||
| 563 | if line[0] in cmds and hasattr( cmds[line[0]][0], "usage" ): # known command and usage | ||
| 564 | u = getattr( cmds[line[0]][0], "usage" ).split()[0] | ||
| 565 | if u == "<variable>": | ||
| 566 | allmatches = cooker.configuration.data.keys() | ||
| 567 | elif u == "<bbfile>": | ||
| 568 | if cooker.pkgdata is None: allmatches = [ "(No Matches Available. Parsed yet?)" ] | ||
| 569 | else: allmatches = [ x.split("/")[-1] for x in cooker.pkgdata.keys() ] | ||
| 570 | elif u == "<providee>": | ||
| 571 | if cooker.pkgdata is None: allmatches = [ "(No Matches Available. Parsed yet?)" ] | ||
| 572 | else: allmatches = cooker.status.providers.iterkeys() | ||
| 573 | else: allmatches = [ "(No tab completion available for this command)" ] | ||
| 574 | else: allmatches = [ "(No tab completion available for this command)" ] | ||
| 575 | else: | ||
| 576 | # we are in first argument | ||
| 577 | allmatches = cmds.iterkeys() | ||
| 578 | |||
| 579 | completer.matches = [ x for x in allmatches if x[:len(text)] == text ] | ||
| 580 | #print "completer.matches = '%s'" % completer.matches | ||
| 581 | if len( completer.matches ) > state: | ||
| 582 | return completer.matches[state] | ||
| 583 | else: | ||
| 584 | return None | ||
| 585 | |||
| 586 | def debugOut( text ): | ||
| 587 | if debug: | ||
| 588 | sys.stderr.write( "( %s )\n" % text ) | ||
| 589 | |||
| 590 | def columnize( alist, width = 80 ): | ||
| 591 | """ | ||
| 592 | A word-wrap function that preserves existing line breaks | ||
| 593 | and most spaces in the text. Expects that existing line | ||
| 594 | breaks are posix newlines (\n). | ||
| 595 | """ | ||
| 596 | return reduce(lambda line, word, width=width: '%s%s%s' % | ||
| 597 | (line, | ||
| 598 | ' \n'[(len(line[line.rfind('\n')+1:]) | ||
| 599 | + len(word.split('\n',1)[0] | ||
| 600 | ) >= width)], | ||
| 601 | word), | ||
| 602 | alist | ||
| 603 | ) | ||
| 604 | |||
| 605 | def globfilter( names, pattern ): | ||
| 606 | return fnmatch.filter( names, pattern ) | ||
| 607 | |||
| 608 | ########################################################################## | ||
| 609 | # Class MemoryOutput | ||
| 610 | ########################################################################## | ||
| 611 | |||
| 612 | class MemoryOutput: | ||
| 613 | """File-like output class buffering the output of the last 10 commands""" | ||
| 614 | def __init__( self, delegate ): | ||
| 615 | self.delegate = delegate | ||
| 616 | self._buffer = [] | ||
| 617 | self.text = [] | ||
| 618 | self._command = None | ||
| 619 | |||
| 620 | def startCommand( self, command ): | ||
| 621 | self._command = command | ||
| 622 | self.text = [] | ||
| 623 | def endCommand( self ): | ||
| 624 | if self._command is not None: | ||
| 625 | if len( self._buffer ) == 10: del self._buffer[0] | ||
| 626 | self._buffer.append( ( self._command, self.text ) ) | ||
| 627 | def removeLast( self ): | ||
| 628 | if self._buffer: | ||
| 629 | del self._buffer[ len( self._buffer ) - 1 ] | ||
| 630 | self.text = [] | ||
| 631 | self._command = None | ||
| 632 | def lastBuffer( self ): | ||
| 633 | if self._buffer: | ||
| 634 | return self._buffer[ len( self._buffer ) -1 ][1] | ||
| 635 | def bufferedCommands( self ): | ||
| 636 | return [ cmd for cmd, output in self._buffer ] | ||
| 637 | def buffer( self, i ): | ||
| 638 | if i < len( self._buffer ): | ||
| 639 | return "BB>> %s\n%s" % ( self._buffer[i][0], "".join( self._buffer[i][1] ) ) | ||
| 640 | else: return "ERROR: Invalid buffer number. Buffer needs to be in (0, %d)" % ( len( self._buffer ) - 1 ) | ||
| 641 | def write( self, text ): | ||
| 642 | if self._command is not None and text != "BB>> ": self.text.append( text ) | ||
| 643 | if self.delegate is not None: self.delegate.write( text ) | ||
| 644 | def flush( self ): | ||
| 645 | return self.delegate.flush() | ||
| 646 | def fileno( self ): | ||
| 647 | return self.delegate.fileno() | ||
| 648 | def isatty( self ): | ||
| 649 | return self.delegate.isatty() | ||
| 650 | |||
| 651 | ########################################################################## | ||
| 652 | # Class BitBakeShell | ||
| 653 | ########################################################################## | ||
| 654 | |||
| 655 | class BitBakeShell: | ||
| 656 | |||
| 657 | def __init__( self ): | ||
| 658 | """Register commands and set up readline""" | ||
| 659 | self.commandQ = Queue.Queue() | ||
| 660 | self.commands = BitBakeShellCommands( self ) | ||
| 661 | self.myout = MemoryOutput( sys.stdout ) | ||
| 662 | self.historyfilename = os.path.expanduser( "~/.bbsh_history" ) | ||
| 663 | self.startupfilename = os.path.expanduser( "~/.bbsh_startup" ) | ||
| 664 | |||
| 665 | readline.set_completer( completer ) | ||
| 666 | readline.set_completer_delims( " " ) | ||
| 667 | readline.parse_and_bind("tab: complete") | ||
| 668 | |||
| 669 | try: | ||
| 670 | readline.read_history_file( self.historyfilename ) | ||
| 671 | except IOError: | ||
| 672 | pass # It doesn't exist yet. | ||
| 673 | |||
| 674 | print __credits__ | ||
| 675 | |||
| 676 | # save initial cooker configuration (will be reused in file*** commands) | ||
| 677 | global initdata | ||
| 678 | initdata = copy.deepcopy( cooker.configuration.data ) | ||
| 679 | |||
| 680 | def cleanup( self ): | ||
| 681 | """Write readline history and clean up resources""" | ||
| 682 | debugOut( "writing command history" ) | ||
| 683 | try: | ||
| 684 | readline.write_history_file( self.historyfilename ) | ||
| 685 | except: | ||
| 686 | print "SHELL: Unable to save command history" | ||
| 687 | |||
| 688 | def registerCommand( self, command, function, numparams = 0, usage = "", helptext = "" ): | ||
| 689 | """Register a command""" | ||
| 690 | if usage == "": usage = command | ||
| 691 | if helptext == "": helptext = function.__doc__ or "<not yet documented>" | ||
| 692 | cmds[command] = ( function, numparams, usage, helptext ) | ||
| 693 | |||
| 694 | def processCommand( self, command, params ): | ||
| 695 | """Process a command. Check number of params and print a usage string, if appropriate""" | ||
| 696 | debugOut( "processing command '%s'..." % command ) | ||
| 697 | try: | ||
| 698 | function, numparams, usage, helptext = cmds[command] | ||
| 699 | except KeyError: | ||
| 700 | print "SHELL: ERROR: '%s' command is not a valid command." % command | ||
| 701 | self.myout.removeLast() | ||
| 702 | else: | ||
| 703 | if (numparams != -1) and (not len( params ) == numparams): | ||
| 704 | print "Usage: '%s'" % usage | ||
| 705 | return | ||
| 706 | |||
| 707 | result = function( self.commands, params ) | ||
| 708 | debugOut( "result was '%s'" % result ) | ||
| 709 | |||
| 710 | def processStartupFile( self ): | ||
| 711 | """Read and execute all commands found in $HOME/.bbsh_startup""" | ||
| 712 | if os.path.exists( self.startupfilename ): | ||
| 713 | startupfile = open( self.startupfilename, "r" ) | ||
| 714 | for cmdline in startupfile: | ||
| 715 | debugOut( "processing startup line '%s'" % cmdline ) | ||
| 716 | if not cmdline: | ||
| 717 | continue | ||
| 718 | if "|" in cmdline: | ||
| 719 | print "ERROR: '|' in startup file is not allowed. Ignoring line" | ||
| 720 | continue | ||
| 721 | self.commandQ.put( cmdline.strip() ) | ||
| 722 | |||
| 723 | def main( self ): | ||
| 724 | """The main command loop""" | ||
| 725 | while not leave_mainloop: | ||
| 726 | try: | ||
| 727 | if self.commandQ.empty(): | ||
| 728 | sys.stdout = self.myout.delegate | ||
| 729 | cmdline = raw_input( "BB>> " ) | ||
| 730 | sys.stdout = self.myout | ||
| 731 | else: | ||
| 732 | cmdline = self.commandQ.get() | ||
| 733 | if cmdline: | ||
| 734 | allCommands = cmdline.split( ';' ) | ||
| 735 | for command in allCommands: | ||
| 736 | pipecmd = None | ||
| 737 | # | ||
| 738 | # special case for expert mode | ||
| 739 | if command == 'python': | ||
| 740 | sys.stdout = self.myout.delegate | ||
| 741 | self.processCommand( command, "" ) | ||
| 742 | sys.stdout = self.myout | ||
| 743 | else: | ||
| 744 | self.myout.startCommand( command ) | ||
| 745 | if '|' in command: # disable output | ||
| 746 | command, pipecmd = command.split( '|' ) | ||
| 747 | delegate = self.myout.delegate | ||
| 748 | self.myout.delegate = None | ||
| 749 | tokens = shlex.split( command, True ) | ||
| 750 | self.processCommand( tokens[0], tokens[1:] or "" ) | ||
| 751 | self.myout.endCommand() | ||
| 752 | if pipecmd is not None: # restore output | ||
| 753 | self.myout.delegate = delegate | ||
| 754 | |||
| 755 | pipe = popen2.Popen4( pipecmd ) | ||
| 756 | pipe.tochild.write( "\n".join( self.myout.lastBuffer() ) ) | ||
| 757 | pipe.tochild.close() | ||
| 758 | sys.stdout.write( pipe.fromchild.read() ) | ||
| 759 | # | ||
| 760 | except EOFError: | ||
| 761 | print | ||
| 762 | return | ||
| 763 | except KeyboardInterrupt: | ||
| 764 | print | ||
| 765 | |||
| 766 | ########################################################################## | ||
| 767 | # Start function - called from the BitBake command line utility | ||
| 768 | ########################################################################## | ||
| 769 | |||
| 770 | def start( aCooker ): | ||
| 771 | global cooker | ||
| 772 | cooker = aCooker | ||
| 773 | bbshell = BitBakeShell() | ||
| 774 | bbshell.processStartupFile() | ||
| 775 | bbshell.main() | ||
| 776 | bbshell.cleanup() | ||
| 777 | |||
| 778 | if __name__ == "__main__": | ||
| 779 | print "SHELL: Sorry, this program should only be called by BitBake." | ||
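The command registration loop in BitBakeShellCommands.__init__ relies on a small naming convention: every public method becomes a shell command, and a trailing underscore is stripped so that names colliding with Python keywords or builtins (exit, print) can still be offered as commands. A hedged sketch of just that convention; the class and methods below are placeholders, not the real command set:

    class Commands:
        def build(self, params): pass    # registered as "build"
        def exit_(self, params): pass    # registered as "exit"
        def print_(self, params): pass   # registered as "print"

    for attr in Commands.__dict__:
        if not attr.startswith("_"):
            if attr.endswith("_"):
                command = attr[:-1].lower()
            else:
                command = attr.lower()
            print command                # build, exit, print (in no particular order)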
diff --git a/bitbake/lib/bb/shell.pyc b/bitbake/lib/bb/shell.pyc new file mode 100644 index 0000000000..7f9d267504 --- /dev/null +++ b/bitbake/lib/bb/shell.pyc | |||
| Binary files differ | |||
diff --git a/bitbake/lib/bb/utils.py b/bitbake/lib/bb/utils.py new file mode 100644 index 0000000000..ee8713a2d0 --- /dev/null +++ b/bitbake/lib/bb/utils.py | |||
| @@ -0,0 +1,71 @@ | |||
| 1 | # ex:ts=4:sw=4:sts=4:et | ||
| 2 | # -*- tab-width: 4; c-basic-offset: 4; indent-tabs-mode: nil -*- | ||
| 3 | """ | ||
| 4 | BitBake Utility Functions | ||
| 5 | |||
| 6 | This program is free software; you can redistribute it and/or modify it under | ||
| 7 | the terms of the GNU General Public License as published by the Free Software | ||
| 8 | Foundation; either version 2 of the License, or (at your option) any later | ||
| 9 | version. | ||
| 10 | |||
| 11 | This program is distributed in the hope that it will be useful, but WITHOUT | ||
| 12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS | ||
| 13 | FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. | ||
| 14 | |||
| 15 | You should have received a copy of the GNU General Public License along with | ||
| 16 | this program; if not, write to the Free Software Foundation, Inc., 59 Temple | ||
| 17 | Place, Suite 330, Boston, MA 02111-1307 USA. | ||
| 18 | |||
| 19 | This file is part of the BitBake build tools. | ||
| 20 | """ | ||
| 21 | |||
| 22 | digits = "0123456789" | ||
| 23 | ascii_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ" | ||
| 24 | |||
| 25 | import re | ||
| 26 | |||
| 27 | def explode_version(s): | ||
| 28 | r = [] | ||
| 29 | alpha_regexp = re.compile('^([a-zA-Z]+)(.*)$') | ||
| 30 | numeric_regexp = re.compile('^(\d+)(.*)$') | ||
| 31 | while (s != ''): | ||
| 32 | if s[0] in digits: | ||
| 33 | m = numeric_regexp.match(s) | ||
| 34 | r.append(int(m.group(1))) | ||
| 35 | s = m.group(2) | ||
| 36 | continue | ||
| 37 | if s[0] in ascii_letters: | ||
| 38 | m = alpha_regexp.match(s) | ||
| 39 | r.append(m.group(1)) | ||
| 40 | s = m.group(2) | ||
| 41 | continue | ||
| 42 | s = s[1:] | ||
| 43 | return r | ||
| 44 | |||
| 45 | def vercmp_part(a, b): | ||
| 46 | va = explode_version(a) | ||
| 47 | vb = explode_version(b) | ||
| 48 | while True: | ||
| 49 | if va == []: | ||
| 50 | ca = None | ||
| 51 | else: | ||
| 52 | ca = va.pop(0) | ||
| 53 | if vb == []: | ||
| 54 | cb = None | ||
| 55 | else: | ||
| 56 | cb = vb.pop(0) | ||
| 57 | if ca == None and cb == None: | ||
| 58 | return 0 | ||
| 59 | if ca > cb: | ||
| 60 | return 1 | ||
| 61 | if ca < cb: | ||
| 62 | return -1 | ||
| 63 | |||
| 64 | def vercmp(ta, tb): | ||
| 65 | (va, ra) = ta | ||
| 66 | (vb, rb) = tb | ||
| 67 | |||
| 68 | r = vercmp_part(va, vb) | ||
| 69 | if (r == 0): | ||
| 70 | r = vercmp_part(ra, rb) | ||
| 71 | return r | ||
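A few illustrative calls, assuming the functions above are available (e.g. pasted into a Python 2 session or imported from bb.utils):

    print explode_version("1.2.3rc2")            # [1, 2, 3, 'rc', 2]
    print vercmp_part("1.2.10", "1.2.9")         # 1   (components compare numerically)
    print vercmp(("1.0", "r1"), ("1.0", "r2"))   # -1  (versions equal, revision decides)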
diff --git a/bitbake/lib/bb/utils.pyc b/bitbake/lib/bb/utils.pyc new file mode 100644 index 0000000000..9f2bc75cbc --- /dev/null +++ b/bitbake/lib/bb/utils.pyc | |||
| Binary files differ | |||
