Diffstat (limited to 'bitbake/lib/bb/pysh')
-rw-r--r--  bitbake/lib/bb/pysh/__init__.py           0
-rw-r--r--  bitbake/lib/bb/pysh/builtin.py          710
-rw-r--r--  bitbake/lib/bb/pysh/interp.py          1367
-rw-r--r--  bitbake/lib/bb/pysh/lsprof.py           116
-rw-r--r--  bitbake/lib/bb/pysh/pysh.py             167
-rw-r--r--  bitbake/lib/bb/pysh/pyshlex.py          888
-rw-r--r--  bitbake/lib/bb/pysh/pyshyacc.py         772
-rw-r--r--  bitbake/lib/bb/pysh/sherrors.py          41
-rw-r--r--  bitbake/lib/bb/pysh/subprocess_fix.py    77
9 files changed, 4138 insertions(+), 0 deletions(-)
diff --git a/bitbake/lib/bb/pysh/__init__.py b/bitbake/lib/bb/pysh/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/bitbake/lib/bb/pysh/__init__.py
diff --git a/bitbake/lib/bb/pysh/builtin.py b/bitbake/lib/bb/pysh/builtin.py
new file mode 100644
index 0000000000..25ad22eb74
--- /dev/null
+++ b/bitbake/lib/bb/pysh/builtin.py
@@ -0,0 +1,710 @@
1 | # builtin.py - builtins and utilities definitions for pysh. | ||
2 | # | ||
3 | # Copyright 2007 Patrick Mezard | ||
4 | # | ||
5 | # This software may be used and distributed according to the terms | ||
6 | # of the GNU General Public License, incorporated herein by reference. | ||
7 | |||
8 | """Builtin and internal utilities implementations. | ||
9 | |||
10 | - Beware not to use the Python interpreter environment as if it were the shell | ||
11 | environment. For instance, a command's working directory must be explicitly handled | ||
12 | through env['PWD'] instead of relying on the Python working directory. | ||
13 | """ | ||
14 | import errno | ||
15 | import optparse | ||
16 | import os | ||
17 | import re | ||
18 | import subprocess | ||
19 | import sys | ||
20 | import time | ||
21 | |||
22 | def has_subprocess_bug(): | ||
23 | return getattr(subprocess, 'list2cmdline') and \ | ||
24 | ( subprocess.list2cmdline(['']) == '' or \ | ||
25 | subprocess.list2cmdline(['foo|bar']) == 'foo|bar') | ||
26 | |||
27 | # Detect python bug 1634343: "subprocess swallows empty arguments under win32" | ||
28 | # <http://sourceforge.net/tracker/index.php?func=detail&aid=1634343&group_id=5470&atid=105470> | ||
29 | # Also detect: "[ 1710802 ] subprocess must escape redirection characters under win32" | ||
30 | # <http://sourceforge.net/tracker/index.php?func=detail&aid=1710802&group_id=5470&atid=105470> | ||
31 | if has_subprocess_bug(): | ||
32 | import subprocess_fix | ||
33 | subprocess.list2cmdline = subprocess_fix.list2cmdline | ||
34 | |||
35 | from sherrors import * | ||
36 | |||
37 | class NonExitingParser(optparse.OptionParser): | ||
38 | """OptionParser default behaviour upon error is to print the error message and | ||
39 | exit. Raise a utility error instead. | ||
40 | """ | ||
41 | def error(self, msg): | ||
42 | raise UtilityError(msg) | ||
43 | |||
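To make the motivation concrete: optparse's default error() prints a message and calls sys.exit(2), which would terminate the whole interpreter on any malformed utility option. A self-contained sketch of the same idea, with a stand-in UtilityError instead of the one imported from sherrors:

    import optparse

    class UtilityError(Exception):
        """Stand-in for pysh's sherrors.UtilityError."""

    class NonExitingParser(optparse.OptionParser):
        def error(self, msg):
            # Never sys.exit(): let the interpreter turn this into a failed
            # utility instead of killing the whole process.
            raise UtilityError(msg)

    parser = NonExitingParser(usage="demo")
    parser.add_option('-f', action='store_true', dest='has_f', default=False)
    try:
        parser.parse_args(['-z'])          # unknown option
    except UtilityError as e:
        print('caught: %s' % e)            # caught: no such option: -z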
44 | #------------------------------------------------------------------------------- | ||
45 | # set special builtin | ||
46 | #------------------------------------------------------------------------------- | ||
47 | OPT_SET = NonExitingParser(usage="set - set or unset options and positional parameters") | ||
48 | OPT_SET.add_option( '-f', action='store_true', dest='has_f', default=False, | ||
49 | help='The shell shall disable pathname expansion.') | ||
50 | OPT_SET.add_option('-e', action='store_true', dest='has_e', default=False, | ||
51 | help="""When this option is on, if a simple command fails for any of the \ | ||
52 | reasons listed in Consequences of Shell Errors or returns an exit status \ | ||
53 | value >0, and is not part of the compound list following a while, until, \ | ||
54 | or if keyword, and is not a part of an AND or OR list, and is not a \ | ||
55 | pipeline preceded by the ! reserved word, then the shell shall immediately \ | ||
56 | exit.""") | ||
57 | OPT_SET.add_option('-x', action='store_true', dest='has_x', default=False, | ||
58 | help="""The shell shall write to standard error a trace for each command \ | ||
59 | after it expands the command and before it executes it. It is unspecified \ | ||
60 | whether the command that turns tracing off is traced.""") | ||
61 | |||
62 | def builtin_set(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
63 | if 'debug-utility' in debugflags: | ||
64 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
65 | |||
66 | option, args = OPT_SET.parse_args(args) | ||
67 | env = interp.get_env() | ||
68 | |||
69 | if option.has_f: | ||
70 | env.set_opt('-f') | ||
71 | if option.has_e: | ||
72 | env.set_opt('-e') | ||
73 | if option.has_x: | ||
74 | env.set_opt('-x') | ||
75 | return 0 | ||
76 | |||
77 | #------------------------------------------------------------------------------- | ||
78 | # shift special builtin | ||
79 | #------------------------------------------------------------------------------- | ||
80 | def builtin_shift(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
81 | if 'debug-utility' in debugflags: | ||
82 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
83 | |||
84 | params = interp.get_env().get_positional_args() | ||
85 | if args: | ||
86 | try: | ||
87 | n = int(args[0]) | ||
88 | if n > len(params): | ||
89 | raise ValueError() | ||
90 | except ValueError: | ||
91 | return 1 | ||
92 | else: | ||
93 | n = 1 | ||
94 | |||
95 | params[:n] = [] | ||
96 | interp.get_env().set_positional_args(params) | ||
97 | return 0 | ||
98 | |||
99 | #------------------------------------------------------------------------------- | ||
100 | # export special builtin | ||
101 | #------------------------------------------------------------------------------- | ||
102 | OPT_EXPORT = NonExitingParser(usage="export - set the export attribute for variables") | ||
103 | OPT_EXPORT.add_option('-p', action='store_true', dest='has_p', default=False) | ||
104 | |||
105 | def builtin_export(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
106 | if 'debug-utility' in debugflags: | ||
107 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
108 | |||
109 | option, args = OPT_EXPORT.parse_args(args) | ||
110 | if option.has_p: | ||
111 | raise NotImplementedError() | ||
112 | |||
113 | for arg in args: | ||
114 | try: | ||
115 | name, value = arg.split('=', 1) | ||
116 | except ValueError: | ||
117 | name, value = arg, None | ||
118 | env = interp.get_env().export(name, value) | ||
119 | |||
120 | return 0 | ||
121 | |||
122 | #------------------------------------------------------------------------------- | ||
123 | # return special builtin | ||
124 | #------------------------------------------------------------------------------- | ||
125 | def builtin_return(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
126 | if 'debug-utility' in debugflags: | ||
127 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
128 | res = 0 | ||
129 | if args: | ||
130 | try: | ||
131 | res = int(args[0]) | ||
132 | except ValueError: | ||
133 | res = 0 | ||
134 | if not 0<=res<=255: | ||
135 | res = 0 | ||
136 | |||
137 | # BUG: should be last executed command exit code | ||
138 | raise ReturnSignal(res) | ||
139 | |||
140 | #------------------------------------------------------------------------------- | ||
141 | # trap special builtin | ||
142 | #------------------------------------------------------------------------------- | ||
143 | def builtin_trap(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
144 | if 'debug-utility' in debugflags: | ||
145 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
146 | if len(args) < 2: | ||
147 | stderr.write('trap: usage: trap [[arg] signal_spec ...]\n') | ||
148 | return 2 | ||
149 | |||
150 | action = args[0] | ||
151 | for sig in args[1:]: | ||
152 | try: | ||
153 | env.traps[sig] = action | ||
154 | except Exception, e: | ||
155 | stderr.write('trap: %s\n' % str(e)) | ||
156 | return 0 | ||
157 | |||
158 | #------------------------------------------------------------------------------- | ||
159 | # unset special builtin | ||
160 | #------------------------------------------------------------------------------- | ||
161 | OPT_UNSET = NonExitingParser("unset - unset values and attributes of variables and functions") | ||
162 | OPT_UNSET.add_option( '-f', action='store_true', dest='has_f', default=False) | ||
163 | OPT_UNSET.add_option( '-v', action='store_true', dest='has_v', default=False) | ||
164 | |||
165 | def builtin_unset(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
166 | if 'debug-utility' in debugflags: | ||
167 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
168 | |||
169 | option, args = OPT_UNSET.parse_args(args) | ||
170 | |||
171 | status = 0 | ||
172 | env = interp.get_env() | ||
173 | for arg in args: | ||
174 | try: | ||
175 | if option.has_f: | ||
176 | env.remove_function(arg) | ||
177 | else: | ||
178 | del env[arg] | ||
179 | except KeyError: | ||
180 | pass | ||
181 | except VarAssignmentError: | ||
182 | status = 1 | ||
183 | |||
184 | return status | ||
185 | |||
186 | #------------------------------------------------------------------------------- | ||
187 | # wait special builtin | ||
188 | #------------------------------------------------------------------------------- | ||
189 | def builtin_wait(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
190 | if 'debug-utility' in debugflags: | ||
191 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
192 | |||
193 | return interp.wait([int(arg) for arg in args]) | ||
194 | |||
195 | #------------------------------------------------------------------------------- | ||
196 | # cat utility | ||
197 | #------------------------------------------------------------------------------- | ||
198 | def utility_cat(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
199 | if 'debug-utility' in debugflags: | ||
200 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
201 | |||
202 | if not args: | ||
203 | args = ['-'] | ||
204 | |||
205 | status = 0 | ||
206 | for arg in args: | ||
207 | if arg == '-': | ||
208 | data = stdin.read() | ||
209 | else: | ||
210 | path = os.path.join(env['PWD'], arg) | ||
211 | try: | ||
212 | f = file(path, 'rb') | ||
213 | try: | ||
214 | data = f.read() | ||
215 | finally: | ||
216 | f.close() | ||
217 | except IOError, e: | ||
218 | if e.errno != errno.ENOENT: | ||
219 | raise | ||
220 | status = 1 | ||
221 | continue | ||
222 | stdout.write(data) | ||
223 | stdout.flush() | ||
224 | return status | ||
225 | |||
226 | #------------------------------------------------------------------------------- | ||
227 | # cd utility | ||
228 | #------------------------------------------------------------------------------- | ||
229 | OPT_CD = NonExitingParser("cd - change the working directory") | ||
230 | |||
231 | def utility_cd(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
232 | if 'debug-utility' in debugflags: | ||
233 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
234 | |||
235 | option, args = OPT_CD.parse_args(args) | ||
236 | env = interp.get_env() | ||
237 | |||
238 | directory = None | ||
239 | printdir = False | ||
240 | if not args: | ||
241 | home = env.get('HOME') | ||
242 | if not home: | ||
243 | # HOME unset or empty: behaviour unspecified, do nothing | ||
244 | return 0 | ||
245 | else: | ||
246 | directory = home | ||
247 | elif len(args)==1: | ||
248 | directory = args[0] | ||
249 | if directory=='-': | ||
250 | if 'OLDPWD' not in env: | ||
251 | raise UtilityError("OLDPWD not set") | ||
252 | printdir = True | ||
253 | directory = env['OLDPWD'] | ||
254 | else: | ||
255 | raise UtilityError("too many arguments") | ||
256 | |||
257 | curpath = None | ||
258 | # Absolute directories will be handled correctly by the os.path.join call (see the sketch after this function). | ||
259 | if not directory.startswith('.') and not directory.startswith('..'): | ||
260 | cdpaths = env.get('CDPATH', '.').split(';') | ||
261 | for cdpath in cdpaths: | ||
262 | p = os.path.join(cdpath, directory) | ||
263 | if os.path.isdir(p): | ||
264 | curpath = p | ||
265 | break | ||
266 | |||
267 | if curpath is None: | ||
268 | curpath = directory | ||
269 | curpath = os.path.join(env['PWD'], directory) | ||
270 | |||
271 | env['OLDPWD'] = env['PWD'] | ||
272 | env['PWD'] = curpath | ||
273 | if printdir: | ||
274 | stdout.write('%s\n' % curpath) | ||
275 | return 0 | ||
276 | |||
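A rough, standalone trace of the lookup order used by utility_cd above, assuming the same ';'-separated CDPATH convention; find_cd_target is an illustrative name, not a pysh function:

    import os

    def find_cd_target(env, directory):
        # Try each CDPATH entry first, then fall back to resolving against PWD;
        # os.path.join keeps absolute arguments untouched.
        if not directory.startswith('.'):
            for cdpath in env.get('CDPATH', '.').split(';'):
                candidate = os.path.join(cdpath, directory)
                if os.path.isdir(candidate):
                    return candidate
        return os.path.join(env['PWD'], directory)

    env = {'PWD': '/tmp', 'CDPATH': '/usr'}
    print(find_cd_target(env, 'lib'))   # '/usr/lib' if it exists, else '/tmp/lib'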
277 | #------------------------------------------------------------------------------- | ||
278 | # colon utility | ||
279 | #------------------------------------------------------------------------------- | ||
280 | def utility_colon(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
281 | if 'debug-utility' in debugflags: | ||
282 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
283 | return 0 | ||
284 | |||
285 | #------------------------------------------------------------------------------- | ||
286 | # echo utility | ||
287 | #------------------------------------------------------------------------------- | ||
288 | def utility_echo(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
289 | if 'debug-utility' in debugflags: | ||
290 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
291 | |||
292 | # Echo only takes arguments, no options. Use printf if you need fancy stuff. | ||
293 | output = ' '.join(args) + '\n' | ||
294 | stdout.write(output) | ||
295 | stdout.flush() | ||
296 | return 0 | ||
297 | |||
298 | #------------------------------------------------------------------------------- | ||
299 | # egrep utility | ||
300 | #------------------------------------------------------------------------------- | ||
301 | # egrep is usually a shell script. | ||
302 | # Unfortunately, pysh does not support shell scripts *with arguments* right now, | ||
303 | # so the redirection is implemented here, assuming grep is available. | ||
304 | def utility_egrep(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
305 | if 'debug-utility' in debugflags: | ||
306 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
307 | |||
308 | return run_command('grep', ['-E'] + args, interp, env, stdin, stdout, | ||
309 | stderr, debugflags) | ||
310 | |||
311 | #------------------------------------------------------------------------------- | ||
312 | # env utility | ||
313 | #------------------------------------------------------------------------------- | ||
314 | def utility_env(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
315 | if 'debug-utility' in debugflags: | ||
316 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
317 | |||
318 | if args and args[0]=='-i': | ||
319 | raise NotImplementedError('env: -i option is not implemented') | ||
320 | |||
321 | i = 0 | ||
322 | for arg in args: | ||
323 | if '=' not in arg: | ||
324 | break | ||
325 | # Update the current environment | ||
326 | name, value = arg.split('=', 1) | ||
327 | env[name] = value | ||
328 | i += 1 | ||
329 | |||
330 | if args[i:]: | ||
331 | # Find then execute the specified interpreter | ||
332 | utility = env.find_in_path(args[i]) | ||
333 | if not utility: | ||
334 | return 127 | ||
335 | args[i:i+1] = utility | ||
336 | name = args[i] | ||
337 | args = args[i+1:] | ||
338 | try: | ||
339 | return run_command(name, args, interp, env, stdin, stdout, stderr, | ||
340 | debugflags) | ||
341 | except UtilityError: | ||
342 | stderr.write('env: failed to execute %s' % ' '.join([name]+args)) | ||
343 | return 126 | ||
344 | else: | ||
345 | for pair in env.get_variables().iteritems(): | ||
346 | stdout.write('%s=%s\n' % pair) | ||
347 | return 0 | ||
348 | |||
349 | #------------------------------------------------------------------------------- | ||
350 | # exit utility | ||
351 | #------------------------------------------------------------------------------- | ||
352 | def utility_exit(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
353 | if 'debug-utility' in debugflags: | ||
354 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
355 | |||
356 | res = None | ||
357 | if args: | ||
358 | try: | ||
359 | res = int(args[0]) | ||
360 | except ValueError: | ||
361 | res = None | ||
362 | if not 0<=res<=255: | ||
363 | res = None | ||
364 | |||
365 | if res is None: | ||
366 | # BUG: should be last executed command exit code | ||
367 | res = 0 | ||
368 | |||
369 | raise ExitSignal(res) | ||
370 | |||
371 | #------------------------------------------------------------------------------- | ||
372 | # fgrep utility | ||
373 | #------------------------------------------------------------------------------- | ||
374 | # see egrep | ||
375 | def utility_fgrep(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
376 | if 'debug-utility' in debugflags: | ||
377 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
378 | |||
379 | return run_command('grep', ['-F'] + args, interp, env, stdin, stdout, | ||
380 | stderr, debugflags) | ||
381 | |||
382 | #------------------------------------------------------------------------------- | ||
383 | # gunzip utility | ||
384 | #------------------------------------------------------------------------------- | ||
385 | # see egrep | ||
386 | def utility_gunzip(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
387 | if 'debug-utility' in debugflags: | ||
388 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
389 | |||
390 | return run_command('gzip', ['-d'] + args, interp, env, stdin, stdout, | ||
391 | stderr, debugflags) | ||
392 | |||
393 | #------------------------------------------------------------------------------- | ||
394 | # kill utility | ||
395 | #------------------------------------------------------------------------------- | ||
396 | def utility_kill(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
397 | if 'debug-utility' in debugflags: | ||
398 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
399 | |||
400 | for arg in args: | ||
401 | pid = int(arg) | ||
402 | status = subprocess.call(['pskill', '/T', str(pid)], | ||
403 | shell=True, | ||
404 | stdout=subprocess.PIPE, | ||
405 | stderr=subprocess.PIPE) | ||
406 | # pskill is asynchronous, hence the stupid polling loop | ||
407 | while 1: | ||
408 | p = subprocess.Popen(['pslist', str(pid)], | ||
409 | shell=True, | ||
410 | stdout=subprocess.PIPE, | ||
411 | stderr=subprocess.STDOUT) | ||
412 | output = p.communicate()[0] | ||
413 | if ('process %d was not' % pid) in output: | ||
414 | break | ||
415 | time.sleep(1) | ||
416 | return status | ||
417 | |||
418 | #------------------------------------------------------------------------------- | ||
419 | # mkdir utility | ||
420 | #------------------------------------------------------------------------------- | ||
421 | OPT_MKDIR = NonExitingParser("mkdir - make directories.") | ||
422 | OPT_MKDIR.add_option('-p', action='store_true', dest='has_p', default=False) | ||
423 | |||
424 | def utility_mkdir(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
425 | if 'debug-utility' in debugflags: | ||
426 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
427 | |||
428 | # TODO: implement umask | ||
429 | # TODO: implement proper utility error report | ||
430 | option, args = OPT_MKDIR.parse_args(args) | ||
431 | for arg in args: | ||
432 | path = os.path.join(env['PWD'], arg) | ||
433 | if option.has_p: | ||
434 | try: | ||
435 | os.makedirs(path) | ||
436 | except IOError, e: | ||
437 | if e.errno != errno.EEXIST: | ||
438 | raise | ||
439 | else: | ||
440 | os.mkdir(path) | ||
441 | return 0 | ||
442 | |||
443 | #------------------------------------------------------------------------------- | ||
444 | # netstat utility | ||
445 | #------------------------------------------------------------------------------- | ||
446 | def utility_netstat(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
447 | # Do you really expect me to implement netstat ? | ||
448 | # This empty form is enough for Mercurial tests since it's | ||
449 | # supposed to generate nothing upon success. Faking this test | ||
450 | # is not a big deal either. | ||
451 | if 'debug-utility' in debugflags: | ||
452 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
453 | return 0 | ||
454 | |||
455 | #------------------------------------------------------------------------------- | ||
456 | # pwd utility | ||
457 | #------------------------------------------------------------------------------- | ||
458 | OPT_PWD = NonExitingParser("pwd - return working directory name") | ||
459 | OPT_PWD.add_option('-L', action='store_true', dest='has_L', default=True, | ||
460 | help="""If the PWD environment variable contains an absolute pathname of \ | ||
461 | the current directory that does not contain the filenames dot or dot-dot, \ | ||
462 | pwd shall write this pathname to standard output. Otherwise, the -L option \ | ||
463 | shall behave as the -P option.""") | ||
464 | OPT_PWD.add_option('-P', action='store_true', dest='has_L', default=False, | ||
465 | help="""The absolute pathname written shall not contain filenames that, in \ | ||
466 | the context of the pathname, refer to files of type symbolic link.""") | ||
467 | |||
468 | def utility_pwd(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
469 | if 'debug-utility' in debugflags: | ||
470 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
471 | |||
472 | option, args = OPT_PWD.parse_args(args) | ||
473 | stdout.write('%s\n' % env['PWD']) | ||
474 | return 0 | ||
475 | |||
476 | #------------------------------------------------------------------------------- | ||
477 | # printf utility | ||
478 | #------------------------------------------------------------------------------- | ||
479 | RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)') | ||
480 | |||
481 | def utility_printf(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
482 | if 'debug-utility' in debugflags: | ||
483 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
484 | |||
485 | def replace(m): | ||
486 | assert m.group() | ||
487 | g = m.group()[1:] | ||
488 | if g.startswith('x'): | ||
489 | return chr(int(g[1:], 16)) | ||
490 | if len(g) <= 3 and len([c for c in g if c in '01234567']) == len(g): | ||
491 | # Yay, an octal number | ||
492 | return chr(int(g, 8)) | ||
493 | return { | ||
494 | 'a': '\a', | ||
495 | 'b': '\b', | ||
496 | 'f': '\f', | ||
497 | 'n': '\n', | ||
498 | 'r': '\r', | ||
499 | 't': '\t', | ||
500 | 'v': '\v', | ||
501 | '\\': '\\', | ||
502 | }.get(g) | ||
503 | |||
504 | # Convert escape sequences | ||
505 | format = re.sub(RE_UNESCAPE, replace, args[0]) | ||
506 | stdout.write(format % tuple(args[1:])) | ||
507 | return 0 | ||
508 | |||
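The core of utility_printf is the RE_UNESCAPE substitution: each match (\xHH, one to three octal digits, or a single escaped character) is mapped to the corresponding character before the result is used as a %-format string. A small standalone sketch of that mechanism; unescape() is a hypothetical stand-in that only handles a few named escapes:

    import re

    RE_UNESCAPE = re.compile(r'(\\x[a-zA-Z0-9]{2}|\\[0-7]{1,3}|\\.)')

    def unescape(fmt):
        def replace(m):
            g = m.group()[1:]
            if g.startswith('x'):
                return chr(int(g[1:], 16))              # \x41 -> 'A'
            if all(c in '01234567' for c in g):
                return chr(int(g, 8))                   # \101 -> 'A'
            return {'n': '\n', 't': '\t', '\\': '\\'}.get(g, g)
        return RE_UNESCAPE.sub(replace, fmt)

    print(repr(unescape(r'a\tb\n')))      # 'a\tb\n'
    print(unescape(r'\x41\102') == 'AB')  # True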
509 | #------------------------------------------------------------------------------- | ||
510 | # true utility | ||
511 | #------------------------------------------------------------------------------- | ||
512 | def utility_true(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
513 | if 'debug-utility' in debugflags: | ||
514 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
515 | return 0 | ||
516 | |||
517 | #------------------------------------------------------------------------------- | ||
518 | # sed utility | ||
519 | #------------------------------------------------------------------------------- | ||
520 | RE_SED = re.compile(r'^s(.).*\1[a-zA-Z]*$') | ||
521 | |||
522 | # cygwin sed fails with some expressions when they do not end with a single space. | ||
523 | # See the unit tests for details. Interestingly, the same expressions work perfectly | ||
524 | # in the cygwin shell. | ||
525 | def utility_sed(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
526 | if 'debug-utility' in debugflags: | ||
527 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
528 | |||
529 | # Scan pattern arguments and append a space if necessary | ||
530 | for i in xrange(len(args)): | ||
531 | if not RE_SED.search(args[i]): | ||
532 | continue | ||
533 | args[i] = args[i] + ' ' | ||
534 | |||
535 | return run_command(name, args, interp, env, stdin, stdout, | ||
536 | stderr, debugflags) | ||
537 | |||
538 | #------------------------------------------------------------------------------- | ||
539 | # sleep utility | ||
540 | #------------------------------------------------------------------------------- | ||
541 | def utility_sleep(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
542 | if 'debug-utility' in debugflags: | ||
543 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
544 | time.sleep(int(args[0])) | ||
545 | return 0 | ||
546 | |||
547 | #------------------------------------------------------------------------------- | ||
548 | # sort utility | ||
549 | #------------------------------------------------------------------------------- | ||
550 | OPT_SORT = NonExitingParser("sort - sort, merge, or sequence check text files") | ||
551 | |||
552 | def utility_sort(name, args, interp, env, stdin, stdout, stderr, debugflags): | ||
553 | |||
554 | def sort(path): | ||
555 | if path == '-': | ||
556 | lines = stdin.readlines() | ||
557 | else: | ||
558 | try: | ||
559 | f = file(path) | ||
560 | try: | ||
561 | lines = f.readlines() | ||
562 | finally: | ||
563 | f.close() | ||
564 | except IOError, e: | ||
565 | stderr.write(str(e) + '\n') | ||
566 | return 1 | ||
567 | |||
568 | if lines and lines[-1][-1]!='\n': | ||
569 | lines[-1] = lines[-1] + '\n' | ||
570 | return lines | ||
571 | |||
572 | if 'debug-utility' in debugflags: | ||
573 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
574 | |||
575 | option, args = OPT_SORT.parse_args(args) | ||
576 | alllines = [] | ||
577 | |||
578 | if len(args)<=0: | ||
579 | args += ['-'] | ||
580 | |||
581 | # Load all files lines | ||
582 | curdir = os.getcwd() | ||
583 | try: | ||
584 | os.chdir(env['PWD']) | ||
585 | for path in args: | ||
586 | alllines += sort(path) | ||
587 | finally: | ||
588 | os.chdir(curdir) | ||
589 | |||
590 | alllines.sort() | ||
591 | for line in alllines: | ||
592 | stdout.write(line) | ||
593 | return 0 | ||
594 | |||
595 | #------------------------------------------------------------------------------- | ||
596 | # hg utility | ||
597 | #------------------------------------------------------------------------------- | ||
598 | |||
599 | hgcommands = [ | ||
600 | 'add', | ||
601 | 'addremove', | ||
602 | 'commit', 'ci', | ||
603 | 'debugrename', | ||
604 | 'debugwalk', | ||
605 | 'falabala', # Dummy command used in a mercurial test | ||
606 | 'incoming', | ||
607 | 'locate', | ||
608 | 'pull', | ||
609 | 'push', | ||
610 | 'qinit', | ||
611 | 'remove', 'rm', | ||
612 | 'rename', 'mv', | ||
613 | 'revert', | ||
614 | 'showconfig', | ||
615 | 'status', 'st', | ||
616 | 'strip', | ||
617 | ] | ||
618 | |||
619 | def rewriteslashes(name, args): | ||
620 | # Several hg commands output file paths, rewrite the separators | ||
621 | if len(args) > 1 and name.lower().endswith('python') \ | ||
622 | and args[0].endswith('hg'): | ||
623 | for cmd in hgcommands: | ||
624 | if cmd in args[1:]: | ||
625 | return True | ||
626 | |||
627 | # svn output contains many paths with OS specific separators. | ||
628 | # Normalize these to unix paths. | ||
629 | base = os.path.basename(name) | ||
630 | if base.startswith('svn'): | ||
631 | return True | ||
632 | |||
633 | return False | ||
634 | |||
635 | def rewritehg(output): | ||
636 | if not output: | ||
637 | return output | ||
638 | # Rewrite os specific messages | ||
639 | output = output.replace(': The system cannot find the file specified', | ||
640 | ': No such file or directory') | ||
641 | output = re.sub(': Access is denied.*$', ': Permission denied', output) | ||
642 | output = output.replace(': No connection could be made because the target machine actively refused it', | ||
643 | ': Connection refused') | ||
644 | return output | ||
645 | |||
646 | |||
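rewriteslashes() and rewritehg() exist to make Windows command output comparable with Mercurial's unix-oriented test expectations: path separators are flipped and a handful of OS-specific error strings are mapped to their POSIX wording. A short standalone illustration of that normalization; normalize() here is illustrative, not the pysh function:

    import re

    def normalize(output):
        # Same spirit as rewritehg(): map Windows error text and separators
        # to their POSIX equivalents.
        output = output.replace(': The system cannot find the file specified',
                                ': No such file or directory')
        output = re.sub(': Access is denied.*$', ': Permission denied', output)
        return output.replace('\\', '/')

    print(normalize('abort: The system cannot find the file specified: data\\foo.txt'))
    # abort: No such file or directory: data/foo.txt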
647 | def run_command(name, args, interp, env, stdin, stdout, | ||
648 | stderr, debugflags): | ||
649 | # Execute the command | ||
650 | if 'debug-utility' in debugflags: | ||
651 | print interp.log(' '.join([name, str(args), interp['PWD']]) + '\n') | ||
652 | |||
653 | hgbin = interp.options().hgbinary | ||
654 | ishg = hgbin and ('hg' in name or args and 'hg' in args[0]) | ||
655 | unixoutput = 'cygwin' in name or ishg | ||
656 | |||
657 | exec_env = env.get_variables() | ||
658 | try: | ||
659 | # BUG: comparing file descriptors is clearly not a reliable way to tell | ||
660 | # whether they point to the same underlying object. But within pysh's limited | ||
661 | # scope this is usually right; we do not expect complicated redirections | ||
662 | # beyond the usual 2>&1. | ||
663 | # Still, the one case we cannot deal with is when stdout | ||
664 | # and stderr are redirected *by the pysh caller*. This is the reason for the | ||
665 | # --redirect pysh() option. | ||
666 | # Now, we want to know whether they are the same because we sometimes need to | ||
667 | # transform the command output, mostly removing CR-LF to ensure that | ||
668 | # command output is unix-like. Cygwin utilities are a special case because | ||
669 | # they explicitly set their output streams to binary mode, so we have | ||
670 | # nothing to do. For all other commands, we have to guess whether they | ||
671 | # are sending text data, in which case the transformation must be done. | ||
672 | # Again, the NUL character test is unreliable but should be enough for | ||
673 | # hg tests. (See the sketch after this function for an illustration.) | ||
674 | redirected = stdout.fileno()==stderr.fileno() | ||
675 | if not redirected: | ||
676 | p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env, | ||
677 | stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.PIPE) | ||
678 | else: | ||
679 | p = subprocess.Popen([name] + args, cwd=env['PWD'], env=exec_env, | ||
680 | stdin=stdin, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) | ||
681 | out, err = p.communicate() | ||
682 | except WindowsError, e: | ||
683 | raise UtilityError(str(e)) | ||
684 | |||
685 | if not unixoutput: | ||
686 | def encode(s): | ||
687 | if '\0' in s: | ||
688 | return s | ||
689 | return s.replace('\r\n', '\n') | ||
690 | else: | ||
691 | encode = lambda s: s | ||
692 | |||
693 | if rewriteslashes(name, args): | ||
694 | encode1_ = encode | ||
695 | def encode(s): | ||
696 | s = encode1_(s) | ||
697 | s = s.replace('\\\\', '\\') | ||
698 | s = s.replace('\\', '/') | ||
699 | return s | ||
700 | |||
701 | if ishg: | ||
702 | encode2_ = encode | ||
703 | def encode(s): | ||
704 | return rewritehg(encode2_(s)) | ||
705 | |||
706 | stdout.write(encode(out)) | ||
707 | if not redirected: | ||
708 | stderr.write(encode(err)) | ||
709 | return p.returncode | ||
710 | |||
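As the comment inside run_command notes, two decisions dominate this function: whether to merge the child's stderr into stdout (guessed by comparing fileno() of the caller's streams), and whether to normalize CR-LF in the captured output (skipped when a NUL byte suggests binary data). A rough sketch of just those two decisions, assuming stdout/stderr are binary file objects exposing fileno(); run() is an illustrative name, not pysh's API:

    import subprocess

    def run(argv, stdout, stderr, cwd, env):
        # Heuristic from run_command: if the caller handed us the same underlying
        # file for stdout and stderr, merge the child's streams as well.
        merged = stdout.fileno() == stderr.fileno()
        p = subprocess.Popen(argv, cwd=cwd, env=env,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT if merged else subprocess.PIPE)
        out, err = p.communicate()

        def to_unix(data):
            # Leave binary data alone (NUL heuristic), normalize text line endings.
            if b'\0' in data:
                return data
            return data.replace(b'\r\n', b'\n')

        stdout.write(to_unix(out))
        if not merged:
            stderr.write(to_unix(err))
        return p.returncode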
diff --git a/bitbake/lib/bb/pysh/interp.py b/bitbake/lib/bb/pysh/interp.py
new file mode 100644
index 0000000000..efe5181e1e
--- /dev/null
+++ b/bitbake/lib/bb/pysh/interp.py
@@ -0,0 +1,1367 @@
1 | # interp.py - shell interpreter for pysh. | ||
2 | # | ||
3 | # Copyright 2007 Patrick Mezard | ||
4 | # | ||
5 | # This software may be used and distributed according to the terms | ||
6 | # of the GNU General Public License, incorporated herein by reference. | ||
7 | |||
8 | """Implement the shell interpreter. | ||
9 | |||
10 | Most references are made to "The Open Group Base Specifications Issue 6". | ||
11 | <http://www.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html> | ||
12 | """ | ||
13 | # TODO: document the fact that input streams must implement fileno() so Popen will work correctly. | ||
14 | # It requires non-stdin streams to be implemented as files. Still to be tested... | ||
15 | # DOC: pathsep is used in PATH instead of ':'. Clearly, there are path syntax issues here. | ||
16 | # TODO: stop command execution upon error. | ||
17 | # TODO: sort out the filename/io_number mess. It should be possible to use filenames only. | ||
18 | # TODO: review subshell implementation | ||
19 | # TODO: test environment cloning for non-special builtins | ||
20 | # TODO: set -x should not rebuild commands from tokens, assignments/redirections are lost | ||
21 | # TODO: unit test for variable assignment | ||
22 | # TODO: test error management wrt error type/utility type | ||
23 | # TODO: test for binary output everywhere | ||
24 | # BUG: debug-parsing does not pass log file to PLY. Maybe a PLY upgrade is necessary. | ||
25 | import base64 | ||
26 | import cPickle as pickle | ||
27 | import errno | ||
28 | import glob | ||
29 | import os | ||
30 | import re | ||
31 | import subprocess | ||
32 | import sys | ||
33 | import tempfile | ||
34 | |||
35 | try: | ||
36 | s = set() | ||
37 | del s | ||
38 | except NameError: | ||
39 | from Set import Set as set | ||
40 | |||
41 | import builtin | ||
42 | from sherrors import * | ||
43 | import pyshlex | ||
44 | import pyshyacc | ||
45 | |||
46 | def mappend(func, *args, **kargs): | ||
47 | """Like map but assume func returns a list. Returned lists are merged into | ||
48 | a single one. | ||
49 | """ | ||
50 | return reduce(lambda a,b: a+b, map(func, *args, **kargs), []) | ||
51 | |||
52 | class FileWrapper: | ||
53 | """File object wrapper to ease debugging. | ||
54 | |||
55 | Allow mode checking and implement file duplication through a simple | ||
56 | reference counting scheme (see the sketch after this class). Not sure the | ||
57 | latter is really useful since only real file descriptors can be used. | ||
58 | """ | ||
59 | def __init__(self, mode, file, close=True): | ||
60 | if mode not in ('r', 'w', 'a'): | ||
61 | raise IOError('invalid mode: %s' % mode) | ||
62 | self._mode = mode | ||
63 | self._close = close | ||
64 | if isinstance(file, FileWrapper): | ||
65 | if file._refcount[0] <= 0: | ||
66 | raise IOError(0, 'Error') | ||
67 | self._refcount = file._refcount | ||
68 | self._refcount[0] += 1 | ||
69 | self._file = file._file | ||
70 | else: | ||
71 | self._refcount = [1] | ||
72 | self._file = file | ||
73 | |||
74 | def dup(self): | ||
75 | return FileWrapper(self._mode, self, self._close) | ||
76 | |||
77 | def fileno(self): | ||
78 | """fileno() should be only necessary for input streams.""" | ||
79 | return self._file.fileno() | ||
80 | |||
81 | def read(self, size=-1): | ||
82 | if self._mode!='r': | ||
83 | raise IOError(0, 'Error') | ||
84 | return self._file.read(size) | ||
85 | |||
86 | def readlines(self, *args, **kwargs): | ||
87 | return self._file.readlines(*args, **kwargs) | ||
88 | |||
89 | def write(self, s): | ||
90 | if self._mode not in ('w', 'a'): | ||
91 | raise IOError(0, 'Error') | ||
92 | return self._file.write(s) | ||
93 | |||
94 | def flush(self): | ||
95 | self._file.flush() | ||
96 | |||
97 | def close(self): | ||
98 | if not self._refcount: | ||
99 | return | ||
100 | assert self._refcount[0] > 0 | ||
101 | |||
102 | self._refcount[0] -= 1 | ||
103 | if self._refcount[0] == 0: | ||
104 | self._mode = 'c' | ||
105 | if self._close: | ||
106 | self._file.close() | ||
107 | self._refcount = None | ||
108 | |||
109 | def mode(self): | ||
110 | return self._mode | ||
111 | |||
112 | def __getattr__(self, name): | ||
113 | if name == 'name': | ||
114 | self.name = getattr(self._file, name) | ||
115 | return self.name | ||
116 | else: | ||
117 | raise AttributeError(name) | ||
118 | |||
119 | def __del__(self): | ||
120 | self.close() | ||
121 | |||
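The reference counting in FileWrapper is easy to misread: dup() does not duplicate any OS-level descriptor, it shares the underlying file object and a mutable counter, and the real close() happens only when the last wrapper is closed. A minimal standalone model of that behaviour (SharedHandle is a toy class, not the pysh one):

    import io

    class SharedHandle(object):
        """Toy model of FileWrapper's refcounting, not the pysh class itself."""
        def __init__(self, f, refcount=None):
            self._file = f
            self._refcount = refcount if refcount is not None else [1]

        def dup(self):
            self._refcount[0] += 1
            return SharedHandle(self._file, self._refcount)

        def close(self):
            self._refcount[0] -= 1
            if self._refcount[0] == 0:
                self._file.close()   # only the last owner really closes

    f = io.BytesIO(b'data')
    a = SharedHandle(f)
    b = a.dup()
    a.close()
    print(f.closed)   # False - b still references the stream
    b.close()
    print(f.closed)   # True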
122 | |||
123 | def win32_open_devnull(mode): | ||
124 | return open('NUL', mode) | ||
125 | |||
126 | |||
127 | class Redirections: | ||
128 | """Stores open files and their mapping to pseudo-sh file descriptor. | ||
129 | """ | ||
130 | # BUG: redirections are not handled correctly: 1>&3 2>&3 3>&4 does | ||
131 | # not make 1 redirect to 4 | ||
132 | def __init__(self, stdin=None, stdout=None, stderr=None): | ||
133 | self._descriptors = {} | ||
134 | if stdin is not None: | ||
135 | self._add_descriptor(0, stdin) | ||
136 | if stdout is not None: | ||
137 | self._add_descriptor(1, stdout) | ||
138 | if stderr is not None: | ||
139 | self._add_descriptor(2, stderr) | ||
140 | |||
141 | def add_here_document(self, interp, name, content, io_number=None): | ||
142 | if io_number is None: | ||
143 | io_number = 0 | ||
144 | |||
145 | if name==pyshlex.unquote_wordtree(name): | ||
146 | content = interp.expand_here_document(('TOKEN', content)) | ||
147 | |||
148 | # Write document content in a temporary file | ||
149 | tmp = tempfile.TemporaryFile() | ||
150 | try: | ||
151 | tmp.write(content) | ||
152 | tmp.flush() | ||
153 | tmp.seek(0) | ||
154 | self._add_descriptor(io_number, FileWrapper('r', tmp)) | ||
155 | except: | ||
156 | tmp.close() | ||
157 | raise | ||
158 | |||
159 | def add(self, interp, op, filename, io_number=None): | ||
160 | if op not in ('<', '>', '>|', '>>', '>&'): | ||
161 | # TODO: add descriptor duplication and here_documents | ||
162 | raise RedirectionError('Unsupported redirection operator "%s"' % op) | ||
163 | |||
164 | if io_number is not None: | ||
165 | io_number = int(io_number) | ||
166 | |||
167 | if (op == '>&' and filename.isdigit()) or filename=='-': | ||
168 | # No expansion for file descriptors, quote them if you want a filename | ||
169 | fullname = filename | ||
170 | else: | ||
171 | if filename.startswith('/'): | ||
172 | # TODO: win32 kludge | ||
173 | if filename=='/dev/null': | ||
174 | fullname = 'NUL' | ||
175 | else: | ||
176 | # TODO: handle absolute pathnames, they are unlikely to exist on the | ||
177 | # current platform (win32 for instance). | ||
178 | raise NotImplementedError() | ||
179 | else: | ||
180 | fullname = interp.expand_redirection(('TOKEN', filename)) | ||
181 | if not fullname: | ||
182 | raise RedirectionError('%s: ambiguous redirect' % filename) | ||
183 | # Build absolute path based on PWD | ||
184 | fullname = os.path.join(interp.get_env()['PWD'], fullname) | ||
185 | |||
186 | if op=='<': | ||
187 | return self._add_input_redirection(interp, fullname, io_number) | ||
188 | elif op in ('>', '>|'): | ||
189 | clobber = ('>|'==op) | ||
190 | return self._add_output_redirection(interp, fullname, io_number, clobber) | ||
191 | elif op=='>>': | ||
192 | return self._add_output_appending(interp, fullname, io_number) | ||
193 | elif op=='>&': | ||
194 | return self._dup_output_descriptor(fullname, io_number) | ||
195 | |||
196 | def close(self): | ||
197 | if self._descriptors is not None: | ||
198 | for desc in self._descriptors.itervalues(): | ||
199 | desc.flush() | ||
200 | desc.close() | ||
201 | self._descriptors = None | ||
202 | |||
203 | def stdin(self): | ||
204 | return self._descriptors[0] | ||
205 | |||
206 | def stdout(self): | ||
207 | return self._descriptors[1] | ||
208 | |||
209 | def stderr(self): | ||
210 | return self._descriptors[2] | ||
211 | |||
212 | def clone(self): | ||
213 | clone = Redirections() | ||
214 | for desc, fileobj in self._descriptors.iteritems(): | ||
215 | clone._descriptors[desc] = fileobj.dup() | ||
216 | return clone | ||
217 | |||
218 | def _add_output_redirection(self, interp, filename, io_number, clobber): | ||
219 | if io_number is None: | ||
220 | # io_number default to standard output | ||
221 | io_number = 1 | ||
222 | |||
223 | if not clobber and interp.get_env().has_opt('-C') and os.path.isfile(filename): | ||
224 | # File already exist in no-clobber mode, bail out | ||
225 | raise RedirectionError('File "%s" already exists' % filename) | ||
226 | |||
227 | # Open and register | ||
228 | self._add_file_descriptor(io_number, filename, 'w') | ||
229 | |||
230 | def _add_output_appending(self, interp, filename, io_number): | ||
231 | if io_number is None: | ||
232 | io_number = 1 | ||
233 | self._add_file_descriptor(io_number, filename, 'a') | ||
234 | |||
235 | def _add_input_redirection(self, interp, filename, io_number): | ||
236 | if io_number is None: | ||
237 | io_number = 0 | ||
238 | self._add_file_descriptor(io_number, filename, 'r') | ||
239 | |||
240 | def _add_file_descriptor(self, io_number, filename, mode): | ||
241 | try: | ||
242 | if filename.startswith('/'): | ||
243 | if filename=='/dev/null': | ||
244 | f = win32_open_devnull(mode+'b') | ||
245 | else: | ||
246 | # TODO: handle absolute pathnames, they are unlikely to exist on the | ||
247 | # current platform (win32 for instance). | ||
248 | raise NotImplementedError('cannot open absolute path %s' % repr(filename)) | ||
249 | else: | ||
250 | f = file(filename, mode+'b') | ||
251 | except IOError, e: | ||
252 | raise RedirectionError(str(e)) | ||
253 | |||
254 | wrapper = None | ||
255 | try: | ||
256 | wrapper = FileWrapper(mode, f) | ||
257 | f = None | ||
258 | self._add_descriptor(io_number, wrapper) | ||
259 | except: | ||
260 | if f: f.close() | ||
261 | if wrapper: wrapper.close() | ||
262 | raise | ||
263 | |||
264 | def _dup_output_descriptor(self, source_fd, dest_fd): | ||
265 | if source_fd is None: | ||
266 | source_fd = 1 | ||
267 | self._dup_file_descriptor(source_fd, dest_fd, 'w') | ||
268 | |||
269 | def _dup_file_descriptor(self, source_fd, dest_fd, mode): | ||
270 | source_fd = int(source_fd) | ||
271 | if source_fd not in self._descriptors: | ||
272 | raise RedirectionError('"%s" is not a valid file descriptor' % str(source_fd)) | ||
273 | source = self._descriptors[source_fd] | ||
274 | |||
275 | if source.mode()!=mode: | ||
276 | raise RedirectionError('Descriptor %s cannot be duplicated in mode "%s"' % (str(source), mode)) | ||
277 | |||
278 | if dest_fd=='-': | ||
279 | # Close the source descriptor | ||
280 | del self._descriptors[source_fd] | ||
281 | source.close() | ||
282 | else: | ||
283 | dest_fd = int(dest_fd) | ||
284 | if dest_fd not in self._descriptors: | ||
285 | raise RedirectionError('Cannot replace file descriptor %s' % str(dest_fd)) | ||
286 | |||
287 | dest = self._descriptors[dest_fd] | ||
288 | if dest.mode()!=mode: | ||
289 | raise RedirectionError('Descriptor %s cannot be redirected in mode "%s"' % (str(dest), mode)) | ||
290 | |||
291 | self._descriptors[dest_fd] = source.dup() | ||
292 | dest.close() | ||
293 | |||
294 | def _add_descriptor(self, io_number, file): | ||
295 | io_number = int(io_number) | ||
296 | |||
297 | if io_number in self._descriptors: | ||
298 | # Close the current descriptor | ||
299 | d = self._descriptors[io_number] | ||
300 | del self._descriptors[io_number] | ||
301 | d.close() | ||
302 | |||
303 | self._descriptors[io_number] = file | ||
304 | |||
305 | def __str__(self): | ||
306 | names = [('%d=%r' % (k, getattr(v, 'name', None))) for k,v | ||
307 | in self._descriptors.iteritems()] | ||
308 | names = ','.join(names) | ||
309 | return 'Redirections(%s)' % names | ||
310 | |||
311 | def __del__(self): | ||
312 | self.close() | ||
313 | |||
314 | def cygwin_to_windows_path(path): | ||
315 | """Turn /cygdrive/c/foo into c:/foo, or return path if it | ||
316 | is not a cygwin path. | ||
317 | """ | ||
318 | if not path.startswith('/cygdrive/'): | ||
319 | return path | ||
320 | path = path[len('/cygdrive/'):] | ||
321 | path = path[:1] + ':' + path[1:] | ||
322 | return path | ||
323 | |||
324 | def win32_to_unix_path(path): | ||
325 | if path is not None: | ||
326 | path = path.replace('\\', '/') | ||
327 | return path | ||
328 | |||
329 | _RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?') | ||
330 | _SHEBANG_CMDS = { | ||
331 | '/usr/bin/env': 'env', | ||
332 | '/bin/sh': 'pysh', | ||
333 | 'python': 'python', | ||
334 | } | ||
335 | |||
336 | def resolve_shebang(path, ignoreshell=False): | ||
337 | """Return a list of arguments as shebang interpreter call or an empty list | ||
338 | if path does not refer to an executable script. | ||
339 | See <http://www.opengroup.org/austin/docs/austin_51r2.txt>. | ||
340 | |||
341 | ignoreshell - set to True to ignore sh shebangs. Return an empty list instead. | ||
342 | """ | ||
343 | try: | ||
344 | f = file(path) | ||
345 | try: | ||
346 | # At most 80 characters in the first line | ||
347 | header = f.read(80).splitlines()[0] | ||
348 | finally: | ||
349 | f.close() | ||
350 | |||
351 | m = _RE_SHEBANG.search(header) | ||
352 | if not m: | ||
353 | return [] | ||
354 | cmd, arg = m.group(1,2) | ||
355 | if os.path.isfile(cmd): | ||
356 | # Keep this one, the hg script for instance contains a weird windows | ||
357 | # shebang referencing the current python install. | ||
358 | cmdfile = os.path.basename(cmd).lower() | ||
359 | if cmdfile == 'python.exe': | ||
360 | cmd = 'python' | ||
361 | pass | ||
362 | elif cmd not in _SHEBANG_CMDS: | ||
363 | raise CommandNotFound('Unknown interpreter "%s" referenced in '\ | ||
364 | 'shebang' % header) | ||
365 | cmd = _SHEBANG_CMDS.get(cmd) | ||
366 | if cmd is None or (ignoreshell and cmd == 'pysh'): | ||
367 | return [] | ||
368 | if arg is None: | ||
369 | return [cmd, win32_to_unix_path(path)] | ||
370 | return [cmd, arg, win32_to_unix_path(path)] | ||
371 | except IOError, e: | ||
372 | if e.errno!=errno.ENOENT and \ | ||
373 | (e.errno!=errno.EPERM and not os.path.isdir(path)): # Opening a directory raises EPERM | ||
374 | raise | ||
375 | return [] | ||
376 | |||
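resolve_shebang() turns a script's '#!' line into the argv prefix needed to launch it on Windows, mapping a few well-known interpreters through _SHEBANG_CMDS. A standalone sketch of just the parsing step (parse_shebang is an illustrative name; the error handling for unknown interpreters is omitted):

    import re

    _RE_SHEBANG = re.compile(r'^\#!\s?([^\s]+)(?:\s([^\s]+))?')
    _SHEBANG_CMDS = {'/usr/bin/env': 'env', '/bin/sh': 'pysh', 'python': 'python'}

    def parse_shebang(first_line, script_path):
        m = _RE_SHEBANG.search(first_line)
        if not m:
            return []
        cmd, arg = m.group(1, 2)
        cmd = _SHEBANG_CMDS.get(cmd)
        if cmd is None:
            return []
        if arg is None:
            return [cmd, script_path]
        return [cmd, arg, script_path]

    print(parse_shebang('#!/usr/bin/env python', 'test.py'))   # ['env', 'python', 'test.py']
    print(parse_shebang('#!/bin/sh', 'run.sh'))                # ['pysh', 'run.sh']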
377 | def win32_find_in_path(name, path): | ||
378 | if isinstance(path, str): | ||
379 | path = path.split(os.pathsep) | ||
380 | |||
381 | exts = os.environ.get('PATHEXT', '').lower().split(os.pathsep) | ||
382 | for p in path: | ||
383 | p_name = os.path.join(p, name) | ||
384 | |||
385 | prefix = resolve_shebang(p_name) | ||
386 | if prefix: | ||
387 | return prefix | ||
388 | |||
389 | for ext in exts: | ||
390 | p_name_ext = p_name + ext | ||
391 | if os.path.exists(p_name_ext): | ||
392 | return [win32_to_unix_path(p_name_ext)] | ||
393 | return [] | ||
394 | |||
395 | class Traps(dict): | ||
396 | def __setitem__(self, key, value): | ||
397 | if key not in ('EXIT',): | ||
398 | raise NotImplementedError() | ||
399 | super(Traps, self).__setitem__(key, value) | ||
400 | |||
401 | # IFS white spaces character class | ||
402 | _IFS_WHITESPACES = (' ', '\t', '\n') | ||
403 | |||
404 | class Environment: | ||
405 | """Environment holds environment variables, export table, function | ||
406 | definitions and whatever is defined in 2.12 "Shell Execution Environment", | ||
407 | redirection excepted. | ||
408 | """ | ||
409 | def __init__(self, pwd): | ||
410 | self._opt = set() #Shell options | ||
411 | |||
412 | self._functions = {} | ||
413 | self._env = {'?': '0', '#': '0'} | ||
414 | self._exported = set([ | ||
415 | 'HOME', 'IFS', 'PATH' | ||
416 | ]) | ||
417 | |||
418 | # Set environment vars with side-effects | ||
419 | self._ifs_ws = None # Set of IFS whitespace characters | ||
420 | self._ifs_re = None # Regular expression used to split between words using IFS classes | ||
421 | self['IFS'] = ''.join(_IFS_WHITESPACES) #Default environment values | ||
422 | self['PWD'] = pwd | ||
423 | self.traps = Traps() | ||
424 | |||
425 | def clone(self, subshell=False): | ||
426 | env = Environment(self['PWD']) | ||
427 | env._opt = set(self._opt) | ||
428 | for k,v in self.get_variables().iteritems(): | ||
429 | if k in self._exported: | ||
430 | env.export(k,v) | ||
431 | elif subshell: | ||
432 | env[k] = v | ||
433 | |||
434 | if subshell: | ||
435 | env._functions = dict(self._functions) | ||
436 | |||
437 | return env | ||
438 | |||
439 | def __getitem__(self, key): | ||
440 | if key in ('@', '*', '-', '$'): | ||
441 | raise NotImplementedError('%s is not implemented' % repr(key)) | ||
442 | return self._env[key] | ||
443 | |||
444 | def get(self, key, defval=None): | ||
445 | try: | ||
446 | return self[key] | ||
447 | except KeyError: | ||
448 | return defval | ||
449 | |||
450 | def __setitem__(self, key, value): | ||
451 | if key=='IFS': | ||
452 | # Update the whitespace/non-whitespace classes | ||
453 | self._update_ifs(value) | ||
454 | elif key=='PWD': | ||
455 | pwd = os.path.abspath(value) | ||
456 | if not os.path.isdir(pwd): | ||
457 | raise VarAssignmentError('Invalid directory %s' % value) | ||
458 | value = pwd | ||
459 | elif key in ('?', '!'): | ||
460 | value = str(int(value)) | ||
461 | self._env[key] = value | ||
462 | |||
463 | def __delitem__(self, key): | ||
464 | if key in ('IFS', 'PWD', '?'): | ||
465 | raise VarAssignmentError('%s cannot be unset' % key) | ||
466 | del self._env[key] | ||
467 | |||
468 | def __contains__(self, item): | ||
469 | return item in self._env | ||
470 | |||
471 | def set_positional_args(self, args): | ||
472 | """Set the content of 'args' as positional argument from 1 to len(args). | ||
473 | Return previous argument as a list of strings. | ||
474 | """ | ||
475 | # Save and remove previous arguments | ||
476 | prevargs = [] | ||
477 | for i in xrange(int(self._env['#'])): | ||
478 | i = str(i+1) | ||
479 | prevargs.append(self._env[i]) | ||
480 | del self._env[i] | ||
481 | self._env['#'] = '0' | ||
482 | |||
483 | #Set new ones | ||
484 | for i,arg in enumerate(args): | ||
485 | self._env[str(i+1)] = str(arg) | ||
486 | self._env['#'] = str(len(args)) | ||
487 | |||
488 | return prevargs | ||
489 | |||
490 | def get_positional_args(self): | ||
491 | return [self._env[str(i+1)] for i in xrange(int(self._env['#']))] | ||
492 | |||
493 | def get_variables(self): | ||
494 | return dict(self._env) | ||
495 | |||
496 | def export(self, key, value=None): | ||
497 | if value is not None: | ||
498 | self[key] = value | ||
499 | self._exported.add(key) | ||
500 | |||
501 | def get_exported(self): | ||
502 | return [(k,self._env.get(k)) for k in self._exported] | ||
503 | |||
504 | def split_fields(self, word): | ||
505 | if not self._ifs_ws or not word: | ||
506 | return [word] | ||
507 | return re.split(self._ifs_re, word) | ||
508 | |||
509 | def _update_ifs(self, value): | ||
510 | """Update the split_fields related variables when IFS character set is | ||
511 | changed. | ||
512 | """ | ||
513 | # TODO: handle NULL IFS | ||
514 | |||
515 | # Separate characters in whitespace and non-whitespace | ||
516 | chars = set(value) | ||
517 | ws = [c for c in chars if c in _IFS_WHITESPACES] | ||
518 | nws = [c for c in chars if c not in _IFS_WHITESPACES] | ||
519 | |||
520 | # Keep whitespaces in a string for left and right stripping | ||
521 | self._ifs_ws = ''.join(ws) | ||
522 | |||
523 | # Build a regexp to split fields | ||
524 | trailing = '[' + ''.join([re.escape(c) for c in ws]) + ']' | ||
525 | if nws: | ||
526 | # First, the single non-whitespace occurrence. | ||
527 | nws = '[' + ''.join([re.escape(c) for c in nws]) + ']' | ||
528 | nws = '(?:' + trailing + '*' + nws + trailing + '*' + '|' + trailing + '+)' | ||
529 | else: | ||
530 | # Then mix all parts with quantifiers | ||
531 | nws = trailing + '+' | ||
532 | self._ifs_re = re.compile(nws) | ||
533 | |||
534 | def has_opt(self, opt, val=None): | ||
535 | return (opt, val) in self._opt | ||
536 | |||
537 | def set_opt(self, opt, val=None): | ||
538 | self._opt.add((opt, val)) | ||
539 | |||
540 | def find_in_path(self, name, pwd=False): | ||
541 | path = self._env.get('PATH', '').split(os.pathsep) | ||
542 | if pwd: | ||
543 | path[:0] = [self['PWD']] | ||
544 | if os.name == 'nt': | ||
545 | return win32_find_in_path(name, self._env.get('PATH', '')) | ||
546 | else: | ||
547 | raise NotImplementedError() | ||
548 | |||
549 | def define_function(self, name, body): | ||
550 | if not is_name(name): | ||
551 | raise ShellSyntaxError('%s is not a valid function name' % repr(name)) | ||
552 | self._functions[name] = body | ||
553 | |||
554 | def remove_function(self, name): | ||
555 | del self._functions[name] | ||
556 | |||
557 | def is_function(self, name): | ||
558 | return name in self._functions | ||
559 | |||
560 | def get_function(self, name): | ||
561 | return self._functions.get(name) | ||
562 | |||
563 | |||
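The IFS machinery above boils down to this: each assignment to IFS compiles one regular expression in which runs of IFS whitespace collapse into a single separator, and any single non-whitespace IFS character (with optional surrounding IFS whitespace) is its own delimiter; split_fields() then just applies it. A standalone sketch of that construction (make_splitter is an illustrative name):

    import re

    IFS_WHITESPACE = ' \t\n'

    def make_splitter(ifs):
        ws = [re.escape(c) for c in ifs if c in IFS_WHITESPACE]
        nws = [re.escape(c) for c in ifs if c not in IFS_WHITESPACE]
        trailing = '[' + ''.join(ws) + ']'
        if nws:
            sep = '(?:%s*[%s]%s*|%s+)' % (trailing, ''.join(nws), trailing, trailing)
        else:
            sep = trailing + '+'
        return re.compile(sep)

    # Default IFS: runs of whitespace collapse into one separator.
    print(make_splitter(' \t\n').split('a  b\tc'))   # ['a', 'b', 'c']
    # IFS=' :': every colon is its own delimiter, so empty fields survive.
    print(make_splitter(' :').split('a: b::c'))      # ['a', 'b', '', 'c']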
564 | name_charset = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_' | ||
565 | name_charset = dict(zip(name_charset,name_charset)) | ||
566 | |||
567 | def match_name(s): | ||
568 | """Return the length in characters of the longest prefix made of name | ||
569 | allowed characters in s. | ||
570 | """ | ||
571 | for i,c in enumerate(s): | ||
572 | if c not in name_charset: | ||
573 | return s[:i] | ||
574 | return s | ||
575 | |||
576 | def is_name(s): | ||
577 | return len([c for c in s if c not in name_charset])<=0 | ||
578 | |||
579 | def is_special_param(c): | ||
580 | return len(c)==1 and c in ('@','*','#','?','-','$','!','0') | ||
581 | |||
582 | def utility_not_implemented(name, *args, **kwargs): | ||
583 | raise NotImplementedError('%s utility is not implemented' % name) | ||
584 | |||
585 | |||
586 | class Utility: | ||
587 | """Define utilities properties: | ||
588 | func -- utility callable. See builtin module for utility samples. | ||
589 | is_special -- see XCU 2.8. | ||
590 | """ | ||
591 | def __init__(self, func, is_special=0): | ||
592 | self.func = func | ||
593 | self.is_special = bool(is_special) | ||
594 | |||
595 | |||
596 | def encodeargs(args): | ||
597 | def encodearg(s): | ||
598 | lines = base64.encodestring(s) | ||
599 | lines = [l.splitlines()[0] for l in lines] | ||
600 | return ''.join(lines) | ||
601 | |||
602 | s = pickle.dumps(args) | ||
603 | return encodearg(s) | ||
604 | |||
605 | def decodeargs(s): | ||
606 | s = base64.decodestring(s) | ||
607 | return pickle.loads(s) | ||
608 | |||
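encodeargs()/decodeargs() let an argument list be passed through a command line as one printable token: pickle it, then base64-encode it with the newlines stripped. The same round trip, sketched with the non-deprecated base64 API rather than the encodestring/decodestring calls used above:

    import base64
    import pickle

    def encodeargs(args):
        # Pickle, then base64 without newlines, so the result is argv-safe.
        return base64.b64encode(pickle.dumps(args)).decode('ascii')

    def decodeargs(s):
        return pickle.loads(base64.b64decode(s))

    token = encodeargs(['ls', '-l', 'dir with spaces'])
    print(token)              # one printable token with no whitespace
    print(decodeargs(token))  # ['ls', '-l', 'dir with spaces']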
609 | |||
610 | class GlobError(Exception): | ||
611 | pass | ||
612 | |||
613 | class Options: | ||
614 | def __init__(self): | ||
615 | # True if Mercurial operates with binary streams | ||
616 | self.hgbinary = True | ||
617 | |||
618 | class Interpreter: | ||
619 | # Implementation is very basic: the execute() method just makes a DFS on the | ||
620 | # AST and executes nodes one by one. Nodes are tuples (name, obj) where name | ||
621 | # is a string identifier and obj the AST element returned by the parser. | ||
622 | # | ||
623 | # Handlers are named after the node identifiers. | ||
624 | # TODO: check node names and remove the switch in execute with some | ||
625 | # dynamic getattr() call to find node handlers. | ||
626 | """Shell interpreter. | ||
627 | |||
628 | The following debugging flags can be passed: | ||
629 | debug-parsing - enable PLY debugging. | ||
630 | debug-tree - print the generated AST. | ||
631 | debug-cmd - trace command execution before word expansion, plus exit status. | ||
632 | debug-utility - trace utility execution. | ||
633 | """ | ||
634 | |||
635 | # List supported commands. | ||
636 | COMMANDS = { | ||
637 | 'cat': Utility(builtin.utility_cat,), | ||
638 | 'cd': Utility(builtin.utility_cd,), | ||
639 | ':': Utility(builtin.utility_colon,), | ||
640 | 'echo': Utility(builtin.utility_echo), | ||
641 | 'env': Utility(builtin.utility_env), | ||
642 | 'exit': Utility(builtin.utility_exit), | ||
643 | 'export': Utility(builtin.builtin_export, is_special=1), | ||
644 | 'egrep': Utility(builtin.utility_egrep), | ||
645 | 'fgrep': Utility(builtin.utility_fgrep), | ||
646 | 'gunzip': Utility(builtin.utility_gunzip), | ||
647 | 'kill': Utility(builtin.utility_kill), | ||
648 | 'mkdir': Utility(builtin.utility_mkdir), | ||
649 | 'netstat': Utility(builtin.utility_netstat), | ||
650 | 'printf': Utility(builtin.utility_printf), | ||
651 | 'pwd': Utility(builtin.utility_pwd), | ||
652 | 'return': Utility(builtin.builtin_return, is_special=1), | ||
653 | 'sed': Utility(builtin.utility_sed,), | ||
654 | 'set': Utility(builtin.builtin_set,), | ||
655 | 'shift': Utility(builtin.builtin_shift,), | ||
656 | 'sleep': Utility(builtin.utility_sleep,), | ||
657 | 'sort': Utility(builtin.utility_sort,), | ||
658 | 'trap': Utility(builtin.builtin_trap, is_special=1), | ||
659 | 'true': Utility(builtin.utility_true), | ||
660 | 'unset': Utility(builtin.builtin_unset, is_special=1), | ||
661 | 'wait': Utility(builtin.builtin_wait, is_special=1), | ||
662 | } | ||
663 | |||
664 | def __init__(self, pwd, debugflags = [], env=None, redirs=None, stdin=None, | ||
665 | stdout=None, stderr=None, opts=Options()): | ||
666 | self._env = env | ||
667 | if self._env is None: | ||
668 | self._env = Environment(pwd) | ||
669 | self._children = {} | ||
670 | |||
671 | self._redirs = redirs | ||
672 | self._close_redirs = False | ||
673 | |||
674 | if self._redirs is None: | ||
675 | if stdin is None: | ||
676 | stdin = sys.stdin | ||
677 | if stdout is None: | ||
678 | stdout = sys.stdout | ||
679 | if stderr is None: | ||
680 | stderr = sys.stderr | ||
681 | stdin = FileWrapper('r', stdin, False) | ||
682 | stdout = FileWrapper('w', stdout, False) | ||
683 | stderr = FileWrapper('w', stderr, False) | ||
684 | self._redirs = Redirections(stdin, stdout, stderr) | ||
685 | self._close_redirs = True | ||
686 | |||
687 | self._debugflags = list(debugflags) | ||
688 | self._logfile = sys.stderr | ||
689 | self._options = opts | ||
690 | |||
691 | def close(self): | ||
692 | """Must be called when the interpreter is no longer used.""" | ||
693 | script = self._env.traps.get('EXIT') | ||
694 | if script: | ||
695 | try: | ||
696 | self.execute_script(script=script) | ||
697 | except: | ||
698 | pass | ||
699 | |||
700 | if self._redirs is not None and self._close_redirs: | ||
701 | self._redirs.close() | ||
702 | self._redirs = None | ||
703 | |||
704 | def log(self, s): | ||
705 | self._logfile.write(s) | ||
706 | self._logfile.flush() | ||
707 | |||
708 | def __getitem__(self, key): | ||
709 | return self._env[key] | ||
710 | |||
711 | def __setitem__(self, key, value): | ||
712 | self._env[key] = value | ||
713 | |||
714 | def options(self): | ||
715 | return self._options | ||
716 | |||
717 | def redirect(self, redirs, ios): | ||
718 | def add_redir(io): | ||
719 | if isinstance(io, pyshyacc.IORedirect): | ||
720 | redirs.add(self, io.op, io.filename, io.io_number) | ||
721 | else: | ||
722 | redirs.add_here_document(self, io.name, io.content, io.io_number) | ||
723 | |||
724 | map(add_redir, ios) | ||
725 | return redirs | ||
726 | |||
727 | def execute_script(self, script=None, ast=None, sourced=False, | ||
728 | scriptpath=None): | ||
729 | """If script is not None, parse the input. Otherwise take the supplied | ||
730 | AST. Then execute the AST. | ||
731 | Return the script exit status. | ||
732 | """ | ||
733 | try: | ||
734 | if scriptpath is not None: | ||
735 | self._env['0'] = os.path.abspath(scriptpath) | ||
736 | |||
737 | if script is not None: | ||
738 | debug_parsing = ('debug-parsing' in self._debugflags) | ||
739 | cmds, script = pyshyacc.parse(script, True, debug_parsing) | ||
740 | if 'debug-tree' in self._debugflags: | ||
741 | pyshyacc.print_commands(cmds, self._logfile) | ||
742 | self._logfile.flush() | ||
743 | else: | ||
744 | cmds, script = ast, '' | ||
745 | |||
746 | status = 0 | ||
747 | for cmd in cmds: | ||
748 | try: | ||
749 | status = self.execute(cmd) | ||
750 | except ExitSignal, e: | ||
751 | if sourced: | ||
752 | raise | ||
753 | status = int(e.args[0]) | ||
754 | return status | ||
755 | except ShellError: | ||
756 | self._env['?'] = 1 | ||
757 | raise | ||
758 | if 'debug-utility' in self._debugflags or 'debug-cmd' in self._debugflags: | ||
759 | self.log('returncode ' + str(status)+ '\n') | ||
760 | return status | ||
761 | except CommandNotFound, e: | ||
762 | print >>self._redirs.stderr, str(e) | ||
763 | self._redirs.stderr.flush() | ||
764 | # Command not found by non-interactive shell | ||
765 | # return 127 | ||
766 | raise | ||
767 | except RedirectionError, e: | ||
768 | # TODO: should be handled depending on the utility status | ||
769 | print >>self._redirs.stderr, str(e) | ||
770 | self._redirs.stderr.flush() | ||
771 | # Command not found by non-interactive shell | ||
772 | # return 127 | ||
773 | raise | ||
774 | |||
775 | def dotcommand(self, env, args): | ||
776 | if len(args) < 1: | ||
777 | raise ShellError('. expects at least one argument') | ||
778 | path = args[0] | ||
779 | if '/' not in path: | ||
780 | found = env.find_in_path(args[0], True) | ||
781 | if found: | ||
782 | path = found[0] | ||
783 | script = file(path).read() | ||
784 | return self.execute_script(script=script, sourced=True) | ||
785 | |||
786 | def execute(self, token, redirs=None): | ||
787 | """Execute an AST subtree with supplied redirections overriding the default | ||
788 | interpreter ones. | ||
789 | Return the exit status. | ||
790 | """ | ||
791 | if not token: | ||
792 | return 0 | ||
793 | |||
794 | if redirs is None: | ||
795 | redirs = self._redirs | ||
796 | |||
797 | if isinstance(token, list): | ||
798 | # Commands sequence | ||
799 | res = 0 | ||
800 | for t in token: | ||
801 | res = self.execute(t, redirs) | ||
802 | return res | ||
803 | |||
804 | type, value = token | ||
805 | status = 0 | ||
806 | if type=='simple_command': | ||
807 | redirs_copy = redirs.clone() | ||
808 | try: | ||
809 | # TODO: define and handle command return values | ||
810 | # TODO: implement set -e | ||
811 | status = self._execute_simple_command(value, redirs_copy) | ||
812 | finally: | ||
813 | redirs_copy.close() | ||
814 | elif type=='pipeline': | ||
815 | status = self._execute_pipeline(value, redirs) | ||
816 | elif type=='and_or': | ||
817 | status = self._execute_and_or(value, redirs) | ||
818 | elif type=='for_clause': | ||
819 | status = self._execute_for_clause(value, redirs) | ||
820 | elif type=='while_clause': | ||
821 | status = self._execute_while_clause(value, redirs) | ||
822 | elif type=='function_definition': | ||
823 | status = self._execute_function_definition(value, redirs) | ||
824 | elif type=='brace_group': | ||
825 | status = self._execute_brace_group(value, redirs) | ||
826 | elif type=='if_clause': | ||
827 | status = self._execute_if_clause(value, redirs) | ||
828 | elif type=='subshell': | ||
829 | status = self.subshell(ast=value.cmds, redirs=redirs) | ||
830 | elif type=='async': | ||
831 | status = self._asynclist(value) | ||
832 | elif type=='redirect_list': | ||
833 | redirs_copy = self.redirect(redirs.clone(), value.redirs) | ||
834 | try: | ||
835 | status = self.execute(value.cmd, redirs_copy) | ||
836 | finally: | ||
837 | redirs_copy.close() | ||
838 | else: | ||
839 | raise NotImplementedError('Unsupported token type ' + type) | ||
840 | |||
841 | if status < 0: | ||
842 | status = 255 | ||
843 | return status | ||
844 | |||
845 | def _execute_if_clause(self, if_clause, redirs): | ||
846 | cond_status = self.execute(if_clause.cond, redirs) | ||
847 | if cond_status==0: | ||
848 | return self.execute(if_clause.if_cmds, redirs) | ||
849 | else: | ||
850 | return self.execute(if_clause.else_cmds, redirs) | ||
851 | |||
852 | def _execute_brace_group(self, group, redirs): | ||
853 | status = 0 | ||
854 | for cmd in group.cmds: | ||
855 | status = self.execute(cmd, redirs) | ||
856 | return status | ||
857 | |||
858 | def _execute_function_definition(self, fundef, redirs): | ||
859 | self._env.define_function(fundef.name, fundef.body) | ||
860 | return 0 | ||
861 | |||
862 | def _execute_while_clause(self, while_clause, redirs): | ||
863 | status = 0 | ||
864 | while 1: | ||
865 | cond_status = 0 | ||
866 | for cond in while_clause.condition: | ||
867 | cond_status = self.execute(cond, redirs) | ||
868 | |||
869 | if cond_status: | ||
870 | break | ||
871 | |||
872 | for cmd in while_clause.cmds: | ||
873 | status = self.execute(cmd, redirs) | ||
874 | |||
875 | return status | ||
876 | |||
877 | def _execute_for_clause(self, for_clause, redirs): | ||
878 | if not is_name(for_clause.name): | ||
879 | raise ShellSyntaxError('%s is not a valid name' % repr(for_clause.name)) | ||
880 | items = mappend(self.expand_token, for_clause.items) | ||
881 | |||
882 | status = 0 | ||
883 | for item in items: | ||
884 | self._env[for_clause.name] = item | ||
885 | for cmd in for_clause.cmds: | ||
886 | status = self.execute(cmd, redirs) | ||
887 | return status | ||
888 | |||
889 | def _execute_and_or(self, or_and, redirs): | ||
890 | res = self.execute(or_and.left, redirs) | ||
891 | if (or_and.op=='&&' and res==0) or (or_and.op!='&&' and res!=0): | ||
892 | res = self.execute(or_and.right, redirs) | ||
893 | return res | ||
894 | |||
895 | def _execute_pipeline(self, pipeline, redirs): | ||
896 | if len(pipeline.commands)==1: | ||
897 | status = self.execute(pipeline.commands[0], redirs) | ||
898 | else: | ||
899 | # Execute all commands one after the other | ||
900 | status = 0 | ||
901 | inpath, outpath = None, None | ||
902 | try: | ||
903 | # Command inputs and outputs cannot really be plugged together as done | ||
904 | # by a real shell. Run commands sequentially and chain their | ||
905 | # input/output through temporary files. | ||
906 | tmpfd, inpath = tempfile.mkstemp() | ||
907 | os.close(tmpfd) | ||
908 | tmpfd, outpath = tempfile.mkstemp() | ||
909 | os.close(tmpfd) | ||
910 | |||
911 | inpath = win32_to_unix_path(inpath) | ||
912 | outpath = win32_to_unix_path(outpath) | ||
913 | |||
914 | for i, cmd in enumerate(pipeline.commands): | ||
915 | call_redirs = redirs.clone() | ||
916 | try: | ||
917 | if i!=0: | ||
918 | call_redirs.add(self, '<', inpath) | ||
919 | if i!=len(pipeline.commands)-1: | ||
920 | call_redirs.add(self, '>', outpath) | ||
921 | |||
922 | status = self.execute(cmd, call_redirs) | ||
923 | |||
924 | # Chain inputs/outputs | ||
925 | inpath, outpath = outpath, inpath | ||
926 | finally: | ||
927 | call_redirs.close() | ||
928 | finally: | ||
929 | if inpath: os.remove(inpath) | ||
930 | if outpath: os.remove(outpath) | ||
931 | |||
932 | if pipeline.reverse_status: | ||
933 | status = int(not status) | ||
934 | self._env['?'] = status | ||
935 | return status | ||
936 | |||
937 | def _execute_function(self, name, args, interp, env, stdin, stdout, stderr, *others): | ||
938 | assert interp is self | ||
939 | |||
940 | func = env.get_function(name) | ||
941 | #Set positional parameters | ||
942 | prevargs = None | ||
943 | try: | ||
944 | prevargs = env.set_positional_args(args) | ||
945 | try: | ||
946 | redirs = Redirections(stdin.dup(), stdout.dup(), stderr.dup()) | ||
947 | try: | ||
948 | status = self.execute(func, redirs) | ||
949 | finally: | ||
950 | redirs.close() | ||
951 | except ReturnSignal, e: | ||
952 | status = int(e.args[0]) | ||
953 | env['?'] = status | ||
954 | return status | ||
955 | finally: | ||
956 | #Reset positional parameters | ||
957 | if prevargs is not None: | ||
958 | env.set_positional_args(prevargs) | ||
959 | |||
960 | def _execute_simple_command(self, token, redirs): | ||
961 | """Can raise ReturnSignal when return builtin is called, ExitSignal when | ||
962 | exit is called, and other shell exceptions upon builtin failures. | ||
963 | """ | ||
964 | debug_command = 'debug-cmd' in self._debugflags | ||
965 | if debug_command: | ||
966 | self.log('word' + repr(token.words) + '\n') | ||
967 | self.log('assigns' + repr(token.assigns) + '\n') | ||
968 | self.log('redirs' + repr(token.redirs) + '\n') | ||
969 | |||
970 | is_special = None | ||
971 | env = self._env | ||
972 | |||
973 | try: | ||
974 | # Word expansion | ||
975 | args = [] | ||
976 | for word in token.words: | ||
977 | args += self.expand_token(word) | ||
978 | if is_special is None and args: | ||
979 | is_special = env.is_function(args[0]) or \ | ||
980 | (args[0] in self.COMMANDS and self.COMMANDS[args[0]].is_special) | ||
981 | |||
982 | if debug_command: | ||
983 | self.log('_execute_simple_command' + str(args) + '\n') | ||
984 | |||
985 | if not args: | ||
986 | # Redirections happen in a subshell | ||
987 | redirs = redirs.clone() | ||
988 | elif not is_special: | ||
989 | env = self._env.clone() | ||
990 | |||
991 | # Redirections | ||
992 | self.redirect(redirs, token.redirs) | ||
993 | |||
994 | # Variables assignments | ||
995 | res = 0 | ||
996 | for type,(k,v) in token.assigns: | ||
997 | status, expanded = self.expand_variable((k,v)) | ||
998 | if status is not None: | ||
999 | res = status | ||
1000 | if args: | ||
1001 | env.export(k, expanded) | ||
1002 | else: | ||
1003 | env[k] = expanded | ||
1004 | |||
1005 | if args and args[0] in ('.', 'source'): | ||
1006 | res = self.dotcommand(env, args[1:]) | ||
1007 | elif args: | ||
1008 | if args[0] in self.COMMANDS: | ||
1009 | command = self.COMMANDS[args[0]] | ||
1010 | elif env.is_function(args[0]): | ||
1011 | command = Utility(self._execute_function, is_special=True) | ||
1012 | else: | ||
1013 | if not '/' in args[0].replace('\\', '/'): | ||
1014 | cmd = env.find_in_path(args[0]) | ||
1015 | if not cmd: | ||
1016 | # TODO: test error code on unknown command => 127 | ||
1017 | raise CommandNotFound('Unknown command: "%s"' % args[0]) | ||
1018 | else: | ||
1019 | # Handle commands like '/cygdrive/c/foo.bat' | ||
1020 | cmd = cygwin_to_windows_path(args[0]) | ||
1021 | if not os.path.exists(cmd): | ||
1022 | raise CommandNotFound('%s: No such file or directory' % args[0]) | ||
1023 | shebang = resolve_shebang(cmd) | ||
1024 | if shebang: | ||
1025 | cmd = shebang | ||
1026 | else: | ||
1027 | cmd = [cmd] | ||
1028 | args[0:1] = cmd | ||
1029 | command = Utility(builtin.run_command) | ||
1030 | |||
1031 | # Command execution | ||
1032 | if 'debug-cmd' in self._debugflags: | ||
1033 | self.log('redirections ' + str(redirs) + '\n') | ||
1034 | |||
1035 | res = command.func(args[0], args[1:], self, env, | ||
1036 | redirs.stdin(), redirs.stdout(), | ||
1037 | redirs.stderr(), self._debugflags) | ||
1038 | |||
1039 | if self._env.has_opt('-x'): | ||
1040 | # Trace command execution in shell environment | ||
1041 | # BUG: would be hard to reproduce a real shell behaviour since | ||
1042 | # the AST is not annotated with source lines/tokens. | ||
1043 | self._redirs.stdout().write(' '.join(args)) | ||
1044 | |||
1045 | except ReturnSignal: | ||
1046 | raise | ||
1047 | except ShellError, e: | ||
1048 | if is_special or isinstance(e, (ExitSignal, | ||
1049 | ShellSyntaxError, ExpansionError)): | ||
1050 | raise e | ||
1051 | self._redirs.stderr().write(str(e)+'\n') | ||
1052 | return 1 | ||
1053 | |||
1054 | return res | ||
1055 | |||
1056 | def expand_token(self, word): | ||
1057 | """Expand a word as specified in [2.6 Word Expansions]. Return the list | ||
1058 | of expanded words. | ||
1059 | """ | ||
1060 | status, wtrees = self._expand_word(word) | ||
1061 | return map(pyshlex.wordtree_as_string, wtrees) | ||
1062 | |||
1063 | def expand_variable(self, word): | ||
1064 | """Return a status code (or None if no command expansion occurred) | ||
1065 | and a single word. | ||
1066 | """ | ||
1067 | status, wtrees = self._expand_word(word, pathname=False, split=False) | ||
1068 | words = map(pyshlex.wordtree_as_string, wtrees) | ||
1069 | assert len(words)==1 | ||
1070 | return status, words[0] | ||
1071 | |||
1072 | def expand_here_document(self, word): | ||
1073 | """Return the expanded document as a single word. The here document is | ||
1074 | assumed to be unquoted. | ||
1075 | """ | ||
1076 | status, wtrees = self._expand_word(word, pathname=False, | ||
1077 | split=False, here_document=True) | ||
1078 | words = map(pyshlex.wordtree_as_string, wtrees) | ||
1079 | assert len(words)==1 | ||
1080 | return words[0] | ||
1081 | |||
1082 | def expand_redirection(self, word): | ||
1083 | """Return a single word.""" | ||
1084 | return self.expand_variable(word)[1] | ||
1085 | |||
1086 | def get_env(self): | ||
1087 | return self._env | ||
1088 | |||
1089 | def _expand_word(self, token, pathname=True, split=True, here_document=False): | ||
1090 | wtree = pyshlex.make_wordtree(token[1], here_document=here_document) | ||
1091 | |||
1092 | # TODO: implement tilde expansion | ||
1093 | def expand(wtree): | ||
1094 | """Return a pseudo wordtree: the tree or its subelements can be empty | ||
1095 | lists when no value result from the expansion. | ||
1096 | """ | ||
1097 | status = None | ||
1098 | for part in wtree: | ||
1099 | if not isinstance(part, list): | ||
1100 | continue | ||
1101 | if part[0]in ("'", '\\'): | ||
1102 | continue | ||
1103 | elif part[0] in ('`', '$('): | ||
1104 | status, result = self._expand_command(part) | ||
1105 | part[:] = result | ||
1106 | elif part[0] in ('$', '${'): | ||
1107 | part[:] = self._expand_parameter(part, wtree[0]=='"', split) | ||
1108 | elif part[0] in ('', '"'): | ||
1109 | status, result = expand(part) | ||
1110 | part[:] = result | ||
1111 | else: | ||
1112 | raise NotImplementedError('%s expansion is not implemented' | ||
1113 | % part[0]) | ||
1114 | # [] is returned when an expansion results in no field, | ||
1115 | # like an empty $@ | ||
1116 | wtree = [p for p in wtree if p != []] | ||
1117 | if len(wtree) < 3: | ||
1118 | return status, [] | ||
1119 | return status, wtree | ||
1120 | |||
1121 | status, wtree = expand(wtree) | ||
1122 | if len(wtree) == 0: | ||
1123 | return status, wtree | ||
1124 | wtree = pyshlex.normalize_wordtree(wtree) | ||
1125 | |||
1126 | if split: | ||
1127 | wtrees = self._split_fields(wtree) | ||
1128 | else: | ||
1129 | wtrees = [wtree] | ||
1130 | |||
1131 | if pathname: | ||
1132 | wtrees = mappend(self._expand_pathname, wtrees) | ||
1133 | |||
1134 | wtrees = map(self._remove_quotes, wtrees) | ||
1135 | return status, wtrees | ||
1136 | |||
1137 | def _expand_command(self, wtree): | ||
1138 | # BUG: there is something to do with backslashes and quoted | ||
1139 | # characters here | ||
1140 | command = pyshlex.wordtree_as_string(wtree[1:-1]) | ||
1141 | status, output = self.subshell_output(command) | ||
1142 | return status, ['', output, ''] | ||
1143 | |||
1144 | def _expand_parameter(self, wtree, quoted=False, split=False): | ||
1145 | """Return a valid wtree or an empty list when no parameter results.""" | ||
1146 | # Get the parameter name | ||
1147 | # TODO: implement weird expansion rules with ':' | ||
1148 | name = pyshlex.wordtree_as_string(wtree[1:-1]) | ||
1149 | if not is_name(name) and not is_special_param(name): | ||
1150 | raise ExpansionError('Bad substitution "%s"' % name) | ||
1151 | # TODO: implement special parameters | ||
1152 | if name in ('@', '*'): | ||
1153 | args = self._env.get_positional_args() | ||
1154 | if len(args) == 0: | ||
1155 | return [] | ||
1156 | if len(args)<2: | ||
1157 | return ['', ''.join(args), ''] | ||
1158 | |||
1159 | sep = self._env.get('IFS', '')[:1] | ||
1160 | if split and quoted and name=='@': | ||
1161 | # Introduce a new token to tell the caller that these parameters | ||
1162 | # cause a split as specified in 2.5.2 | ||
1163 | return ['@'] + args + [''] | ||
1164 | else: | ||
1165 | return ['', sep.join(args), ''] | ||
1166 | |||
1167 | return ['', self._env.get(name, ''), ''] | ||
1168 | |||
1169 | def _split_fields(self, wtree): | ||
1170 | def is_empty(split): | ||
1171 | return split==['', '', ''] | ||
1172 | |||
1173 | def split_positional(quoted): | ||
1174 | # Return a list of wtrees split according to positional parameter rules. | ||
1175 | # All remaining '@' groups are removed. | ||
1176 | assert quoted[0]=='"' | ||
1177 | |||
1178 | splits = [[]] | ||
1179 | for part in quoted: | ||
1180 | if not isinstance(part, list) or part[0]!='@': | ||
1181 | splits[-1].append(part) | ||
1182 | else: | ||
1183 | # Empty or single-argument lists were dealt with already | ||
1184 | assert len(part)>3 | ||
1185 | # First argument must join with the beginning part of the original word | ||
1186 | splits[-1].append(part[1]) | ||
1187 | # Create double-quoted expressions for every argument after the first | ||
1188 | for arg in part[2:-1]: | ||
1189 | splits[-1].append('"') | ||
1190 | splits.append(['"', arg]) | ||
1191 | return splits | ||
1192 | |||
1193 | # At this point, all expansions but pathnames have occurred. Only quoted | ||
1194 | # and positional sequences remain. Thus, all candidates for field splitting | ||
1195 | # are in the tree root, or are positional splits ('@') and lie in root | ||
1196 | # children. | ||
1197 | if not wtree or wtree[0] not in ('', '"'): | ||
1198 | # The whole token is quoted or empty, nothing to split | ||
1199 | return [wtree] | ||
1200 | |||
1201 | if wtree[0]=='"': | ||
1202 | wtree = ['', wtree, ''] | ||
1203 | |||
1204 | result = [['', '']] | ||
1205 | for part in wtree[1:-1]: | ||
1206 | if isinstance(part, list): | ||
1207 | if part[0]=='"': | ||
1208 | splits = split_positional(part) | ||
1209 | if len(splits)<=1: | ||
1210 | result[-1] += [part, ''] | ||
1211 | else: | ||
1212 | # Terminate the current split | ||
1213 | result[-1] += [splits[0], ''] | ||
1214 | result += splits[1:-1] | ||
1215 | # Create a new split | ||
1216 | result += [['', splits[-1], '']] | ||
1217 | else: | ||
1218 | result[-1] += [part, ''] | ||
1219 | else: | ||
1220 | splits = self._env.split_fields(part) | ||
1221 | if len(splits)<=1: | ||
1222 | # No split | ||
1223 | result[-1][-1] += part | ||
1224 | else: | ||
1225 | # Terminate the current resulting part and create a new one | ||
1226 | result[-1][-1] += splits[0] | ||
1227 | result[-1].append('') | ||
1228 | result += [['', r, ''] for r in splits[1:-1]] | ||
1229 | result += [['', splits[-1]]] | ||
1230 | result[-1].append('') | ||
1231 | |||
1232 | # Leading and trailing empty groups come from leading/trailing blanks | ||
1233 | if result and is_empty(result[-1]): | ||
1234 | result[-1:] = [] | ||
1235 | if result and is_empty(result[0]): | ||
1236 | result[:1] = [] | ||
1237 | return result | ||
1238 | |||
1239 | def _expand_pathname(self, wtree): | ||
1240 | """See [2.6.6 Pathname Expansion].""" | ||
1241 | if self._env.has_opt('-f'): | ||
1242 | return [wtree] | ||
1243 | |||
1244 | # All expansions have been performed, only quoted sequences should remain | ||
1245 | # in the tree. Generate the pattern by folding the tree, escaping special | ||
1246 | # characters when they appear quoted | ||
1247 | special_chars = '*?[]' | ||
1248 | |||
1249 | def make_pattern(wtree): | ||
1250 | subpattern = [] | ||
1251 | for part in wtree[1:-1]: | ||
1252 | if isinstance(part, list): | ||
1253 | part = make_pattern(part) | ||
1254 | elif wtree[0]!='': | ||
1255 | for c in part: | ||
1256 | # Meta-characters cannot be quoted | ||
1257 | if c in special_chars: | ||
1258 | raise GlobError() | ||
1259 | subpattern.append(part) | ||
1260 | return ''.join(subpattern) | ||
1261 | |||
1262 | def pwd_glob(pattern): | ||
1263 | cwd = os.getcwd() | ||
1264 | os.chdir(self._env['PWD']) | ||
1265 | try: | ||
1266 | return glob.glob(pattern) | ||
1267 | finally: | ||
1268 | os.chdir(cwd) | ||
1269 | |||
1270 | #TODO: check working directory issues here wrt relative patterns | ||
1271 | try: | ||
1272 | pattern = make_pattern(wtree) | ||
1273 | paths = pwd_glob(pattern) | ||
1274 | except GlobError: | ||
1275 | # BUG: Meta-characters were found in quoted sequences. They should | ||
1276 | # have been used literally but this is unsupported in the current glob module. | ||
1277 | # Instead we consider the whole tree must be used literally and | ||
1278 | # therefore there is no point in globbing. This is wrong when meta | ||
1279 | # characters are mixed with quoted meta in the same pattern like: | ||
1280 | # < foo*"py*" > | ||
1281 | paths = [] | ||
1282 | |||
1283 | if not paths: | ||
1284 | return [wtree] | ||
1285 | return [['', path, ''] for path in paths] | ||
1286 | |||
1287 | def _remove_quotes(self, wtree): | ||
1288 | """See [2.6.7 Quote Removal].""" | ||
1289 | |||
1290 | def unquote(wtree): | ||
1291 | unquoted = [] | ||
1292 | for part in wtree[1:-1]: | ||
1293 | if isinstance(part, list): | ||
1294 | part = unquote(part) | ||
1295 | unquoted.append(part) | ||
1296 | return ''.join(unquoted) | ||
1297 | |||
1298 | return ['', unquote(wtree), ''] | ||
1299 | |||
1300 | def subshell(self, script=None, ast=None, redirs=None): | ||
1301 | """Execute the script or AST in a subshell, with inherited redirections | ||
1302 | if redirs is not None. | ||
1303 | """ | ||
1304 | if redirs: | ||
1305 | sub_redirs = redirs | ||
1306 | else: | ||
1307 | sub_redirs = self._redirs.clone()  # no redirs supplied: clone the interpreter's own | ||
1308 | |||
1309 | subshell = None | ||
1310 | try: | ||
1311 | subshell = Interpreter(None, self._debugflags, self._env.clone(True), | ||
1312 | sub_redirs, opts=self._options) | ||
1313 | return subshell.execute_script(script, ast) | ||
1314 | finally: | ||
1315 | if not redirs: sub_redirs.close() | ||
1316 | if subshell: subshell.close() | ||
1317 | |||
1318 | def subshell_output(self, script): | ||
1319 | """Execute the script in a subshell and return the captured output.""" | ||
1320 | # Create temporary file to capture subshell output | ||
1321 | tmpfd, tmppath = tempfile.mkstemp() | ||
1322 | try: | ||
1323 | tmpfile = os.fdopen(tmpfd, 'wb') | ||
1324 | stdout = FileWrapper('w', tmpfile) | ||
1325 | |||
1326 | redirs = Redirections(self._redirs.stdin().dup(), | ||
1327 | stdout, | ||
1328 | self._redirs.stderr().dup()) | ||
1329 | try: | ||
1330 | status = self.subshell(script=script, redirs=redirs) | ||
1331 | finally: | ||
1332 | redirs.close() | ||
1333 | redirs = None | ||
1334 | |||
1335 | # Extract subshell standard output | ||
1336 | tmpfile = open(tmppath, 'rb') | ||
1337 | try: | ||
1338 | output = tmpfile.read() | ||
1339 | return status, output.rstrip('\n') | ||
1340 | finally: | ||
1341 | tmpfile.close() | ||
1342 | finally: | ||
1343 | os.remove(tmppath) | ||
1344 | |||
1345 | def _asynclist(self, cmd): | ||
1346 | args = (self._env.get_variables(), cmd) | ||
1347 | arg = encodeargs(args) | ||
1348 | assert len(arg) < 30*1024  # keep the encoded payload small enough for a command line | ||
1349 | cmd = ['pysh.bat', '--ast', '-c', arg] | ||
1350 | p = subprocess.Popen(cmd, cwd=self._env['PWD']) | ||
1351 | self._children[p.pid] = p | ||
1352 | self._env['!'] = p.pid | ||
1353 | return 0 | ||
1354 | |||
1355 | def wait(self, pids=None): | ||
1356 | if not pids: | ||
1357 | pids = self._children.keys() | ||
1358 | |||
1359 | status = 127 | ||
1360 | for pid in pids: | ||
1361 | if pid not in self._children: | ||
1362 | continue | ||
1363 | p = self._children.pop(pid) | ||
1364 | status = p.wait() | ||
1365 | |||
1366 | return status | ||
1367 | |||
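Taken together, Interpreter is driven in a parse-and-execute loop: construct it with a working directory, feed it script text through execute_script(), and always close() it so the EXIT trap runs and owned redirections are released. A hypothetical usage sketch (Python 2; the script text is made up and the default sys.std* wrappers are assumed):

    import os
    import interp

    ip = interp.Interpreter(os.getcwd(), debugflags=['debug-cmd'])
    try:
        status = ip.execute_script(script='FOO=bar\necho "$FOO"\n')  # returns the exit status
    finally:
        ip.close()  # runs the EXIT trap and closes interpreter-owned redirections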
diff --git a/bitbake/lib/bb/pysh/lsprof.py b/bitbake/lib/bb/pysh/lsprof.py new file mode 100644 index 0000000000..b1831c22a7 --- /dev/null +++ b/bitbake/lib/bb/pysh/lsprof.py | |||
@@ -0,0 +1,116 @@ | |||
1 | #! /usr/bin/env python | ||
2 | |||
3 | import sys | ||
4 | from _lsprof import Profiler, profiler_entry | ||
5 | |||
6 | __all__ = ['profile', 'Stats'] | ||
7 | |||
8 | def profile(f, *args, **kwds): | ||
9 | """Run f(*args, **kwds) under the profiler and return its Stats.""" | ||
10 | p = Profiler() | ||
11 | p.enable(subcalls=True, builtins=True) | ||
12 | try: | ||
13 | f(*args, **kwds) | ||
14 | finally: | ||
15 | p.disable() | ||
16 | return Stats(p.getstats()) | ||
17 | |||
18 | |||
19 | class Stats(object): | ||
20 | """Hold profiler entries and provide sorting and pretty-printing.""" | ||
21 | |||
22 | def __init__(self, data): | ||
23 | self.data = data | ||
24 | |||
25 | def sort(self, crit="inlinetime"): | ||
26 | """Sort entries and their sub-calls by the given profiler_entry field.""" | ||
27 | if crit not in profiler_entry.__dict__: | ||
28 | raise ValueError("Can't sort by %s" % crit) | ||
29 | self.data.sort(lambda b, a: cmp(getattr(a, crit), | ||
30 | getattr(b, crit))) | ||
31 | for e in self.data: | ||
32 | if e.calls: | ||
33 | e.calls.sort(lambda b, a: cmp(getattr(a, crit), | ||
34 | getattr(b, crit))) | ||
35 | |||
36 | def pprint(self, top=None, file=None, limit=None, climit=None): | ||
37 | """Write a table of the profile entries (and optionally their sub-calls) to file.""" | ||
38 | if file is None: | ||
39 | file = sys.stdout | ||
40 | d = self.data | ||
41 | if top is not None: | ||
42 | d = d[:top] | ||
43 | cols = "% 12s %12s %11.4f %11.4f %s\n" | ||
44 | hcols = "% 12s %12s %12s %12s %s\n" | ||
45 | cols2 = "+%12s %12s %11.4f %11.4f + %s\n" | ||
46 | file.write(hcols % ("CallCount", "Recursive", "Total(ms)", | ||
47 | "Inline(ms)", "module:lineno(function)")) | ||
48 | count = 0 | ||
49 | for e in d: | ||
50 | file.write(cols % (e.callcount, e.reccallcount, e.totaltime, | ||
51 | e.inlinetime, label(e.code))) | ||
52 | count += 1 | ||
53 | if limit is not None and count == limit: | ||
54 | return | ||
55 | ccount = 0 | ||
56 | if e.calls: | ||
57 | for se in e.calls: | ||
58 | file.write(cols % ("+%s" % se.callcount, se.reccallcount, | ||
59 | se.totaltime, se.inlinetime, | ||
60 | "+%s" % label(se.code))) | ||
61 | count += 1 | ||
62 | ccount += 1 | ||
63 | if limit is not None and count == limit: | ||
64 | return | ||
65 | if climit is not None and ccount == climit: | ||
66 | break | ||
67 | |||
68 | def freeze(self): | ||
69 | """Replace all references to code objects with string | ||
70 | descriptions; this makes it possible to pickle the instance.""" | ||
71 | |||
72 | # this code is probably rather ickier than it needs to be! | ||
73 | for i in range(len(self.data)): | ||
74 | e = self.data[i] | ||
75 | if not isinstance(e.code, str): | ||
76 | self.data[i] = type(e)((label(e.code),) + e[1:]) | ||
77 | if e.calls: | ||
78 | for j in range(len(e.calls)): | ||
79 | se = e.calls[j] | ||
80 | if not isinstance(se.code, str): | ||
81 | e.calls[j] = type(se)((label(se.code),) + se[1:]) | ||
82 | |||
83 | _fn2mod = {} | ||
84 | |||
85 | def label(code): | ||
86 | if isinstance(code, str): | ||
87 | return code | ||
88 | try: | ||
89 | mname = _fn2mod[code.co_filename] | ||
90 | except KeyError: | ||
91 | for k, v in sys.modules.items(): | ||
92 | if v is None: | ||
93 | continue | ||
94 | if not hasattr(v, '__file__'): | ||
95 | continue | ||
96 | if not isinstance(v.__file__, str): | ||
97 | continue | ||
98 | if v.__file__.startswith(code.co_filename): | ||
99 | mname = _fn2mod[code.co_filename] = k | ||
100 | break | ||
101 | else: | ||
102 | mname = _fn2mod[code.co_filename] = '<%s>'%code.co_filename | ||
103 | |||
104 | return '%s:%d(%s)' % (mname, code.co_firstlineno, code.co_name) | ||
105 | |||
106 | |||
107 | if __name__ == '__main__': | ||
108 | import os | ||
109 | sys.argv = sys.argv[1:] | ||
110 | if not sys.argv: | ||
111 | print >> sys.stderr, "usage: lsprof.py <script> <arguments...>" | ||
112 | sys.exit(2) | ||
113 | sys.path.insert(0, os.path.abspath(os.path.dirname(sys.argv[0]))) | ||
114 | stats = profile(execfile, sys.argv[0], globals(), locals()) | ||
115 | stats.sort() | ||
116 | stats.pprint() | ||
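The module wraps the raw _lsprof profiler in a small convenience API: profile() runs a callable and returns a Stats object whose entries can then be sorted on any profiler_entry field and printed as a table. A hypothetical sketch (Python 2 with the C _lsprof module available; fib is a made-up example function):

    def fib(n):
        return n if n < 2 else fib(n - 1) + fib(n - 2)

    stats = profile(fib, 20)          # returns a Stats instance
    stats.sort('totaltime')           # any profiler_entry attribute name works
    stats.pprint(top=10, climit=3)    # ten most expensive entries, three sub-calls each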
diff --git a/bitbake/lib/bb/pysh/pysh.py b/bitbake/lib/bb/pysh/pysh.py new file mode 100644 index 0000000000..b4e6145b51 --- /dev/null +++ b/bitbake/lib/bb/pysh/pysh.py | |||
@@ -0,0 +1,167 @@ | |||
1 | # pysh.py - command processing for pysh. | ||
2 | # | ||
3 | # Copyright 2007 Patrick Mezard | ||
4 | # | ||
5 | # This software may be used and distributed according to the terms | ||
6 | # of the GNU General Public License, incorporated herein by reference. | ||
7 | |||
8 | import optparse | ||
9 | import os | ||
10 | import sys | ||
11 | |||
12 | import interp | ||
13 | |||
14 | SH_OPT = optparse.OptionParser(prog='pysh', usage="%prog [OPTIONS]", version='0.1') | ||
15 | SH_OPT.add_option('-c', action='store_true', dest='command_string', default=None, | ||
16 | help='A string that shall be interpreted by the shell as one or more commands') | ||
17 | SH_OPT.add_option('--redirect-to', dest='redirect_to', default=None, | ||
18 | help='Redirect script commands stdout and stderr to the specified file') | ||
19 | # See utility_command in builtin.py about the reason for this flag. | ||
20 | SH_OPT.add_option('--redirected', dest='redirected', action='store_true', default=False, | ||
21 | help='Tell the interpreter that stdout and stderr are actually the same objects, which is really stdout') | ||
22 | SH_OPT.add_option('--debug-parsing', action='store_true', dest='debug_parsing', default=False, | ||
23 | help='Trace PLY execution') | ||
24 | SH_OPT.add_option('--debug-tree', action='store_true', dest='debug_tree', default=False, | ||
25 | help='Display the generated syntax tree.') | ||
26 | SH_OPT.add_option('--debug-cmd', action='store_true', dest='debug_cmd', default=False, | ||
27 | help='Trace command execution before parameters expansion and exit status.') | ||
28 | SH_OPT.add_option('--debug-utility', action='store_true', dest='debug_utility', default=False, | ||
29 | help='Trace utility calls, after parameters expansions') | ||
30 | SH_OPT.add_option('--ast', action='store_true', dest='ast', default=False, | ||
31 | help='Encoded commands to execute in a subprocess') | ||
32 | SH_OPT.add_option('--profile', action='store_true', default=False, | ||
33 | help='Profile pysh run') | ||
34 | |||
35 | |||
36 | def split_args(args): | ||
37 | # Separate shell arguments from command ones | ||
38 | # Just stop at the first argument not starting with a dash. I know, this is completely broken: | ||
39 | # it ignores files starting with a dash and may mistake option values for the command file. This is not | ||
40 | # supposed to happen for now. | ||
41 | command_index = len(args) | ||
42 | for i,arg in enumerate(args): | ||
43 | if not arg.startswith('-'): | ||
44 | command_index = i | ||
45 | break | ||
46 | |||
47 | return args[:command_index], args[command_index:] | ||
48 | |||
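A quick sketch of the split performed above: everything up to the first non-dash argument is treated as shell options, the rest as the command and its arguments (the sample argv is made up):

    shargs, cmdargs = split_args(['--debug-cmd', 'script.sh', 'arg'])
    assert shargs == ['--debug-cmd']
    assert cmdargs == ['script.sh', 'arg']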
49 | |||
50 | def fixenv(env): | ||
51 | path = env.get('PATH') | ||
52 | if path is not None: | ||
53 | parts = path.split(os.pathsep) | ||
54 | # Remove Windows utilities from PATH, they are useless at best and | ||
55 | # some of them (find) may be confused with other utilities. | ||
56 | parts = [p for p in parts if 'system32' not in p.lower()] | ||
57 | env['PATH'] = os.pathsep.join(parts) | ||
58 | if env.get('HOME') is None: | ||
59 | # Several utilities, including cvsps, cannot work without | ||
60 | # a defined HOME directory. | ||
61 | env['HOME'] = os.path.expanduser('~') | ||
62 | return env | ||
63 | |||
64 | def _sh(cwd, shargs, cmdargs, options, debugflags=None, env=None): | ||
65 | if os.environ.get('PYSH_TEXT') != '1': | ||
66 | import msvcrt | ||
67 | for fp in (sys.stdin, sys.stdout, sys.stderr): | ||
68 | msvcrt.setmode(fp.fileno(), os.O_BINARY) | ||
69 | |||
70 | hgbin = os.environ.get('PYSH_HGTEXT') != '1' | ||
71 | |||
72 | if debugflags is None: | ||
73 | debugflags = [] | ||
74 | if options.debug_parsing: debugflags.append('debug-parsing') | ||
75 | if options.debug_utility: debugflags.append('debug-utility') | ||
76 | if options.debug_cmd: debugflags.append('debug-cmd') | ||
77 | if options.debug_tree: debugflags.append('debug-tree') | ||
78 | |||
79 | if env is None: | ||
80 | env = fixenv(dict(os.environ)) | ||
81 | if cwd is None: | ||
82 | cwd = os.getcwd() | ||
83 | |||
84 | if not cmdargs: | ||
85 | # Nothing to do | ||
86 | return 0 | ||
87 | |||
88 | ast = None | ||
89 | command_file = None | ||
90 | if options.command_string: | ||
91 | input = cmdargs[0] | ||
92 | if not options.ast: | ||
93 | input += '\n' | ||
94 | else: | ||
95 | args, input = interp.decodeargs(input), None | ||
96 | env, ast = args | ||
97 | cwd = env.get('PWD', cwd) | ||
98 | else: | ||
99 | command_file = cmdargs[0] | ||
100 | arguments = cmdargs[1:] | ||
101 | |||
102 | prefix = interp.resolve_shebang(command_file, ignoreshell=True) | ||
103 | if prefix: | ||
104 | input = ' '.join(prefix + [command_file] + arguments) | ||
105 | else: | ||
106 | # Read commands from file | ||
107 | f = file(command_file) | ||
108 | try: | ||
109 | # Trailing newline to help the parser | ||
110 | input = f.read() + '\n' | ||
111 | finally: | ||
112 | f.close() | ||
113 | |||
114 | redirect = None | ||
115 | try: | ||
116 | if options.redirected: | ||
117 | stdout = sys.stdout | ||
118 | stderr = stdout | ||
119 | elif options.redirect_to: | ||
120 | redirect = open(options.redirect_to, 'wb') | ||
121 | stdout = redirect | ||
122 | stderr = redirect | ||
123 | else: | ||
124 | stdout = sys.stdout | ||
125 | stderr = sys.stderr | ||
126 | |||
127 | # TODO: set arguments to environment variables | ||
128 | opts = interp.Options() | ||
129 | opts.hgbinary = hgbin | ||
130 | ip = interp.Interpreter(cwd, debugflags, stdout=stdout, stderr=stderr, | ||
131 | opts=opts) | ||
132 | try: | ||
133 | # Export given environment in shell object | ||
134 | for k,v in env.iteritems(): | ||
135 | ip.get_env().export(k,v) | ||
136 | return ip.execute_script(input, ast, scriptpath=command_file) | ||
137 | finally: | ||
138 | ip.close() | ||
139 | finally: | ||
140 | if redirect is not None: | ||
141 | redirect.close() | ||
142 | |||
143 | def sh(cwd=None, args=None, debugflags=None, env=None): | ||
144 | if args is None: | ||
145 | args = sys.argv[1:] | ||
146 | shargs, cmdargs = split_args(args) | ||
147 | options, shargs = SH_OPT.parse_args(shargs) | ||
148 | |||
149 | if options.profile: | ||
150 | import lsprof | ||
151 | p = lsprof.Profiler() | ||
152 | p.enable(subcalls=True) | ||
153 | try: | ||
154 | return _sh(cwd, shargs, cmdargs, options, debugflags, env) | ||
155 | finally: | ||
156 | p.disable() | ||
157 | stats = lsprof.Stats(p.getstats()) | ||
158 | stats.sort() | ||
159 | stats.pprint(top=10, file=sys.stderr, climit=5) | ||
160 | else: | ||
161 | return _sh(cwd, shargs, cmdargs, options, debugflags, env) | ||
162 | |||
163 | def main(): | ||
164 | sys.exit(sh()) | ||
165 | |||
166 | if __name__=='__main__': | ||
167 | main() | ||
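The sh() entry point splits argv into shell options and command arguments, then hands everything to _sh(), which builds the Interpreter. A hypothetical invocation sketch equivalent to running `pysh -c 'echo hello'` (setting PYSH_TEXT=1 skips the msvcrt binary-mode setup in _sh(), so this also works outside Windows):

    import os
    import pysh

    os.environ['PYSH_TEXT'] = '1'     # avoid the msvcrt import in _sh()
    status = pysh.sh(args=['-c', 'echo hello'])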
diff --git a/bitbake/lib/bb/pysh/pyshlex.py b/bitbake/lib/bb/pysh/pyshlex.py new file mode 100644 index 0000000000..b977b5e869 --- /dev/null +++ b/bitbake/lib/bb/pysh/pyshlex.py | |||
@@ -0,0 +1,888 @@ | |||
1 | # pyshlex.py - PLY compatible lexer for pysh. | ||
2 | # | ||
3 | # Copyright 2007 Patrick Mezard | ||
4 | # | ||
5 | # This software may be used and distributed according to the terms | ||
6 | # of the GNU General Public License, incorporated herein by reference. | ||
7 | |||
8 | # TODO: | ||
9 | # - review all "char in 'abc'" snippets: the empty string can be matched | ||
10 | # - test line continuations within quoted/expansion strings | ||
11 | # - eof is buggy wrt sublexers | ||
12 | # - the lexer cannot really work in pull mode as it would be required to run | ||
13 | # PLY in pull mode. It was designed to work incrementally and it would not be | ||
14 | # that hard to enable pull mode. | ||
15 | import re | ||
16 | try: | ||
17 | s = set() | ||
18 | del s | ||
19 | except NameError: | ||
20 | from sets import Set as set | ||
21 | |||
22 | from ply import lex | ||
23 | from sherrors import * | ||
24 | |||
25 | class NeedMore(Exception): | ||
26 | pass | ||
27 | |||
28 | def is_blank(c): | ||
29 | return c in (' ', '\t') | ||
30 | |||
31 | _RE_DIGITS = re.compile(r'^\d+$') | ||
32 | |||
33 | def are_digits(s): | ||
34 | return _RE_DIGITS.search(s) is not None | ||
35 | |||
36 | _OPERATORS = dict([ | ||
37 | ('&&', 'AND_IF'), | ||
38 | ('||', 'OR_IF'), | ||
39 | (';;', 'DSEMI'), | ||
40 | ('<<', 'DLESS'), | ||
41 | ('>>', 'DGREAT'), | ||
42 | ('<&', 'LESSAND'), | ||
43 | ('>&', 'GREATAND'), | ||
44 | ('<>', 'LESSGREAT'), | ||
45 | ('<<-', 'DLESSDASH'), | ||
46 | ('>|', 'CLOBBER'), | ||
47 | ('&', 'AMP'), | ||
48 | (';', 'COMMA'), | ||
49 | ('<', 'LESS'), | ||
50 | ('>', 'GREATER'), | ||
51 | ('(', 'LPARENS'), | ||
52 | (')', 'RPARENS'), | ||
53 | ]) | ||
54 | |||
55 | #Make a function to silence pychecker "Local variable shadows global" | ||
56 | def make_partial_ops(): | ||
57 | partials = {} | ||
58 | for k in _OPERATORS: | ||
59 | for i in range(1, len(k)+1): | ||
60 | partials[k[:i]] = None | ||
61 | return partials | ||
62 | |||
63 | _PARTIAL_OPERATORS = make_partial_ops() | ||
64 | |||
65 | def is_partial_op(s): | ||
66 | """Return True if s matches a non-empty subpart of an operator starting | ||
67 | at its first character. | ||
68 | """ | ||
69 | return s in _PARTIAL_OPERATORS | ||
70 | |||
71 | def is_op(s): | ||
72 | """If s matches an operator, returns the operator identifier. Return None | ||
73 | otherwise. | ||
74 | """ | ||
75 | return _OPERATORS.get(s) | ||
76 | |||
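A short sketch of how these two lookups are used while delimiting operators: the lexer keeps extending the current token as long as it is still a prefix of some operator, then emits the longest full match (values follow the _OPERATORS table above):

    assert is_op('&&') == 'AND_IF'
    assert is_op('&') == 'AMP'
    assert is_op('&&&') is None        # not an operator
    assert is_partial_op('<<')         # prefix of '<<' and '<<-'
    assert not is_partial_op('-')      # no operator starts with '-'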
77 | _RESERVEDS = dict([ | ||
78 | ('if', 'If'), | ||
79 | ('then', 'Then'), | ||
80 | ('else', 'Else'), | ||
81 | ('elif', 'Elif'), | ||
82 | ('fi', 'Fi'), | ||
83 | ('do', 'Do'), | ||
84 | ('done', 'Done'), | ||
85 | ('case', 'Case'), | ||
86 | ('esac', 'Esac'), | ||
87 | ('while', 'While'), | ||
88 | ('until', 'Until'), | ||
89 | ('for', 'For'), | ||
90 | ('{', 'Lbrace'), | ||
91 | ('}', 'Rbrace'), | ||
92 | ('!', 'Bang'), | ||
93 | ('in', 'In'), | ||
94 | ('|', 'PIPE'), | ||
95 | ]) | ||
96 | |||
97 | def get_reserved(s): | ||
98 | return _RESERVEDS.get(s) | ||
99 | |||
100 | _RE_NAME = re.compile(r'^[0-9a-zA-Z_]+$') | ||
101 | |||
102 | def is_name(s): | ||
103 | return _RE_NAME.search(s) is not None | ||
104 | |||
105 | def find_chars(seq, chars): | ||
106 | for i,v in enumerate(seq): | ||
107 | if v in chars: | ||
108 | return i,v | ||
109 | return -1, None | ||
110 | |||
111 | class WordLexer: | ||
112 | """WordLexer parses quoted or expansion expressions and returns an expression | ||
113 | tree. The input string can be any well-formed sequence beginning with a quoting | ||
114 | or expansion character. Embedded expressions are handled recursively. The | ||
115 | resulting tree is made of lists and strings. Lists represent quoted or | ||
116 | expansion expressions. Each list's first element is the opening separator and | ||
117 | the last one the closing separator. In between can be any number of strings | ||
118 | or lists for sub-expressions. Non-quoted/expansion expressions can be written as | ||
119 | strings or as lists with empty strings as starting and ending delimiters. | ||
120 | """ | ||
121 | |||
122 | NAME_CHARSET = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_' | ||
123 | NAME_CHARSET = dict(zip(NAME_CHARSET, NAME_CHARSET)) | ||
124 | |||
125 | SPECIAL_CHARSET = '@*#?-$!0' | ||
126 | |||
127 | #Characters which can be escaped depends on the current delimiters | ||
128 | ESCAPABLE = { | ||
129 | '`': set(['$', '\\', '`']), | ||
130 | '"': set(['$', '\\', '`', '"']), | ||
131 | "'": set(), | ||
132 | } | ||
133 | |||
134 | def __init__(self, heredoc = False): | ||
135 | # _buffer is the unprocessed input characters buffer | ||
136 | self._buffer = [] | ||
137 | # _stack is empty or contains a quoted list being processed | ||
138 | # (this is the DFS path to the quoted expression being evaluated). | ||
139 | self._stack = [] | ||
140 | self._escapable = None | ||
141 | # True when parsing unquoted here documents | ||
142 | self._heredoc = heredoc | ||
143 | |||
144 | def add(self, data, eof=False): | ||
145 | """Feed the lexer with more data. If the quoted expression can be | ||
146 | delimited, return a tuple (expr, remaining) containing the expression | ||
147 | tree and the unconsumed data. | ||
148 | Otherwise, raise NeedMore. | ||
149 | """ | ||
150 | self._buffer += list(data) | ||
151 | self._parse(eof) | ||
152 | |||
153 | result = self._stack[0] | ||
154 | remaining = ''.join(self._buffer) | ||
155 | self._stack = [] | ||
156 | self._buffer = [] | ||
157 | return result, remaining | ||
158 | |||
159 | def _is_escapable(self, c, delim=None): | ||
160 | if delim is None: | ||
161 | if self._heredoc: | ||
162 | # Backslashes work as if they were double quoted in unquoted | ||
163 | # here-documents | ||
164 | delim = '"' | ||
165 | else: | ||
166 | if len(self._stack)<=1: | ||
167 | return True | ||
168 | delim = self._stack[-2][0] | ||
169 | |||
170 | escapables = self.ESCAPABLE.get(delim, None) | ||
171 | return escapables is None or c in escapables | ||
172 | |||
173 | def _parse_squote(self, buf, result, eof): | ||
174 | if not buf: | ||
175 | raise NeedMore() | ||
176 | try: | ||
177 | pos = buf.index("'") | ||
178 | except ValueError: | ||
179 | raise NeedMore() | ||
180 | result[-1] += ''.join(buf[:pos]) | ||
181 | result += ["'"] | ||
182 | return pos+1, True | ||
183 | |||
184 | def _parse_bquote(self, buf, result, eof): | ||
185 | if not buf: | ||
186 | raise NeedMore() | ||
187 | |||
188 | if buf[0]=='\n': | ||
189 | #Remove line continuations | ||
190 | result[:] = ['', '', ''] | ||
191 | elif self._is_escapable(buf[0]): | ||
192 | result[-1] += buf[0] | ||
193 | result += [''] | ||
194 | else: | ||
195 | #Keep as such | ||
196 | result[:] = ['', '\\'+buf[0], ''] | ||
197 | |||
198 | return 1, True | ||
199 | |||
200 | def _parse_dquote(self, buf, result, eof): | ||
201 | if not buf: | ||
202 | raise NeedMore() | ||
203 | pos, sep = find_chars(buf, '$\\`"') | ||
204 | if pos==-1: | ||
205 | raise NeedMore() | ||
206 | |||
207 | result[-1] += ''.join(buf[:pos]) | ||
208 | if sep=='"': | ||
209 | result += ['"'] | ||
210 | return pos+1, True | ||
211 | else: | ||
212 | #Keep everything until the separator and defer processing | ||
213 | return pos, False | ||
214 | |||
215 | def _parse_command(self, buf, result, eof): | ||
216 | if not buf: | ||
217 | raise NeedMore() | ||
218 | |||
219 | chars = '$\\`"\'' | ||
220 | if result[0] == '$(': | ||
221 | chars += ')' | ||
222 | pos, sep = find_chars(buf, chars) | ||
223 | if pos == -1: | ||
224 | raise NeedMore() | ||
225 | |||
226 | result[-1] += ''.join(buf[:pos]) | ||
227 | if (result[0]=='$(' and sep==')') or (result[0]=='`' and sep=='`'): | ||
228 | result += [sep] | ||
229 | return pos+1, True | ||
230 | else: | ||
231 | return pos, False | ||
232 | |||
233 | def _parse_parameter(self, buf, result, eof): | ||
234 | if not buf: | ||
235 | raise NeedMore() | ||
236 | |||
237 | pos, sep = find_chars(buf, '$\\`"\'}') | ||
238 | if pos==-1: | ||
239 | raise NeedMore() | ||
240 | |||
241 | result[-1] += ''.join(buf[:pos]) | ||
242 | if sep=='}': | ||
243 | result += [sep] | ||
244 | return pos+1, True | ||
245 | else: | ||
246 | return pos, False | ||
247 | |||
248 | def _parse_dollar(self, buf, result, eof): | ||
249 | sep = result[0] | ||
250 | if sep=='$': | ||
251 | if not buf: | ||
252 | #TODO: handle empty $ | ||
253 | raise NeedMore() | ||
254 | if buf[0]=='(': | ||
255 | if len(buf)==1: | ||
256 | raise NeedMore() | ||
257 | |||
258 | if buf[1]=='(': | ||
259 | result[0] = '$((' | ||
260 | buf[:2] = [] | ||
261 | else: | ||
262 | result[0] = '$(' | ||
263 | buf[:1] = [] | ||
264 | |||
265 | elif buf[0]=='{': | ||
266 | result[0] = '${' | ||
267 | buf[:1] = [] | ||
268 | else: | ||
269 | if buf[0] in self.SPECIAL_CHARSET: | ||
270 | result[-1] = buf[0] | ||
271 | read = 1 | ||
272 | else: | ||
273 | for read,c in enumerate(buf): | ||
274 | if c not in self.NAME_CHARSET: | ||
275 | break | ||
276 | else: | ||
277 | if not eof: | ||
278 | raise NeedMore() | ||
279 | read += 1 | ||
280 | |||
281 | result[-1] += ''.join(buf[0:read]) | ||
282 | |||
283 | if not result[-1]: | ||
284 | result[:] = ['', result[0], ''] | ||
285 | else: | ||
286 | result += [''] | ||
287 | return read,True | ||
288 | |||
289 | sep = result[0] | ||
290 | if sep=='$(': | ||
291 | parsefunc = self._parse_command | ||
292 | elif sep=='${': | ||
293 | parsefunc = self._parse_parameter | ||
294 | else: | ||
295 | raise NotImplementedError() | ||
296 | |||
297 | pos, closed = parsefunc(buf, result, eof) | ||
298 | return pos, closed | ||
299 | |||
300 | def _parse(self, eof): | ||
301 | buf = self._buffer | ||
302 | stack = self._stack | ||
303 | recurse = False | ||
304 | |||
305 | while 1: | ||
306 | if not stack or recurse: | ||
307 | if not buf: | ||
308 | raise NeedMore() | ||
309 | if buf[0] not in ('"\\`$\''): | ||
310 | raise ShellSyntaxError('Invalid quoted string sequence') | ||
311 | stack.append([buf[0], '']) | ||
312 | buf[:1] = [] | ||
313 | recurse = False | ||
314 | |||
315 | result = stack[-1] | ||
316 | if result[0]=="'": | ||
317 | parsefunc = self._parse_squote | ||
318 | elif result[0]=='\\': | ||
319 | parsefunc = self._parse_bquote | ||
320 | elif result[0]=='"': | ||
321 | parsefunc = self._parse_dquote | ||
322 | elif result[0]=='`': | ||
323 | parsefunc = self._parse_command | ||
324 | elif result[0][0]=='$': | ||
325 | parsefunc = self._parse_dollar | ||
326 | else: | ||
327 | raise NotImplementedError() | ||
328 | |||
329 | read, closed = parsefunc(buf, result, eof) | ||
330 | |||
331 | buf[:read] = [] | ||
332 | if closed: | ||
333 | if len(stack)>1: | ||
334 | #Merge in parent expression | ||
335 | parsed = stack.pop() | ||
336 | stack[-1] += [parsed] | ||
337 | stack[-1] += [''] | ||
338 | else: | ||
339 | break | ||
340 | else: | ||
341 | recurse = True | ||
342 | |||
343 | def normalize_wordtree(wtree): | ||
344 | """Fold back every literal sequence (delimited with empty strings) into | ||
345 | parent sequence. | ||
346 | """ | ||
347 | def normalize(wtree): | ||
348 | result = [] | ||
349 | for part in wtree[1:-1]: | ||
350 | if isinstance(part, list): | ||
351 | part = normalize(part) | ||
352 | if part[0]=='': | ||
353 | #Move the part content back at current level | ||
354 | result += part[1:-1] | ||
355 | continue | ||
356 | elif not part: | ||
357 | #Remove empty strings | ||
358 | continue | ||
359 | result.append(part) | ||
360 | if not result: | ||
361 | result = [''] | ||
362 | return [wtree[0]] + result + [wtree[-1]] | ||
363 | |||
364 | return normalize(wtree) | ||
365 | |||
366 | |||
367 | def make_wordtree(token, here_document=False): | ||
368 | """Parse a delimited token and return a tree similar to the ones returned by | ||
369 | WordLexer. token may contain any combinations of expansion/quoted fields and | ||
370 | non-ones. | ||
371 | """ | ||
372 | tree = [''] | ||
373 | remaining = token | ||
374 | delimiters = '\\$`' | ||
375 | if not here_document: | ||
376 | delimiters += '\'"' | ||
377 | |||
378 | while 1: | ||
379 | pos, sep = find_chars(remaining, delimiters) | ||
380 | if pos==-1: | ||
381 | tree += [remaining, ''] | ||
382 | return normalize_wordtree(tree) | ||
383 | tree.append(remaining[:pos]) | ||
384 | remaining = remaining[pos:] | ||
385 | |||
386 | try: | ||
387 | result, remaining = WordLexer(heredoc = here_document).add(remaining, True) | ||
388 | except NeedMore: | ||
389 | raise ShellSyntaxError('Invalid token "%s"' % token) | ||
390 | tree.append(result) | ||
391 | |||
392 | |||
393 | def wordtree_as_string(wtree): | ||
394 | """Rewrite an expression tree generated by make_wordtree as string.""" | ||
395 | def visit(node, output): | ||
396 | for child in node: | ||
397 | if isinstance(child, list): | ||
398 | visit(child, output) | ||
399 | else: | ||
400 | output.append(child) | ||
401 | |||
402 | output = [] | ||
403 | visit(wtree, output) | ||
404 | return ''.join(output) | ||
405 | |||
406 | |||
407 | def unquote_wordtree(wtree): | ||
408 | """Fold the word tree while removing quotes everywhere. Other expansion | ||
409 | sequences are joined as such. | ||
410 | """ | ||
411 | def unquote(wtree): | ||
412 | unquoted = [] | ||
413 | if wtree[0] in ('', "'", '"', '\\'): | ||
414 | wtree = wtree[1:-1] | ||
415 | |||
416 | for part in wtree: | ||
417 | if isinstance(part, list): | ||
418 | part = unquote(part) | ||
419 | unquoted.append(part) | ||
420 | return ''.join(unquoted) | ||
421 | |||
422 | return unquote(wtree) | ||
423 | |||
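A small sketch of the three helpers above working together: the tree produced by make_wordtree() keeps every character of the token, so folding it back with wordtree_as_string() reproduces the input, while unquote_wordtree() drops only the quoting delimiters (the sample token is made up):

    token = 'say "hello $USER"'
    tree = make_wordtree(token)
    assert wordtree_as_string(tree) == token
    assert unquote_wordtree(tree) == 'say hello $USER'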
424 | |||
425 | class HereDocLexer: | ||
426 | """HereDocLexer delimits the here-document content, from just after the starting | ||
427 | newline (not included) up to the closing delimiter line (included). | ||
428 | """ | ||
429 | def __init__(self, op, delim): | ||
430 | assert op in ('<<', '<<-') | ||
431 | if not delim: | ||
432 | raise ShellSyntaxError('invalid here document delimiter %s' % str(delim)) | ||
433 | |||
434 | self._op = op | ||
435 | self._delim = delim | ||
436 | self._buffer = [] | ||
437 | self._token = [] | ||
438 | |||
439 | def add(self, data, eof): | ||
440 | """If the here-document was delimited, return a tuple (content, remaining). | ||
441 | Raise NeedMore() otherwise. | ||
442 | """ | ||
443 | self._buffer += list(data) | ||
444 | self._parse(eof) | ||
445 | token = ''.join(self._token) | ||
446 | remaining = ''.join(self._buffer) | ||
447 | self._token, self._buffer = [], [] | ||
448 | return token, remaining | ||
449 | |||
450 | def _parse(self, eof): | ||
451 | while 1: | ||
452 | #Look for first unescaped newline. Quotes may be ignored | ||
453 | escaped = False | ||
454 | for i,c in enumerate(self._buffer): | ||
455 | if escaped: | ||
456 | escaped = False | ||
457 | elif c=='\\': | ||
458 | escaped = True | ||
459 | elif c=='\n': | ||
460 | break | ||
461 | else: | ||
462 | i = -1 | ||
463 | |||
464 | if i==-1 or self._buffer[i]!='\n': | ||
465 | if not eof: | ||
466 | raise NeedMore() | ||
467 | #No more data, maybe the last line is closing delimiter | ||
468 | line = ''.join(self._buffer) | ||
469 | eol = '' | ||
470 | self._buffer[:] = [] | ||
471 | else: | ||
472 | line = ''.join(self._buffer[:i]) | ||
473 | eol = self._buffer[i] | ||
474 | self._buffer[:i+1] = [] | ||
475 | |||
476 | if self._op=='<<-': | ||
477 | line = line.lstrip('\t') | ||
478 | |||
479 | if line==self._delim: | ||
480 | break | ||
481 | |||
482 | self._token += [line, eol] | ||
483 | if i==-1: | ||
484 | break | ||
485 | |||
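A short sketch of the sublexer above: feed it raw input and it returns the here-document body together with whatever follows the delimiter line, leading tabs being stripped for the '<<-' operator (the sample input is made up):

    body, rest = HereDocLexer('<<-', 'EOF').add('\tindented line\nEOF\necho after\n', True)
    assert body == 'indented line\n'      # '<<-' strips the leading tab
    assert rest == 'echo after\n'         # handed back to the main lexer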
486 | class Token: | ||
487 | #TODO: check this is still in use | ||
488 | OPERATOR = 'OPERATOR' | ||
489 | WORD = 'WORD' | ||
490 | |||
491 | def __init__(self): | ||
492 | self.value = '' | ||
493 | self.type = None | ||
494 | |||
495 | def __getitem__(self, key): | ||
496 | #Behave like a two elements tuple | ||
497 | if key==0: | ||
498 | return self.type | ||
499 | if key==1: | ||
500 | return self.value | ||
501 | raise IndexError(key) | ||
502 | |||
503 | |||
504 | class HereDoc: | ||
505 | def __init__(self, op, name=None): | ||
506 | self.op = op | ||
507 | self.name = name | ||
508 | self.pendings = [] | ||
509 | |||
510 | TK_COMMA = 'COMMA' | ||
511 | TK_AMPERSAND = 'AMP' | ||
512 | TK_OP = 'OP' | ||
513 | TK_TOKEN = 'TOKEN' | ||
514 | TK_COMMENT = 'COMMENT' | ||
515 | TK_NEWLINE = 'NEWLINE' | ||
516 | TK_IONUMBER = 'IO_NUMBER' | ||
517 | TK_ASSIGNMENT = 'ASSIGNMENT_WORD' | ||
518 | TK_HERENAME = 'HERENAME' | ||
519 | |||
520 | class Lexer: | ||
521 | """Main lexer. | ||
522 | |||
523 | Call add() until the script AST is returned. | ||
524 | """ | ||
525 | # Here-document handling makes the whole thing more complex because it basically | ||
526 | # forces tokens to be reordered: here-content must come right after the operator | ||
527 | # and the here-document name, while some other tokens might be following the | ||
528 | # here-document expression on the same line. | ||
529 | # | ||
530 | # So, here-doc states are basically: | ||
531 | # *self._state==ST_NORMAL | ||
532 | # - self._heredoc.op is None: no here-document | ||
533 | # - self._heredoc.op is not None but name is: here-document operator matched, | ||
534 | # waiting for the document name/delimiter | ||
535 | # - self._heredoc.op and name are not None: here-document is ready, following | ||
536 | # tokens are being stored and will be pushed again when the document is | ||
537 | # completely parsed. | ||
538 | # *self._state==ST_HEREDOC | ||
539 | # - The here-document is being delimited by self._herelexer. Once it is done | ||
540 | # the content is pushed in front of the pending token list then all these | ||
541 | # tokens are pushed once again. | ||
542 | ST_NORMAL = 'ST_NORMAL' | ||
543 | ST_OP = 'ST_OP' | ||
544 | ST_BACKSLASH = 'ST_BACKSLASH' | ||
545 | ST_QUOTED = 'ST_QUOTED' | ||
546 | ST_COMMENT = 'ST_COMMENT' | ||
547 | ST_HEREDOC = 'ST_HEREDOC' | ||
548 | |||
549 | #Match end of backquote strings | ||
550 | RE_BACKQUOTE_END = re.compile(r'(?<!\\)(`)') | ||
551 | |||
552 | def __init__(self, parent_state = None): | ||
553 | self._input = [] | ||
554 | self._pos = 0 | ||
555 | |||
556 | self._token = '' | ||
557 | self._type = TK_TOKEN | ||
558 | |||
559 | self._state = self.ST_NORMAL | ||
560 | self._parent_state = parent_state | ||
561 | self._wordlexer = None | ||
562 | |||
563 | self._heredoc = HereDoc(None) | ||
564 | self._herelexer = None | ||
565 | |||
566 | ### Following attributes are not used for delimiting tokens and can safely | ||
567 | ### be changed after here-document detection (see _push_token) | ||
568 | |||
569 | # Count the number of tokens following a 'For' reserved word. Needed to | ||
570 | # return an 'In' reserved word if it comes in third place. | ||
571 | self._for_count = None | ||
572 | |||
573 | def add(self, data, eof=False): | ||
574 | """Feed the lexer with data. | ||
575 | |||
576 | When eof is set to True, return unconsumed data or raise an error if the | ||
577 | lexer is in the middle of a delimiting operation. | ||
578 | Raise NeedMore otherwise. | ||
579 | """ | ||
580 | self._input += list(data) | ||
581 | self._parse(eof) | ||
582 | self._input[:self._pos] = [] | ||
583 | return ''.join(self._input) | ||
584 | |||
585 | def _parse(self, eof): | ||
586 | while self._state: | ||
587 | if self._pos>=len(self._input): | ||
588 | if not eof: | ||
589 | raise NeedMore() | ||
590 | elif self._state not in (self.ST_OP, self.ST_QUOTED, self.ST_HEREDOC): | ||
591 | #Delimit the current token and leave cleanly | ||
592 | self._push_token('') | ||
593 | break | ||
594 | else: | ||
595 | #Let the sublexer handle the eof themselves | ||
596 | pass | ||
597 | |||
598 | if self._state==self.ST_NORMAL: | ||
599 | self._parse_normal() | ||
600 | elif self._state==self.ST_COMMENT: | ||
601 | self._parse_comment() | ||
602 | elif self._state==self.ST_OP: | ||
603 | self._parse_op(eof) | ||
604 | elif self._state==self.ST_QUOTED: | ||
605 | self._parse_quoted(eof) | ||
606 | elif self._state==self.ST_HEREDOC: | ||
607 | self._parse_heredoc(eof) | ||
608 | else: | ||
609 | assert False, "Unknown state " + str(self._state) | ||
610 | |||
611 | if self._heredoc.op is not None: | ||
612 | raise ShellSyntaxError('missing here-document delimiter') | ||
613 | |||
614 | def _parse_normal(self): | ||
615 | c = self._input[self._pos] | ||
616 | if c=='\n': | ||
617 | self._push_token(c) | ||
618 | self._token = c | ||
619 | self._type = TK_NEWLINE | ||
620 | self._push_token('') | ||
621 | self._pos += 1 | ||
622 | elif c in ('\\', '\'', '"', '`', '$'): | ||
623 | self._state = self.ST_QUOTED | ||
624 | elif is_partial_op(c): | ||
625 | self._push_token(c) | ||
626 | |||
627 | self._type = TK_OP | ||
628 | self._token += c | ||
629 | self._pos += 1 | ||
630 | self._state = self.ST_OP | ||
631 | elif is_blank(c): | ||
632 | self._push_token(c) | ||
633 | |||
634 | #Discard blanks | ||
635 | self._pos += 1 | ||
636 | elif self._token: | ||
637 | self._token += c | ||
638 | self._pos += 1 | ||
639 | elif c=='#': | ||
640 | self._state = self.ST_COMMENT | ||
641 | self._type = TK_COMMENT | ||
642 | self._pos += 1 | ||
643 | else: | ||
644 | self._pos += 1 | ||
645 | self._token += c | ||
646 | |||
647 | def _parse_op(self, eof): | ||
648 | assert self._token | ||
649 | |||
650 | while 1: | ||
651 | if self._pos>=len(self._input): | ||
652 | if not eof: | ||
653 | raise NeedMore() | ||
654 | c = '' | ||
655 | else: | ||
656 | c = self._input[self._pos] | ||
657 | |||
658 | op = self._token + c | ||
659 | if c and is_partial_op(op): | ||
660 | #Still parsing an operator | ||
661 | self._token = op | ||
662 | self._pos += 1 | ||
663 | else: | ||
664 | #End of operator | ||
665 | self._push_token(c) | ||
666 | self._state = self.ST_NORMAL | ||
667 | break | ||
668 | |||
669 | def _parse_comment(self): | ||
670 | while 1: | ||
671 | if self._pos>=len(self._input): | ||
672 | raise NeedMore() | ||
673 | |||
674 | c = self._input[self._pos] | ||
675 | if c=='\n': | ||
676 | #End of comment, do not consume the end of line | ||
677 | self._state = self.ST_NORMAL | ||
678 | break | ||
679 | else: | ||
680 | self._token += c | ||
681 | self._pos += 1 | ||
682 | |||
683 | def _parse_quoted(self, eof): | ||
684 | """Precondition: the starting backquote/dollar is still in the input queue.""" | ||
685 | if not self._wordlexer: | ||
686 | self._wordlexer = WordLexer() | ||
687 | |||
688 | if self._pos<len(self._input): | ||
689 | #Transfer input queue characters into the subparser | ||
690 | input = self._input[self._pos:] | ||
691 | self._pos += len(input) | ||
692 | |||
693 | wtree, remaining = self._wordlexer.add(input, eof) | ||
694 | self._wordlexer = None | ||
695 | self._token += wordtree_as_string(wtree) | ||
696 | |||
697 | #Put unparsed character back in the input queue | ||
698 | if remaining: | ||
699 | self._input[self._pos:self._pos] = list(remaining) | ||
700 | self._state = self.ST_NORMAL | ||
701 | |||
702 | def _parse_heredoc(self, eof): | ||
703 | assert not self._token | ||
704 | |||
705 | if self._herelexer is None: | ||
706 | self._herelexer = HereDocLexer(self._heredoc.op, self._heredoc.name) | ||
707 | |||
708 | if self._pos<len(self._input): | ||
709 | #Transfer input queue characters into the subparser | ||
710 | input = self._input[self._pos:] | ||
711 | self._pos += len(input) | ||
712 | |||
713 | self._token, remaining = self._herelexer.add(input, eof) | ||
714 | |||
715 | #Reset here-document state | ||
716 | self._herelexer = None | ||
717 | heredoc, self._heredoc = self._heredoc, HereDoc(None) | ||
718 | if remaining: | ||
719 | self._input[self._pos:self._pos] = list(remaining) | ||
720 | self._state = self.ST_NORMAL | ||
721 | |||
722 | #Push pending tokens | ||
723 | heredoc.pendings[:0] = [(self._token, self._type, heredoc.name)] | ||
724 | for token, type, delim in heredoc.pendings: | ||
725 | self._token = token | ||
726 | self._type = type | ||
727 | self._push_token(delim) | ||
728 | |||
729 | def _push_token(self, delim): | ||
730 | if not self._token: | ||
731 | return 0 | ||
732 | |||
733 | if self._heredoc.op is not None: | ||
734 | if self._heredoc.name is None: | ||
735 | #Here-document name | ||
736 | if self._type!=TK_TOKEN: | ||
737 | raise ShellSyntaxError("expecting here-document name, got '%s'" % self._token) | ||
738 | self._heredoc.name = unquote_wordtree(make_wordtree(self._token)) | ||
739 | self._type = TK_HERENAME | ||
740 | else: | ||
741 | #Capture all tokens until the newline starting the here-document | ||
742 | if self._type==TK_NEWLINE: | ||
743 | assert self._state==self.ST_NORMAL | ||
744 | self._state = self.ST_HEREDOC | ||
745 | |||
746 | self._heredoc.pendings.append((self._token, self._type, delim)) | ||
747 | self._token = '' | ||
748 | self._type = TK_TOKEN | ||
749 | return 1 | ||
750 | |||
751 | # BEWARE: do not change parser state from here to the end of the function: | ||
752 | # when parsing between a here-document operator and the end of the line | ||
753 | # tokens are stored in self._heredoc.pendings. Therefore, they will not | ||
754 | # reach the section below. | ||
755 | |||
756 | #Check operators | ||
757 | if self._type==TK_OP: | ||
758 | #False positive because of partial op matching | ||
759 | op = is_op(self._token) | ||
760 | if not op: | ||
761 | self._type = TK_TOKEN | ||
762 | else: | ||
763 | #Map to the specific operator | ||
764 | self._type = op | ||
765 | if self._token in ('<<', '<<-'): | ||
766 | #Done here rather than in _parse_op because there is no need | ||
767 | #to change the parser state since we are still waiting for | ||
768 | #the here-document name | ||
769 | if self._heredoc.op is not None: | ||
770 | raise ShellSyntaxError("syntax error near token '%s'" % self._token) | ||
771 | assert self._heredoc.op is None | ||
772 | self._heredoc.op = self._token | ||
773 | |||
774 | if self._type==TK_TOKEN: | ||
775 | if '=' in self._token and not delim: | ||
776 | if self._token.startswith('='): | ||
777 | #Token is a WORD... a TOKEN that is. | ||
778 | pass | ||
779 | else: | ||
780 | prev = self._token[:self._token.find('=')] | ||
781 | if is_name(prev): | ||
782 | self._type = TK_ASSIGNMENT | ||
783 | else: | ||
784 | #Just a token (unspecified) | ||
785 | pass | ||
786 | else: | ||
787 | reserved = get_reserved(self._token) | ||
788 | if reserved is not None: | ||
789 | if reserved=='In' and self._for_count!=2: | ||
790 | #Sorry, not a reserved word after all | ||
791 | pass | ||
792 | else: | ||
793 | self._type = reserved | ||
794 | if reserved in ('For', 'Case'): | ||
795 | self._for_count = 0 | ||
796 | elif are_digits(self._token) and delim in ('<', '>'): | ||
797 | #Detect IO_NUMBER | ||
798 | self._type = TK_IONUMBER | ||
799 | elif self._token==';': | ||
800 | self._type = TK_COMMA | ||
801 | elif self._token=='&': | ||
802 | self._type = TK_AMPERSAND | ||
803 | elif self._type==TK_COMMENT: | ||
804 | #Comments are not part of sh grammar, ignore them | ||
805 | self._token = '' | ||
806 | self._type = TK_TOKEN | ||
807 | return 0 | ||
808 | |||
809 | if self._for_count is not None: | ||
810 | #Track token count in 'For' expression to detect 'In' reserved words. | ||
811 | #Can only be in third position, no need to go beyond | ||
812 | self._for_count += 1 | ||
813 | if self._for_count==3: | ||
814 | self._for_count = None | ||
815 | |||
816 | self.on_token((self._token, self._type)) | ||
817 | self._token = '' | ||
818 | self._type = TK_TOKEN | ||
819 | return 1 | ||
820 | |||
821 | def on_token(self, token): | ||
822 | raise NotImplementedError | ||
823 | |||
824 | |||
825 | tokens = [ | ||
826 | TK_TOKEN, | ||
827 | # To silence yacc unused token warnings | ||
828 | # TK_COMMENT, | ||
829 | TK_NEWLINE, | ||
830 | TK_IONUMBER, | ||
831 | TK_ASSIGNMENT, | ||
832 | TK_HERENAME, | ||
833 | ] | ||
834 | |||
835 | #Add specific operators | ||
836 | tokens += _OPERATORS.values() | ||
837 | #Add reserved words | ||
838 | tokens += _RESERVEDS.values() | ||
839 | |||
840 | class PLYLexer(Lexer): | ||
841 | """Bridge Lexer and PLY lexer interface.""" | ||
842 | def __init__(self): | ||
843 | Lexer.__init__(self) | ||
844 | self._tokens = [] | ||
845 | self._current = 0 | ||
846 | self.lineno = 0 | ||
847 | |||
848 | def on_token(self, token): | ||
849 | value, type = token | ||
850 | |||
851 | self.lineno = 0 | ||
852 | t = lex.LexToken() | ||
853 | t.value = value | ||
854 | t.type = type | ||
855 | t.lexer = self | ||
856 | t.lexpos = 0 | ||
857 | t.lineno = 0 | ||
858 | |||
859 | self._tokens.append(t) | ||
860 | |||
861 | def is_empty(self): | ||
862 | return not bool(self._tokens) | ||
863 | |||
864 | #PLY compliant interface | ||
865 | def token(self): | ||
866 | if self._current>=len(self._tokens): | ||
867 | return None | ||
868 | t = self._tokens[self._current] | ||
869 | self._current += 1 | ||
870 | return t | ||
871 | |||
872 | |||
873 | def get_tokens(s): | ||
874 | """Parse the input string and return a tuple (tokens, unprocessed) where | ||
875 | tokens is a list of parsed tokens and unprocessed is the part of the input | ||
876 | string left untouched by the lexer. | ||
877 | """ | ||
878 | lexer = PLYLexer() | ||
879 | untouched = lexer.add(s, True) | ||
880 | tokens = [] | ||
881 | while 1: | ||
882 | token = lexer.token() | ||
883 | if token is None: | ||
884 | break | ||
885 | tokens.append(token) | ||
886 | |||
887 | tokens = [(t.value, t.type) for t in tokens] | ||
888 | return tokens, untouched | ||
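A minimal usage sketch (not part of the diff, assuming the module is importable as pyshlex): get_tokens() drives a PLYLexer over a complete input string and returns the (value, type) pairs plus whatever input was left unconsumed. The here-document input below illustrates the reordering described in the state-machine comment at the top of the lexer: the here-content token is pushed right after the here-document name, before the tokens that followed the operator on the same line.

    import pyshlex

    # Feed a complete script; the trailing newline lets the lexer delimit everything.
    tokens, remaining = pyshlex.get_tokens('cat <<EOF >out.txt\nhello\nEOF\n')
    for value, ttype in tokens:
        print ttype, repr(value)
    # 'remaining' holds any input the lexer could not consume.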
diff --git a/bitbake/lib/bb/pysh/pyshyacc.py b/bitbake/lib/bb/pysh/pyshyacc.py new file mode 100644 index 0000000000..3d9510c0c3 --- /dev/null +++ b/bitbake/lib/bb/pysh/pyshyacc.py | |||
@@ -0,0 +1,772 @@ | |||
1 | # pyshyacc.py - PLY grammar definition for pysh | ||
2 | # | ||
3 | # Copyright 2007 Patrick Mezard | ||
4 | # | ||
5 | # This software may be used and distributed according to the terms | ||
6 | # of the GNU General Public License, incorporated herein by reference. | ||
7 | |||
8 | """PLY grammar file. | ||
9 | """ | ||
10 | import sys | ||
11 | |||
12 | import pyshlex | ||
13 | tokens = pyshlex.tokens | ||
14 | |||
15 | from ply import yacc | ||
16 | import sherrors | ||
17 | |||
18 | class IORedirect: | ||
19 | def __init__(self, op, filename, io_number=None): | ||
20 | self.op = op | ||
21 | self.filename = filename | ||
22 | self.io_number = io_number | ||
23 | |||
24 | class HereDocument: | ||
25 | def __init__(self, op, name, content, io_number=None): | ||
26 | self.op = op | ||
27 | self.name = name | ||
28 | self.content = content | ||
29 | self.io_number = io_number | ||
30 | |||
31 | def make_io_redirect(p): | ||
32 | """Make an IORedirect instance from the input 'io_redirect' production.""" | ||
33 | name, io_number, io_target = p | ||
34 | assert name=='io_redirect' | ||
35 | |||
36 | if io_target[0]=='io_file': | ||
37 | io_type, io_op, io_file = io_target | ||
38 | return IORedirect(io_op, io_file, io_number) | ||
39 | elif io_target[0]=='io_here': | ||
40 | io_type, io_op, io_name, io_content = io_target | ||
41 | return HereDocument(io_op, io_name, io_content, io_number) | ||
42 | else: | ||
43 | assert False, "Invalid IO redirection token %s" % repr(io_target[0]) | ||
44 | |||
45 | class SimpleCommand: | ||
46 | """ | ||
47 | assigns contains (name, value) pairs. | ||
48 | """ | ||
49 | def __init__(self, words, redirs, assigns): | ||
50 | self.words = list(words) | ||
51 | self.redirs = list(redirs) | ||
52 | self.assigns = list(assigns) | ||
53 | |||
54 | class Pipeline: | ||
55 | def __init__(self, commands, reverse_status=False): | ||
56 | self.commands = list(commands) | ||
57 | assert self.commands #Grammar forbids this | ||
58 | self.reverse_status = reverse_status | ||
59 | |||
60 | class AndOr: | ||
61 | def __init__(self, op, left, right): | ||
62 | self.op = str(op) | ||
63 | self.left = left | ||
64 | self.right = right | ||
65 | |||
66 | class ForLoop: | ||
67 | def __init__(self, name, items, cmds): | ||
68 | self.name = str(name) | ||
69 | self.items = list(items) | ||
70 | self.cmds = list(cmds) | ||
71 | |||
72 | class WhileLoop: | ||
73 | def __init__(self, condition, cmds): | ||
74 | self.condition = list(condition) | ||
75 | self.cmds = list(cmds) | ||
76 | |||
77 | class UntilLoop: | ||
78 | def __init__(self, condition, cmds): | ||
79 | self.condition = list(condition) | ||
80 | self.cmds = list(cmds) | ||
81 | |||
82 | class FunDef: | ||
83 | def __init__(self, name, body): | ||
84 | self.name = str(name) | ||
85 | self.body = body | ||
86 | |||
87 | class BraceGroup: | ||
88 | def __init__(self, cmds): | ||
89 | self.cmds = list(cmds) | ||
90 | |||
91 | class IfCond: | ||
92 | def __init__(self, cond, if_cmds, else_cmds): | ||
93 | self.cond = list(cond) | ||
94 | self.if_cmds = if_cmds | ||
95 | self.else_cmds = else_cmds | ||
96 | |||
97 | class Case: | ||
98 | def __init__(self, name, items): | ||
99 | self.name = name | ||
100 | self.items = items | ||
101 | |||
102 | class SubShell: | ||
103 | def __init__(self, cmds): | ||
104 | self.cmds = cmds | ||
105 | |||
106 | class RedirectList: | ||
107 | def __init__(self, cmd, redirs): | ||
108 | self.cmd = cmd | ||
109 | self.redirs = list(redirs) | ||
110 | |||
111 | def get_production(productions, ptype): | ||
112 | """productions must be a list of production tuples like (name, obj) where | ||
113 | name is the production string identifier. | ||
114 | Return the first production named 'ptype'. Raise KeyError if none can be | ||
115 | found. | ||
116 | """ | ||
117 | for production in productions: | ||
118 | if production is not None and production[0]==ptype: | ||
119 | return production | ||
120 | raise KeyError(ptype) | ||
121 | |||
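A hypothetical illustration (not part of the diff) of how get_production() is used by the grammar actions below: each action passes its child productions, which are (name, ...) tuples, and fetches a named one, treating KeyError as "this optional production is absent".

    prods = [None, ('separator', ';'), ('term', 'and_or-1', 'and_or-2')]
    get_production(prods, 'term')         # -> ('term', 'and_or-1', 'and_or-2')
    try:
        get_production(prods, 'wordlist')
    except KeyError:
        pass                              # the optional production is simply missing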
122 | #------------------------------------------------------------------------------- | ||
123 | # PLY grammar definition | ||
124 | #------------------------------------------------------------------------------- | ||
125 | |||
126 | def p_multiple_commands(p): | ||
127 | """multiple_commands : newline_sequence | ||
128 | | complete_command | ||
129 | | multiple_commands complete_command""" | ||
130 | if len(p)==2: | ||
131 | if p[1] is not None: | ||
132 | p[0] = [p[1]] | ||
133 | else: | ||
134 | p[0] = [] | ||
135 | else: | ||
136 | p[0] = p[1] + [p[2]] | ||
137 | |||
138 | def p_complete_command(p): | ||
139 | """complete_command : list separator | ||
140 | | list""" | ||
141 | if len(p)==3 and p[2] and p[2][1] == '&': | ||
142 | p[0] = ('async', p[1]) | ||
143 | else: | ||
144 | p[0] = p[1] | ||
145 | |||
146 | def p_list(p): | ||
147 | """list : list separator_op and_or | ||
148 | | and_or""" | ||
149 | if len(p)==2: | ||
150 | p[0] = [p[1]] | ||
151 | else: | ||
152 | #if p[2]!=';': | ||
153 | # raise NotImplementedError('AND-OR list asynchronous execution is not implemented') | ||
154 | p[0] = p[1] + [p[3]] | ||
155 | |||
156 | def p_and_or(p): | ||
157 | """and_or : pipeline | ||
158 | | and_or AND_IF linebreak pipeline | ||
159 | | and_or OR_IF linebreak pipeline""" | ||
160 | if len(p)==2: | ||
161 | p[0] = p[1] | ||
162 | else: | ||
163 | p[0] = ('and_or', AndOr(p[2], p[1], p[4])) | ||
164 | |||
165 | def p_maybe_bang_word(p): | ||
166 | """maybe_bang_word : Bang""" | ||
167 | p[0] = ('maybe_bang_word', p[1]) | ||
168 | |||
169 | def p_pipeline(p): | ||
170 | """pipeline : pipe_sequence | ||
171 | | bang_word pipe_sequence""" | ||
172 | if len(p)==3: | ||
173 | p[0] = ('pipeline', Pipeline(p[2][1:], True)) | ||
174 | else: | ||
175 | p[0] = ('pipeline', Pipeline(p[1][1:])) | ||
176 | |||
177 | def p_pipe_sequence(p): | ||
178 | """pipe_sequence : command | ||
179 | | pipe_sequence PIPE linebreak command""" | ||
180 | if len(p)==2: | ||
181 | p[0] = ['pipe_sequence', p[1]] | ||
182 | else: | ||
183 | p[0] = p[1] + [p[4]] | ||
184 | |||
185 | def p_command(p): | ||
186 | """command : simple_command | ||
187 | | compound_command | ||
188 | | compound_command redirect_list | ||
189 | | function_definition""" | ||
190 | |||
191 | if p[1][0] in ( 'simple_command', | ||
192 | 'for_clause', | ||
193 | 'while_clause', | ||
194 | 'until_clause', | ||
195 | 'case_clause', | ||
196 | 'if_clause', | ||
197 | 'function_definition', | ||
198 | 'subshell', | ||
199 | 'brace_group',): | ||
200 | if len(p) == 2: | ||
201 | p[0] = p[1] | ||
202 | else: | ||
203 | p[0] = ('redirect_list', RedirectList(p[1], p[2][1:])) | ||
204 | else: | ||
205 | raise NotImplementedError('%s command is not implemented' % repr(p[1][0])) | ||
206 | |||
207 | def p_compound_command(p): | ||
208 | """compound_command : brace_group | ||
209 | | subshell | ||
210 | | for_clause | ||
211 | | case_clause | ||
212 | | if_clause | ||
213 | | while_clause | ||
214 | | until_clause""" | ||
215 | p[0] = p[1] | ||
216 | |||
217 | def p_subshell(p): | ||
218 | """subshell : LPARENS compound_list RPARENS""" | ||
219 | p[0] = ('subshell', SubShell(p[2][1:])) | ||
220 | |||
221 | def p_compound_list(p): | ||
222 | """compound_list : term | ||
223 | | newline_list term | ||
224 | | term separator | ||
225 | | newline_list term separator""" | ||
226 | productions = p[1:] | ||
227 | try: | ||
228 | sep = get_production(productions, 'separator') | ||
229 | if sep[1]!=';': | ||
230 | raise NotImplementedError() | ||
231 | except KeyError: | ||
232 | pass | ||
233 | term = get_production(productions, 'term') | ||
234 | p[0] = ['compound_list'] + term[1:] | ||
235 | |||
236 | def p_term(p): | ||
237 | """term : term separator and_or | ||
238 | | and_or""" | ||
239 | if len(p)==2: | ||
240 | p[0] = ['term', p[1]] | ||
241 | else: | ||
242 | if p[2] is not None and p[2][1] == '&': | ||
243 | p[0] = ['term', ('async', p[1][1:])] + [p[3]] | ||
244 | else: | ||
245 | p[0] = p[1] + [p[3]] | ||
246 | |||
247 | def p_maybe_for_word(p): | ||
248 | # Rearrange 'For' priority wrt TOKEN. See p_for_word | ||
249 | """maybe_for_word : For""" | ||
250 | p[0] = ('maybe_for_word', p[1]) | ||
251 | |||
252 | def p_for_clause(p): | ||
253 | """for_clause : for_word name linebreak do_group | ||
254 | | for_word name linebreak in sequential_sep do_group | ||
255 | | for_word name linebreak in wordlist sequential_sep do_group""" | ||
256 | productions = p[1:] | ||
257 | do_group = get_production(productions, 'do_group') | ||
258 | try: | ||
259 | items = get_production(productions, 'in')[1:] | ||
260 | except KeyError: | ||
261 | raise NotImplementedError('"in" omission is not implemented') | ||
262 | |||
263 | try: | ||
264 | items = get_production(productions, 'wordlist')[1:] | ||
265 | except KeyError: | ||
266 | items = [] | ||
267 | |||
268 | name = p[2] | ||
269 | p[0] = ('for_clause', ForLoop(name, items, do_group[1:])) | ||
270 | |||
271 | def p_name(p): | ||
272 | """name : token""" #Was NAME instead of token | ||
273 | p[0] = p[1] | ||
274 | |||
275 | def p_in(p): | ||
276 | """in : In""" | ||
277 | p[0] = ('in', p[1]) | ||
278 | |||
279 | def p_wordlist(p): | ||
280 | """wordlist : wordlist token | ||
281 | | token""" | ||
282 | if len(p)==2: | ||
283 | p[0] = ['wordlist', ('TOKEN', p[1])] | ||
284 | else: | ||
285 | p[0] = p[1] + [('TOKEN', p[2])] | ||
286 | |||
287 | def p_case_clause(p): | ||
288 | """case_clause : Case token linebreak in linebreak case_list Esac | ||
289 | | Case token linebreak in linebreak case_list_ns Esac | ||
290 | | Case token linebreak in linebreak Esac""" | ||
291 | if len(p) < 8: | ||
292 | items = [] | ||
293 | else: | ||
294 | items = p[6][1:] | ||
295 | name = p[2] | ||
296 | p[0] = ('case_clause', Case(name, [c[1] for c in items])) | ||
297 | |||
298 | def p_case_list_ns(p): | ||
299 | """case_list_ns : case_list case_item_ns | ||
300 | | case_item_ns""" | ||
301 | p_case_list(p) | ||
302 | |||
303 | def p_case_list(p): | ||
304 | """case_list : case_list case_item | ||
305 | | case_item""" | ||
306 | if len(p)==2: | ||
307 | p[0] = ['case_list', p[1]] | ||
308 | else: | ||
309 | p[0] = p[1] + [p[2]] | ||
310 | |||
311 | def p_case_item_ns(p): | ||
312 | """case_item_ns : pattern RPARENS linebreak | ||
313 | | pattern RPARENS compound_list linebreak | ||
314 | | LPARENS pattern RPARENS linebreak | ||
315 | | LPARENS pattern RPARENS compound_list linebreak""" | ||
316 | p_case_item(p) | ||
317 | |||
318 | def p_case_item(p): | ||
319 | """case_item : pattern RPARENS linebreak DSEMI linebreak | ||
320 | | pattern RPARENS compound_list DSEMI linebreak | ||
321 | | LPARENS pattern RPARENS linebreak DSEMI linebreak | ||
322 | | LPARENS pattern RPARENS compound_list DSEMI linebreak""" | ||
323 | if len(p) < 7: | ||
324 | name = p[1][1:] | ||
325 | else: | ||
326 | name = p[2][1:] | ||
327 | |||
328 | try: | ||
329 | cmds = get_production(p[1:], "compound_list")[1:] | ||
330 | except KeyError: | ||
331 | cmds = [] | ||
332 | |||
333 | p[0] = ('case_item', (name, cmds)) | ||
334 | |||
335 | def p_pattern(p): | ||
336 | """pattern : token | ||
337 | | pattern PIPE token""" | ||
338 | if len(p)==2: | ||
339 | p[0] = ['pattern', ('TOKEN', p[1])] | ||
340 | else: | ||
341 | p[0] = p[1] + [('TOKEN', p[2])] | ||
342 | |||
343 | def p_maybe_if_word(p): | ||
344 | # Rearrange 'If' priority wrt TOKEN. See p_if_word | ||
345 | """maybe_if_word : If""" | ||
346 | p[0] = ('maybe_if_word', p[1]) | ||
347 | |||
348 | def p_maybe_then_word(p): | ||
349 | # Rearrange 'Then' priority wrt TOKEN. See p_then_word | ||
350 | """maybe_then_word : Then""" | ||
351 | p[0] = ('maybe_then_word', p[1]) | ||
352 | |||
353 | def p_if_clause(p): | ||
354 | """if_clause : if_word compound_list then_word compound_list else_part Fi | ||
355 | | if_word compound_list then_word compound_list Fi""" | ||
356 | else_part = [] | ||
357 | if len(p)==7: | ||
358 | else_part = p[5] | ||
359 | p[0] = ('if_clause', IfCond(p[2][1:], p[4][1:], else_part)) | ||
360 | |||
361 | def p_else_part(p): | ||
362 | """else_part : Elif compound_list then_word compound_list else_part | ||
363 | | Elif compound_list then_word compound_list | ||
364 | | Else compound_list""" | ||
365 | if len(p)==3: | ||
366 | p[0] = p[2][1:] | ||
367 | else: | ||
368 | else_part = [] | ||
369 | if len(p)==6: | ||
370 | else_part = p[5] | ||
371 | p[0] = ('elif', IfCond(p[2][1:], p[4][1:], else_part)) | ||
372 | |||
373 | def p_while_clause(p): | ||
374 | """while_clause : While compound_list do_group""" | ||
375 | p[0] = ('while_clause', WhileLoop(p[2][1:], p[3][1:])) | ||
376 | |||
377 | def p_maybe_until_word(p): | ||
378 | # Rearrange 'Until' priority wrt TOKEN. See p_until_word | ||
379 | """maybe_until_word : Until""" | ||
380 | p[0] = ('maybe_until_word', p[1]) | ||
381 | |||
382 | def p_until_clause(p): | ||
383 | """until_clause : until_word compound_list do_group""" | ||
384 | p[0] = ('until_clause', UntilLoop(p[2][1:], p[3][1:])) | ||
385 | |||
386 | def p_function_definition(p): | ||
387 | """function_definition : fname LPARENS RPARENS linebreak function_body""" | ||
388 | p[0] = ('function_definition', FunDef(p[1], p[5])) | ||
389 | |||
390 | def p_function_body(p): | ||
391 | """function_body : compound_command | ||
392 | | compound_command redirect_list""" | ||
393 | if len(p)!=2: | ||
394 | raise NotImplementedError('function redirection lists are not implemented') | ||
395 | p[0] = p[1] | ||
396 | |||
397 | def p_fname(p): | ||
398 | """fname : TOKEN""" #Was NAME instead of token | ||
399 | p[0] = p[1] | ||
400 | |||
401 | def p_brace_group(p): | ||
402 | """brace_group : Lbrace compound_list Rbrace""" | ||
403 | p[0] = ('brace_group', BraceGroup(p[2][1:])) | ||
404 | |||
405 | def p_maybe_done_word(p): | ||
406 | #See p_assignment_word for details. | ||
407 | """maybe_done_word : Done""" | ||
408 | p[0] = ('maybe_done_word', p[1]) | ||
409 | |||
410 | def p_maybe_do_word(p): | ||
411 | """maybe_do_word : Do""" | ||
412 | p[0] = ('maybe_do_word', p[1]) | ||
413 | |||
414 | def p_do_group(p): | ||
415 | """do_group : do_word compound_list done_word""" | ||
416 | #Do group contains a list of AndOr | ||
417 | p[0] = ['do_group'] + p[2][1:] | ||
418 | |||
419 | def p_simple_command(p): | ||
420 | """simple_command : cmd_prefix cmd_word cmd_suffix | ||
421 | | cmd_prefix cmd_word | ||
422 | | cmd_prefix | ||
423 | | cmd_name cmd_suffix | ||
424 | | cmd_name""" | ||
425 | words, redirs, assigns = [], [], [] | ||
426 | for e in p[1:]: | ||
427 | name = e[0] | ||
428 | if name in ('cmd_prefix', 'cmd_suffix'): | ||
429 | for sube in e[1:]: | ||
430 | subname = sube[0] | ||
431 | if subname=='io_redirect': | ||
432 | redirs.append(make_io_redirect(sube)) | ||
433 | elif subname=='ASSIGNMENT_WORD': | ||
434 | assigns.append(sube) | ||
435 | else: | ||
436 | words.append(sube) | ||
437 | elif name in ('cmd_word', 'cmd_name'): | ||
438 | words.append(e) | ||
439 | |||
440 | cmd = SimpleCommand(words, redirs, assigns) | ||
441 | p[0] = ('simple_command', cmd) | ||
442 | |||
443 | def p_cmd_name(p): | ||
444 | """cmd_name : TOKEN""" | ||
445 | p[0] = ('cmd_name', p[1]) | ||
446 | |||
447 | def p_cmd_word(p): | ||
448 | """cmd_word : token""" | ||
449 | p[0] = ('cmd_word', p[1]) | ||
450 | |||
451 | def p_maybe_assignment_word(p): | ||
452 | #See p_assignment_word for details. | ||
453 | """maybe_assignment_word : ASSIGNMENT_WORD""" | ||
454 | p[0] = ('maybe_assignment_word', p[1]) | ||
455 | |||
456 | def p_cmd_prefix(p): | ||
457 | """cmd_prefix : io_redirect | ||
458 | | cmd_prefix io_redirect | ||
459 | | assignment_word | ||
460 | | cmd_prefix assignment_word""" | ||
461 | try: | ||
462 | prefix = get_production(p[1:], 'cmd_prefix') | ||
463 | except KeyError: | ||
464 | prefix = ['cmd_prefix'] | ||
465 | |||
466 | try: | ||
467 | value = get_production(p[1:], 'assignment_word')[1] | ||
468 | value = ('ASSIGNMENT_WORD', value.split('=', 1)) | ||
469 | except KeyError: | ||
470 | value = get_production(p[1:], 'io_redirect') | ||
471 | p[0] = prefix + [value] | ||
472 | |||
473 | def p_cmd_suffix(p): | ||
474 | """cmd_suffix : io_redirect | ||
475 | | cmd_suffix io_redirect | ||
476 | | token | ||
477 | | cmd_suffix token | ||
478 | | maybe_for_word | ||
479 | | cmd_suffix maybe_for_word | ||
480 | | maybe_done_word | ||
481 | | cmd_suffix maybe_done_word | ||
482 | | maybe_do_word | ||
483 | | cmd_suffix maybe_do_word | ||
484 | | maybe_until_word | ||
485 | | cmd_suffix maybe_until_word | ||
486 | | maybe_assignment_word | ||
487 | | cmd_suffix maybe_assignment_word | ||
488 | | maybe_if_word | ||
489 | | cmd_suffix maybe_if_word | ||
490 | | maybe_then_word | ||
491 | | cmd_suffix maybe_then_word | ||
492 | | maybe_bang_word | ||
493 | | cmd_suffix maybe_bang_word""" | ||
494 | try: | ||
495 | suffix = get_production(p[1:], 'cmd_suffix') | ||
496 | token = p[2] | ||
497 | except KeyError: | ||
498 | suffix = ['cmd_suffix'] | ||
499 | token = p[1] | ||
500 | |||
501 | if isinstance(token, tuple): | ||
502 | if token[0]=='io_redirect': | ||
503 | p[0] = suffix + [token] | ||
504 | else: | ||
505 | #Convert maybe_* to TOKEN if necessary | ||
506 | p[0] = suffix + [('TOKEN', token[1])] | ||
507 | else: | ||
508 | p[0] = suffix + [('TOKEN', token)] | ||
509 | |||
510 | def p_redirect_list(p): | ||
511 | """redirect_list : io_redirect | ||
512 | | redirect_list io_redirect""" | ||
513 | if len(p) == 2: | ||
514 | p[0] = ['redirect_list', make_io_redirect(p[1])] | ||
515 | else: | ||
516 | p[0] = p[1] + [make_io_redirect(p[2])] | ||
517 | |||
518 | def p_io_redirect(p): | ||
519 | """io_redirect : io_file | ||
520 | | IO_NUMBER io_file | ||
521 | | io_here | ||
522 | | IO_NUMBER io_here""" | ||
523 | if len(p)==3: | ||
524 | p[0] = ('io_redirect', p[1], p[2]) | ||
525 | else: | ||
526 | p[0] = ('io_redirect', None, p[1]) | ||
527 | |||
528 | def p_io_file(p): | ||
529 | #Return the tuple (operator, filename) | ||
530 | """io_file : LESS filename | ||
531 | | LESSAND filename | ||
532 | | GREATER filename | ||
533 | | GREATAND filename | ||
534 | | DGREAT filename | ||
535 | | LESSGREAT filename | ||
536 | | CLOBBER filename""" | ||
537 | #Extract the filename string from the filename production | ||
538 | p[0] = ('io_file', p[1], p[2][1]) | ||
539 | |||
540 | def p_filename(p): | ||
541 | #Return the filename | ||
542 | """filename : TOKEN""" | ||
543 | p[0] = ('filename', p[1]) | ||
544 | |||
545 | def p_io_here(p): | ||
546 | """io_here : DLESS here_end | ||
547 | | DLESSDASH here_end""" | ||
548 | p[0] = ('io_here', p[1], p[2][1], p[2][2]) | ||
549 | |||
550 | def p_here_end(p): | ||
551 | """here_end : HERENAME TOKEN""" | ||
552 | p[0] = ('here_document', p[1], p[2]) | ||
553 | |||
554 | def p_newline_sequence(p): | ||
555 | # Nothing in the grammar can handle leading NEWLINE productions, so add | ||
556 | # this one with the lowest possible priority relative to newline_list. | ||
557 | """newline_sequence : newline_list""" | ||
558 | p[0] = None | ||
559 | |||
560 | def p_newline_list(p): | ||
561 | """newline_list : NEWLINE | ||
562 | | newline_list NEWLINE""" | ||
563 | p[0] = None | ||
564 | |||
565 | def p_linebreak(p): | ||
566 | """linebreak : newline_list | ||
567 | | empty""" | ||
568 | p[0] = None | ||
569 | |||
570 | def p_separator_op(p): | ||
571 | """separator_op : COMMA | ||
572 | | AMP""" | ||
573 | p[0] = p[1] | ||
574 | |||
575 | def p_separator(p): | ||
576 | """separator : separator_op linebreak | ||
577 | | newline_list""" | ||
578 | if len(p)==2: | ||
579 | #Ignore newlines | ||
580 | p[0] = None | ||
581 | else: | ||
582 | #Keep the separator operator | ||
583 | p[0] = ('separator', p[1]) | ||
584 | |||
585 | def p_sequential_sep(p): | ||
586 | """sequential_sep : COMMA linebreak | ||
587 | | newline_list""" | ||
588 | p[0] = None | ||
589 | |||
590 | # Low priority TOKEN => for_word conversion. | ||
591 | # Let maybe_for_word be used as a token when necessary in higher priority | ||
592 | # rules. | ||
593 | def p_for_word(p): | ||
594 | """for_word : maybe_for_word""" | ||
595 | p[0] = p[1] | ||
596 | |||
597 | def p_if_word(p): | ||
598 | """if_word : maybe_if_word""" | ||
599 | p[0] = p[1] | ||
600 | |||
601 | def p_then_word(p): | ||
602 | """then_word : maybe_then_word""" | ||
603 | p[0] = p[1] | ||
604 | |||
605 | def p_done_word(p): | ||
606 | """done_word : maybe_done_word""" | ||
607 | p[0] = p[1] | ||
608 | |||
609 | def p_do_word(p): | ||
610 | """do_word : maybe_do_word""" | ||
611 | p[0] = p[1] | ||
612 | |||
613 | def p_until_word(p): | ||
614 | """until_word : maybe_until_word""" | ||
615 | p[0] = p[1] | ||
616 | |||
617 | def p_assignment_word(p): | ||
618 | """assignment_word : maybe_assignment_word""" | ||
619 | p[0] = ('assignment_word', p[1][1]) | ||
620 | |||
621 | def p_bang_word(p): | ||
622 | """bang_word : maybe_bang_word""" | ||
623 | p[0] = ('bang_word', p[1][1]) | ||
624 | |||
625 | def p_token(p): | ||
626 | """token : TOKEN | ||
627 | | Fi""" | ||
628 | p[0] = p[1] | ||
629 | |||
630 | def p_empty(p): | ||
631 | 'empty :' | ||
632 | p[0] = None | ||
633 | |||
634 | # Error rule for syntax errors | ||
635 | def p_error(p): | ||
636 | msg = [] | ||
637 | w = msg.append | ||
638 | w('%r\n' % p) | ||
639 | w('followed by:\n') | ||
640 | for i in range(5): | ||
641 | n = yacc.token() | ||
642 | if not n: | ||
643 | break | ||
644 | w(' %r\n' % n) | ||
645 | raise sherrors.ShellSyntaxError(''.join(msg)) | ||
646 | |||
647 | # Build the parser | ||
648 | try: | ||
649 | import pyshtables | ||
650 | except ImportError: | ||
651 | yacc.yacc(tabmodule = 'pyshtables') | ||
652 | else: | ||
653 | yacc.yacc(tabmodule = 'pysh.pyshtables', write_tables = 0, debug = 0) | ||
654 | |||
655 | |||
656 | def parse(input, eof=False, debug=False): | ||
657 | """Parse a whole script at once and return the generated AST and unconsumed | ||
658 | data in a tuple. | ||
659 | |||
660 | NOTE: eof is probably meaningless for now, the parser being unable to work | ||
661 | in pull mode. It should be set to True. | ||
662 | """ | ||
663 | lexer = pyshlex.PLYLexer() | ||
664 | remaining = lexer.add(input, eof) | ||
665 | if lexer.is_empty(): | ||
666 | return [], remaining | ||
667 | if debug: | ||
668 | debug = 2 | ||
669 | return yacc.parse(lexer=lexer, debug=debug), remaining | ||
670 | |||
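A minimal usage sketch (not part of the diff, assuming the module is importable as pyshyacc): parse a complete script in one go, then pretty-print the resulting tree with print_commands(), defined further below.

    import pyshyacc

    # eof=True because, as noted above, the parser cannot work in pull mode.
    commands, remaining = pyshyacc.parse('echo hello && echo world\n', True)
    pyshyacc.print_commands(commands)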
671 | #------------------------------------------------------------------------------- | ||
672 | # AST rendering helpers | ||
673 | #------------------------------------------------------------------------------- | ||
674 | |||
675 | def format_commands(v): | ||
676 | """Return a tree made of strings and lists. Make command trees easier to | ||
677 | display. | ||
678 | """ | ||
679 | if isinstance(v, list): | ||
680 | return [format_commands(c) for c in v] | ||
681 | if isinstance(v, tuple): | ||
682 | if len(v)==2 and isinstance(v[0], str) and not isinstance(v[1], str): | ||
683 | if v[0] == 'async': | ||
684 | return ['AsyncList', map(format_commands, v[1])] | ||
685 | else: | ||
686 | #Avoid decomposing tuples like ('pipeline', Pipeline(...)) | ||
687 | return format_commands(v[1]) | ||
688 | return format_commands(list(v)) | ||
689 | elif isinstance(v, IfCond): | ||
690 | name = ['IfCond'] | ||
691 | name += ['if', map(format_commands, v.cond)] | ||
692 | name += ['then', map(format_commands, v.if_cmds)] | ||
693 | name += ['else', map(format_commands, v.else_cmds)] | ||
694 | return name | ||
695 | elif isinstance(v, ForLoop): | ||
696 | name = ['ForLoop'] | ||
697 | name += [repr(v.name)+' in ', map(str, v.items)] | ||
698 | name += ['commands', map(format_commands, v.cmds)] | ||
699 | return name | ||
700 | elif isinstance(v, AndOr): | ||
701 | return [v.op, format_commands(v.left), format_commands(v.right)] | ||
702 | elif isinstance(v, Pipeline): | ||
703 | name = 'Pipeline' | ||
704 | if v.reverse_status: | ||
705 | name = '!' + name | ||
706 | return [name, format_commands(v.commands)] | ||
707 | elif isinstance(v, SimpleCommand): | ||
708 | name = ['SimpleCommand'] | ||
709 | if v.words: | ||
710 | name += ['words', map(str, v.words)] | ||
711 | if v.assigns: | ||
712 | assigns = [tuple(a[1]) for a in v.assigns] | ||
713 | name += ['assigns', map(str, assigns)] | ||
714 | if v.redirs: | ||
715 | name += ['redirs', map(format_commands, v.redirs)] | ||
716 | return name | ||
717 | elif isinstance(v, RedirectList): | ||
718 | name = ['RedirectList'] | ||
719 | if v.redirs: | ||
720 | name += ['redirs', map(format_commands, v.redirs)] | ||
721 | name += ['command', format_commands(v.cmd)] | ||
722 | return name | ||
723 | elif isinstance(v, IORedirect): | ||
724 | return ' '.join(map(str, (v.io_number, v.op, v.filename))) | ||
725 | elif isinstance(v, HereDocument): | ||
726 | return ' '.join(map(str, (v.io_number, v.op, repr(v.name), repr(v.content)))) | ||
727 | elif isinstance(v, SubShell): | ||
728 | return ['SubShell', map(format_commands, v.cmds)] | ||
729 | else: | ||
730 | return repr(v) | ||
731 | |||
732 | def print_commands(cmds, output=sys.stdout): | ||
733 | """Pretty print a command tree.""" | ||
734 | def print_tree(cmd, spaces, output): | ||
735 | if isinstance(cmd, list): | ||
736 | for c in cmd: | ||
737 | print_tree(c, spaces + 3, output) | ||
738 | else: | ||
739 | print >>output, ' '*spaces + str(cmd) | ||
740 | |||
741 | formatted = format_commands(cmds) | ||
742 | print_tree(formatted, 0, output) | ||
743 | |||
744 | |||
745 | def stringify_commands(cmds): | ||
746 | """Serialize a command tree as a string. | ||
747 | |||
748 | Returned string is not pretty and is currently used for unit tests only. | ||
749 | """ | ||
750 | def stringify(value): | ||
751 | output = [] | ||
752 | if isinstance(value, list): | ||
753 | formatted = [] | ||
754 | for v in value: | ||
755 | formatted.append(stringify(v)) | ||
756 | formatted = ' '.join(formatted) | ||
757 | output.append(''.join(['<', formatted, '>'])) | ||
758 | else: | ||
759 | output.append(value) | ||
760 | return ' '.join(output) | ||
761 | |||
762 | return stringify(format_commands(cmds)) | ||
763 | |||
764 | |||
765 | def visit_commands(cmds, callable): | ||
766 | """Visit the command tree and execute callable on every Pipeline and | ||
767 | SimpleCommand instance. | ||
768 | """ | ||
769 | if isinstance(cmds, (tuple, list)): | ||
770 | map(lambda c: visit_commands(c,callable), cmds) | ||
771 | elif isinstance(cmds, (Pipeline, SimpleCommand)): | ||
772 | callable(cmds) | ||
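A hypothetical follow-up sketch (not part of the diff): visit_commands() only recurses through plain tuples and lists, so it reaches Pipeline and SimpleCommand instances sitting directly in those containers but does not descend into other AST objects such as AndOr.

    import pyshyacc

    cmds, remaining = pyshyacc.parse('echo hello\n', True)
    found = []
    pyshyacc.visit_commands(cmds, found.append)
    print [c.__class__.__name__ for c in found]   # e.g. ['Pipeline']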
diff --git a/bitbake/lib/bb/pysh/sherrors.py b/bitbake/lib/bb/pysh/sherrors.py new file mode 100644 index 0000000000..1d5bd53b3a --- /dev/null +++ b/bitbake/lib/bb/pysh/sherrors.py | |||
@@ -0,0 +1,41 @@ | |||
1 | # sherrors.py - shell errors and signals | ||
2 | # | ||
3 | # Copyright 2007 Patrick Mezard | ||
4 | # | ||
5 | # This software may be used and distributed according to the terms | ||
6 | # of the GNU General Public License, incorporated herein by reference. | ||
7 | |||
8 | """Define shell exceptions and error codes. | ||
9 | """ | ||
10 | |||
11 | class ShellError(Exception): | ||
12 | pass | ||
13 | |||
14 | class ShellSyntaxError(ShellError): | ||
15 | pass | ||
16 | |||
17 | class UtilityError(ShellError): | ||
18 | """Raised upon utility syntax error (option or operand error).""" | ||
19 | pass | ||
20 | |||
21 | class ExpansionError(ShellError): | ||
22 | pass | ||
23 | |||
24 | class CommandNotFound(ShellError): | ||
25 | """Specified command was not found.""" | ||
26 | pass | ||
27 | |||
28 | class RedirectionError(ShellError): | ||
29 | pass | ||
30 | |||
31 | class VarAssignmentError(ShellError): | ||
32 | """Variable assignment error.""" | ||
33 | pass | ||
34 | |||
35 | class ExitSignal(ShellError): | ||
36 | """Exit signal.""" | ||
37 | pass | ||
38 | |||
39 | class ReturnSignal(ShellError): | ||
40 | """Exit signal.""" | ||
41 | pass \ No newline at end of file | ||
diff --git a/bitbake/lib/bb/pysh/subprocess_fix.py b/bitbake/lib/bb/pysh/subprocess_fix.py new file mode 100644 index 0000000000..46eca22802 --- /dev/null +++ b/bitbake/lib/bb/pysh/subprocess_fix.py | |||
@@ -0,0 +1,77 @@ | |||
1 | # subprocess - Subprocesses with accessible I/O streams | ||
2 | # | ||
3 | # For more information about this module, see PEP 324. | ||
4 | # | ||
5 | # This module should remain compatible with Python 2.2, see PEP 291. | ||
6 | # | ||
7 | # Copyright (c) 2003-2005 by Peter Astrand <astrand@lysator.liu.se> | ||
8 | # | ||
9 | # Licensed to PSF under a Contributor Agreement. | ||
10 | # See http://www.python.org/2.4/license for licensing details. | ||
11 | |||
12 | def list2cmdline(seq): | ||
13 | """ | ||
14 | Translate a sequence of arguments into a command line | ||
15 | string, using the same rules as the MS C runtime: | ||
16 | |||
17 | 1) Arguments are delimited by white space, which is either a | ||
18 | space or a tab. | ||
19 | |||
20 | 2) A string surrounded by double quotation marks is | ||
21 | interpreted as a single argument, regardless of white space | ||
22 | contained within. A quoted string can be embedded in an | ||
23 | argument. | ||
24 | |||
25 | 3) A double quotation mark preceded by a backslash is | ||
26 | interpreted as a literal double quotation mark. | ||
27 | |||
28 | 4) Backslashes are interpreted literally, unless they | ||
29 | immediately precede a double quotation mark. | ||
30 | |||
31 | 5) If backslashes immediately precede a double quotation mark, | ||
32 | every pair of backslashes is interpreted as a literal | ||
33 | backslash. If the number of backslashes is odd, the last | ||
34 | backslash escapes the next double quotation mark as | ||
35 | described in rule 3. | ||
36 | """ | ||
37 | |||
38 | # See | ||
39 | # http://msdn.microsoft.com/library/en-us/vccelng/htm/progs_12.asp | ||
40 | result = [] | ||
41 | needquote = False | ||
42 | for arg in seq: | ||
43 | bs_buf = [] | ||
44 | |||
45 | # Add a space to separate this argument from the others | ||
46 | if result: | ||
47 | result.append(' ') | ||
48 | |||
49 | needquote = (" " in arg) or ("\t" in arg) or ("|" in arg) or arg == "" | ||
50 | if needquote: | ||
51 | result.append('"') | ||
52 | |||
53 | for c in arg: | ||
54 | if c == '\\': | ||
55 | # Don't know if we need to double yet. | ||
56 | bs_buf.append(c) | ||
57 | elif c == '"': | ||
58 | # Double preceding backslashes. | ||
59 | result.append('\\' * len(bs_buf)*2) | ||
60 | bs_buf = [] | ||
61 | result.append('\\"') | ||
62 | else: | ||
63 | # Normal char | ||
64 | if bs_buf: | ||
65 | result.extend(bs_buf) | ||
66 | bs_buf = [] | ||
67 | result.append(c) | ||
68 | |||
69 | # Add remaining backslashes, if any. | ||
70 | if bs_buf: | ||
71 | result.extend(bs_buf) | ||
72 | |||
73 | if needquote: | ||
74 | result.extend(bs_buf) | ||
75 | result.append('"') | ||
76 | |||
77 | return ''.join(result) | ||
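A few hypothetical sanity checks (not part of the diff, assuming the module is importable as subprocess_fix) illustrating the quoting rules documented above, including empty arguments and arguments containing '|', both of which this version always quotes.

    import subprocess_fix

    # Empty and pipe-containing arguments are quoted rather than dropped or
    # passed through unescaped.
    assert subprocess_fix.list2cmdline(['']) == '""'
    assert subprocess_fix.list2cmdline(['foo|bar']) == '"foo|bar"'
    # Arguments with spaces are quoted; embedded double quotes are backslash-escaped.
    assert subprocess_fix.list2cmdline(['a b', 'c"d']) == '"a b" c\\"d'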