source: trunk/anuga_core/source/fabricate.py @ 8729

Last change on this file since 8729 was 8531, checked in by steve, 12 years ago

Moved fabricate over to source

  • Property svn:executable set to *
File size: 56.8 KB
Line 
1#!/usr/bin/env python
2
3"""Build tool that finds dependencies automatically for any language.
4
5fabricate is a build tool that finds dependencies automatically for any
6language. It's small and just works. No hidden stuff behind your back. It was
7inspired by Bill McCloskey's make replacement, memoize, but fabricate works on
8Windows as well as Linux.
9
10Read more about how to use it and how it works on the project page:
11    http://code.google.com/p/fabricate/
12
13Like memoize, fabricate is released under a "New BSD license". fabricate is
14copyright (c) 2009 Brush Technology. Full text of the license is here:
15    http://code.google.com/p/fabricate/wiki/License
16
17To get help on fabricate functions:
18    from fabricate import *
19    help(function)
20
21"""
22
23from __future__ import with_statement
24
25# fabricate version number
26__version__ = '1.24'
27
28# if version of .deps file has changed, we know to not use it
29deps_version = 2
30
31import atexit
32import optparse
33import os
34import platform
35import re
36import shlex
37import stat
38import subprocess
39import sys
40import tempfile
41import time
42import threading # NB uses old camelCase names for backward compatibility
# multiprocessing module only exists on Python >= 2.6
try:
    import multiprocessing
except ImportError:
    class MultiprocessingModule(object):
        # Stand-in for the real module: the import itself succeeds, but any
        # attribute access (i.e. the first actual use) raises, so only
        # parallel builds are disabled rather than the whole script.
        def __getattr__(self, name):
            raise NotImplementedError("multiprocessing module not available, can't do parallel builds")
    multiprocessing = MultiprocessingModule()
51
52# so you can do "from fabricate import *" to simplify your build script
53__all__ = ['setup', 'run', 'autoclean', 'main', 'shell', 'fabricate_version',
54           'memoize', 'outofdate', 'parse_options', 'after',
55           'ExecutionError', 'md5_hasher', 'mtime_hasher',
56           'Runner', 'AtimesRunner', 'StraceRunner', 'AlwaysRunner',
57           'SmartRunner', 'Builder']
58
59import textwrap
60
61__doc__ += "Exported functions are:\n" + '  ' + '\n  '.join(textwrap.wrap(', '.join(__all__), 80))
62
63
64
65FAT_atime_resolution = 24*60*60     # resolution on FAT filesystems (seconds)
66FAT_mtime_resolution = 2
67
68# NTFS resolution is < 1 ms
69# We assume this is considerably more than time to run a new process
70
71NTFS_atime_resolution = 0.0002048   # resolution on NTFS filesystems (seconds)
72NTFS_mtime_resolution = 0.0002048   #  is actually 0.1us but python's can be
73                                    #  as low as 204.8us due to poor
74                                    #  float precision when storing numbers
75                                    #  as big as NTFS file times can be
76                                    #  (float has 52-bit precision and NTFS
77                                    #  FILETIME has 63-bit precision, so
78                                    #  we've lost 11 bits = 2048)
79
80# So we can use md5func in old and new versions of Python without warnings
81try:
82    import hashlib
83    md5func = hashlib.md5
84except ImportError:
85    import md5
86    md5func = md5.new
87
# Use json, or pickle on older Python versions if simplejson not installed
try:
    import json
except ImportError:
    try:
        import simplejson as json
    except ImportError:
        import cPickle
        # needed to ignore the indent= argument for pickle's dump()
        class PickleJson:
            # Minimal json-module work-alike backed by cPickle, so the rest
            # of the code can call json.load()/json.dump() unchanged.
            def load(self, f):
                return cPickle.load(f)
            def dump(self, obj, f, indent=None, sort_keys=None):
                # indent/sort_keys are accepted for json.dump() API
                # compatibility but have no effect for pickle
                return cPickle.dump(obj, f)
        json = PickleJson()
103
def printerr(message):
    """ Print given message to stderr with a line feed. """
    sys.stderr.write('%s\n' % message)
107
class PathError(Exception):
    """ Raised when a path given in the builder's build dirs does not
        exist on the filesystem. """
    pass
110
class ExecutionError(Exception):
    """ Raised by shell() and run() if command returns non-zero exit code. """
    # raised with args (message, output, status) -- see _shell() below
    pass
114
def args_to_list(args):
    """ Return a flat list of the given arguments for shell(). """
    flat = []
    for item in args:
        if item is None:
            # None entries are simply dropped
            continue
        if hasattr(item, '__iter__'):
            # any iterable (list/tuple) is flattened recursively; note that
            # in Python 2 strings have no __iter__, so they fall through
            flat.extend(args_to_list(item))
        elif isinstance(item, basestring):
            flat.append(item)
        else:
            # non-string scalars are converted with str()
            flat.append(str(item))
    return flat
128
def shell(*args, **kwargs):
    r""" Run a command: program name is given in first arg and command line
        arguments in the rest of the args. Iterables (lists and tuples) in args
        are recursively converted to separate arguments, non-string types are
        converted with str(arg), and None is ignored. For example:

        >>> def tail(input, n=3, flags=None):
        >>>     args = ['-n', n]
        >>>     return shell('tail', args, flags, input=input)
        >>> tail('a\nb\nc\nd\ne\n')
        'c\nd\ne\n'
        >>> tail('a\nb\nc\nd\ne\n', 2, ['-v'])
        '==> standard input <==\nd\ne\n'

        Keyword arguments kwargs are interpreted as follows:

        "input" is a string to pass standard input into the process (or the
            default of None to use parent's stdin, eg: the keyboard)
        "silent" is True (default) to return process's standard output as a
            string, or False to print it as it comes out
        "shell" set to True will run the command via the shell (/bin/sh or
            COMSPEC) instead of running the command directly (the default)
        "ignore_status" set to True means ignore command status code -- i.e.,
            don't raise an ExecutionError on nonzero status code
        Any other kwargs are passed directly to subprocess.Popen
        Raises ExecutionError(message, output, status) if the command returns
        a non-zero status code. """
    # all the real work happens in _shell(); this wrapper exists only so the
    # *args tuple can be passed through as a single positional argument
    return _shell(args, **kwargs)
157
def _shell(args, input=None, silent=True, shell=False, ignore_status=False, **kwargs):
    # See shell() above for the meaning of the keyword arguments.
    if input:
        stdin = subprocess.PIPE
    else:
        stdin = None
    if silent:
        stdout = subprocess.PIPE
    else:
        stdout = None
    arglist = args_to_list(args)
    if not arglist:
        raise TypeError('shell() takes at least 1 argument (0 given)')
    if shell:
        # handle subprocess.Popen quirk where subsequent args are passed
        # to bash instead of to our command
        command = subprocess.list2cmdline(arglist)
    else:
        command = arglist
    try:
        # stderr is always merged into stdout so the error output is part of
        # the returned/raised output string
        proc = subprocess.Popen(command, stdin=stdin, stdout=stdout,
                                stderr=subprocess.STDOUT, shell=shell, **kwargs)
    except OSError, e:
        # Work around the problem that Windows Popen doesn't say what file it couldn't find
        if platform.system() == 'Windows' and e.errno == 2 and e.filename is None:
            e.filename = arglist[0]
        raise e
    output, stderr = proc.communicate(input)
    status = proc.wait()
    if status and not ignore_status:
        raise ExecutionError('%r exited with status %d'
                             % (os.path.basename(arglist[0]), status),
                             output, status)
    if silent:
        # when not silent, output went straight to the console and this
        # function implicitly returns None
        return output
192
def md5_hasher(filename):
    """ Return MD5 hash of given filename, or None if file doesn't exist. """
    try:
        with open(filename, 'rb') as f:
            return md5func(f.read()).hexdigest()
    except IOError:
        # a missing (or unreadable) file hashes to None
        return None
203
def mtime_hasher(filename):
    """ Return modification time of file, or None if file doesn't exist. """
    try:
        return repr(os.stat(filename).st_mtime)
    except (IOError, OSError):
        # a missing/unstattable file hashes to None
        return None
211
class RunnerUnsupportedException(Exception):
    """ Exception raised by a Runner constructor if that runner is not
        supported on the current platform."""
    pass
216
class Runner(object):
    def __call__(self, *args, **kwargs):
        """ Run command and return (dependencies, outputs), where
            dependencies is a list of the filenames of files that the
            command depended on, and output is a list of the filenames
            of files that the command modified. The input is passed
            to shell()"""
        raise NotImplementedError("Runner subclass called but subclass didn't define __call__")

    def actual_runner(self):
        """ Return the actual runner object (overriden in SmartRunner). """
        return self

    def ignore(self, name):
        """ Return truthy (a match object) if name matches the builder's
            ignore pattern, None otherwise. """
        return self._builder.ignore.search(name)
232
class AtimesRunner(Runner):
    """ Runner that finds dependencies and outputs by snapshotting file
        access/modification times before and after running the command
        (no tracing tool needed, but requires a filesystem whose atimes
        actually update). """

    def __init__(self, builder):
        # probe the build dirs once up front; has_atimes() returns 0 when
        # atimes don't work at all, in which case this runner is unusable
        self._builder = builder
        self.atimes = AtimesRunner.has_atimes(self._builder.dirs)
        if self.atimes == 0:
            raise RunnerUnsupportedException(
                'atimes are not supported on this platform')

    @staticmethod
    def file_has_atimes(filename):
        """ Return whether the given filesystem supports access time updates for
            this file. Return:
              - 0 if no a/mtimes not updated
              - 1 if the atime resolution is at least one day and
                the mtime resolution at least 2 seconds (as on FAT filesystems)
              - 2 if the atime and mtime resolutions are both < ms
                (NTFS filesystem has 100 ns resolution). """

        def access_file(filename):
            """ Access (read a byte from) file to try to update its access time. """
            f = open(filename)
            f.read(1)
            f.close()

        # move the file's times back by FAT resolution, then read it: on a
        # usable filesystem the read must push the atime forward again
        initial = os.stat(filename)
        os.utime(filename, (
            initial.st_atime-FAT_atime_resolution,
            initial.st_mtime-FAT_mtime_resolution))

        adjusted = os.stat(filename)
        access_file(filename)
        after = os.stat(filename)

        # Check that a/mtimes actually moved back by at least resolution and
        #  updated by a file access.
        #  add NTFS_atime_resolution to account for float resolution factors
        #  Comment on resolution/2 in atimes_runner()
        if initial.st_atime-adjusted.st_atime > FAT_atime_resolution+NTFS_atime_resolution or \
           initial.st_mtime-adjusted.st_mtime > FAT_mtime_resolution+NTFS_atime_resolution or \
           initial.st_atime==adjusted.st_atime or \
           initial.st_mtime==adjusted.st_mtime or \
           not after.st_atime-FAT_atime_resolution/2 > adjusted.st_atime:
            return 0

        # FAT-level resolution works; repeat the test at NTFS resolution
        os.utime(filename, (
            initial.st_atime-NTFS_atime_resolution,
            initial.st_mtime-NTFS_mtime_resolution))
        adjusted = os.stat(filename)

        # Check that a/mtimes actually moved back by at least resolution
        # Note: != comparison here fails due to float rounding error
        #  double NTFS_atime_resolution to account for float resolution factors
        if initial.st_atime-adjusted.st_atime > NTFS_atime_resolution*2 or \
           initial.st_mtime-adjusted.st_mtime > NTFS_mtime_resolution*2 or \
           initial.st_atime==adjusted.st_atime or \
           initial.st_mtime==adjusted.st_mtime:
            return 1

        return 2

    @staticmethod
    def exists(path):
        """ Raise PathError if the given build path doesn't exist. """
        if not os.path.exists(path):
            # Note: in linux, error may not occur: strace runner doesn't check
            raise PathError("build dirs specified a non-existant path '%s'" % path)

    @staticmethod
    def has_atimes(paths):
        """ Return whether a file created in each path supports atimes and mtimes.
            Return value is the same as used by file_has_atimes
            Note: for speed, this only tests files created at the top directory
            of each path. A safe assumption in most build environments.
            In the unusual case that any sub-directories are mounted
            on alternate file systems that don't support atimes, the build may
            fail to identify a dependency """

        atimes = 2                  # start by assuming we have best atimes
        for path in paths:
            AtimesRunner.exists(path)
            # create a scratch file in the directory to test against
            handle, filename = tempfile.mkstemp(dir=path)
            try:
                try:
                    f = os.fdopen(handle, 'wb')
                except:
                    os.close(handle)
                    raise
                try:
                    f.write('x')    # need a byte in the file for access test
                finally:
                    f.close()
                # overall capability is the worst of all tested dirs
                atimes = min(atimes, AtimesRunner.file_has_atimes(filename))
            finally:
                os.remove(filename)
        return atimes

    def _file_times(self, path, depth):
        """ Helper function for file_times().
            Return a dict of file times, recursing directories that don't
            start with self._builder.ignoreprefix """

        AtimesRunner.exists(path)
        names = os.listdir(path)
        times = {}
        ignoreprefix = self._builder.ignoreprefix
        for name in names:
            if ignoreprefix and name.startswith(ignoreprefix):
                continue
            if path == '.':
                fullname = name
            else:
                fullname = os.path.join(path, name)
            st = os.stat(fullname)
            if stat.S_ISDIR(st.st_mode):
                if depth > 1:
                    times.update(self._file_times(fullname, depth-1))
            elif stat.S_ISREG(st.st_mode):
                # only regular files are tracked (no symlinks/devices)
                times[fullname] = st.st_atime, st.st_mtime
        return times

    def file_times(self):
        """ Return a dict of "filepath: (atime, mtime)" entries for each file
            in self._builder.dirs. "filepath" is the absolute path, "atime" is
            the access time, "mtime" the modification time.
            Recurse directories that don't start with
            self._builder.ignoreprefix and have depth less than
            self._builder.dirdepth. """

        times = {}
        for path in self._builder.dirs:
            times.update(self._file_times(path, self._builder.dirdepth))
        return times

    def _utime(self, filename, atime, mtime):
        """ Call os.utime but ignore permission errors """
        try:
            os.utime(filename, (atime, mtime))
        except OSError, e:
            # ignore permission errors -- we can't build with files
            # that we can't access anyway (errno 1 is EPERM)
            if e.errno != 1:
                raise

    def _age_atimes(self, filetimes):
        """ Age files' atimes and mtimes to be at least FAT_xx_resolution old.
            Only adjust if the given filetimes dict says it isn't that old,
            and return a new dict of filetimes with the ages adjusted. """
        adjusted = {}
        now = time.time()
        for filename, entry in filetimes.iteritems():
            if now-entry[0] < FAT_atime_resolution or now-entry[1] < FAT_mtime_resolution:
                entry = entry[0] - FAT_atime_resolution, entry[1] - FAT_mtime_resolution
                self._utime(filename, entry[0], entry[1])
            adjusted[filename] = entry
        return adjusted

    def __call__(self, *args, **kwargs):
        """ Run command and return its dependencies and outputs, using before
            and after access times to determine dependencies. """

        # For Python pre-2.5, ensure os.stat() returns float atimes
        old_stat_float = os.stat_float_times()
        os.stat_float_times(True)

        originals = self.file_times()
        if self.atimes == 2:
            # sub-millisecond resolution: no need to age times beforehand
            befores = originals
            atime_resolution = 0
            mtime_resolution = 0
        else:
            # coarse (FAT-like) resolution: age times so changes are visible
            befores = self._age_atimes(originals)
            atime_resolution = FAT_atime_resolution
            mtime_resolution = FAT_mtime_resolution
        shell_keywords = dict(silent=False)
        shell_keywords.update(kwargs)
        shell(*args, **shell_keywords)
        afters = self.file_times()
        deps = []
        outputs = []
        for name in afters:
            if name in befores:
                # if file exists before+after && mtime changed, add to outputs
                # Note: Can't just check that atimes > than we think they were
                #       before because os might have rounded them to a later
                #       date than what we think we set them to in befores.
                #       So we make sure they're > by at least 1/2 the
                #       resolution.  This will work for anything with a
                #       resolution better than FAT.
                if afters[name][1]-mtime_resolution/2 > befores[name][1]:
                    if not self.ignore(name):
                        outputs.append(name)
                elif afters[name][0]-atime_resolution/2 > befores[name][0]:
                    # otherwise add to deps if atime changed
                    if not self.ignore(name):
                        deps.append(name)
            else:
                # file created (in afters but not befores), add as output
                if not self.ignore(name):
                    outputs.append(name)

        if self.atimes < 2:
            # Restore atimes of files we didn't access: not for any functional
            # reason -- it's just to preserve the access time for the user's info
            for name in deps:
                originals.pop(name)
            for name in originals:
                original = originals[name]
                if original != afters.get(name, None):
                    self._utime(name, original[0], original[1])

        os.stat_float_times(old_stat_float)  # restore stat_float_times value
        return deps, outputs
444
class StraceProcess(object):
    """ Record of a single traced process: its working directory plus the
        sets of files it read (deps) and wrote (outputs). """

    def __init__(self, cwd='.'):
        self.cwd = cwd          # current working directory of the process
        self.deps = set()       # files the process read
        self.outputs = set()    # files the process wrote

    def add_dep(self, dep):
        """ Record a file this process depended on. """
        self.deps.add(dep)

    def add_output(self, output):
        """ Record a file this process wrote. """
        self.outputs.add(output)

    def __str__(self):
        values = (self.cwd, self.deps, self.outputs)
        return '<StraceProcess cwd=%s deps=%s outputs=%s>' % values
460
def _call_strace(self, *args, **kwargs):
    """ Top level function call for Strace that can be run in parallel:
        simply invokes the given runner with the given arguments. """
    return self.__call__(*args, **kwargs)
464
class StraceRunner(Runner):
    """ Runner that finds dependencies and outputs by running the command
        under strace and parsing the log of file-related system calls. """

    # set to True to leave numbered strace log files on disk for debugging
    keep_temps = False

    def __init__(self, builder, build_dir=None):
        self.strace_version = StraceRunner.get_strace_version()
        if self.strace_version == 0:
            raise RunnerUnsupportedException('strace is not available')
        if self.strace_version == 32:
            # this strace logs plain stat() calls
            self._stat_re = self._stat32_re
            self._stat_func = 'stat'
        else:
            # this strace logs stat64() calls
            self._stat_re = self._stat64_re
            self._stat_func = 'stat64'
        self._builder = builder
        self.temp_count = 0     # numbers the log files when keep_temps is set
        self.build_dir = os.path.abspath(build_dir or os.getcwd())

    @staticmethod
    def get_strace_version():
        """ Return 0 if this system doesn't have strace, nonzero otherwise
            (64 if strace supports stat64, 32 otherwise). """
        if platform.system() == 'Windows':
            # even if windows has strace, it's probably a dodgy cygwin one
            return 0
        try:
            # probe by asking strace to trace stat64; a 32-bit strace
            # complains about an invalid system call
            proc = subprocess.Popen(['strace', '-e', 'trace=stat64'], stderr=subprocess.PIPE)
            stdout, stderr = proc.communicate()
            proc.wait()
            if 'invalid system call' in stderr:
                return 32
            else:
                return 64
        except OSError:
            return 0

    # Regular expressions for parsing of strace log
    _open_re       = re.compile(r'(?P<pid>\d+)\s+open\("(?P<name>[^"]*)", (?P<mode>[^,)]*)')
    _stat32_re     = re.compile(r'(?P<pid>\d+)\s+stat\("(?P<name>[^"]*)", .*')
    _stat64_re     = re.compile(r'(?P<pid>\d+)\s+stat64\("(?P<name>[^"]*)", .*')
    _execve_re     = re.compile(r'(?P<pid>\d+)\s+execve\("(?P<name>[^"]*)", .*')
    _mkdir_re      = re.compile(r'(?P<pid>\d+)\s+mkdir\("(?P<name>[^"]*)", .*')
    _rename_re     = re.compile(r'(?P<pid>\d+)\s+rename\("[^"]*", "(?P<name>[^"]*)"\)')
    _kill_re       = re.compile(r'(?P<pid>\d+)\s+killed by.*')
    _chdir_re      = re.compile(r'(?P<pid>\d+)\s+chdir\("(?P<cwd>[^"]*)"\)')
    _exit_group_re = re.compile(r'(?P<pid>\d+)\s+exit_group\((?P<status>.*)\).*')
    _clone_re      = re.compile(r'(?P<pid_clone>\d+)\s+(clone|fork|vfork)\(.*\)\s*=\s*(?P<pid>\d*)')

    # Regular expressions for detecting interrupted lines in strace log
    # 3618  clone( <unfinished ...>
    # 3618  <... clone resumed> child_stack=0, flags=CLONE, child_tidptr=0x7f83deffa780) = 3622
    _unfinished_start_re = re.compile(r'(?P<pid>\d+)(?P<body>.*)<unfinished ...>$')
    _unfinished_end_re   = re.compile(r'(?P<pid>\d+)\s+\<\.\.\..*\>(?P<body>.*)')

    def _do_strace(self, args, kwargs, outfile, outname):
        """ Run strace on given command args/kwargs, sending output to file.
            Return (status code, list of dependencies, list of outputs). """
        shell_keywords = dict(silent=False)
        shell_keywords.update(kwargs)
        # -f follows child processes, -o writes the log to outname
        shell('strace', '-fo', outname, '-e',
              'trace=open,%s,execve,exit_group,chdir,mkdir,rename,clone,vfork,fork' % self._stat_func,
              args, **shell_keywords)
        cwd = '.'
        status = 0
        processes  = {}  # dictionary of processes (key = pid)
        unfinished = {}  # list of interrupted entries in strace log
        for line in outfile:
            # look for split lines: an interrupted syscall is logged as an
            # "<unfinished ...>" line, completed later by a "resumed" line
            unfinished_start_match = self._unfinished_start_re.match(line)
            unfinished_end_match = self._unfinished_end_re.match(line)
            if unfinished_start_match:
                pid = unfinished_start_match.group('pid')
                body = unfinished_start_match.group('body')
                unfinished[pid] = pid + ' ' + body
                continue
            elif unfinished_end_match:
                pid = unfinished_end_match.group('pid')
                body = unfinished_end_match.group('body')
                line = unfinished[pid] + body
                del unfinished[pid]

            is_output = False
            open_match = self._open_re.match(line)
            stat_match = self._stat_re.match(line)
            execve_match = self._execve_re.match(line)
            mkdir_match = self._mkdir_re.match(line)
            rename_match = self._rename_re.match(line)
            clone_match = self._clone_re.match(line)

            kill_match = self._kill_re.match(line)
            if kill_match:
                # traced process was killed: caller reports this as an error
                return None, None, None

            match = None
            if execve_match:
                pid = execve_match.group('pid')
                if pid not in processes:
                    processes[pid] = StraceProcess()
                    match = execve_match
            elif clone_match:
                # a new child process inherits its parent's cwd
                pid = clone_match.group('pid')
                pid_clone = clone_match.group('pid_clone')
                processes[pid] = StraceProcess(processes[pid_clone].cwd)
            elif open_match:
                match = open_match
                mode = match.group('mode')
                if 'O_WRONLY' in mode or 'O_RDWR' in mode:
                    # it's an output file if opened for writing
                    is_output = True
            elif stat_match:
                match = stat_match
            elif mkdir_match:
                match = mkdir_match
            elif rename_match:
                match = rename_match
                # the destination of a rename is an output file
                is_output = True

            if match:
                name = match.group('name')
                pid  = match.group('pid')
                cwd = processes[pid].cwd
                if cwd != '.':
                    name = os.path.join(cwd, name)

                # normalise path name to ensure files are only listed once
                name = os.path.normpath(name)

                # if it's an absolute path name under the build directory,
                # make it relative to build_dir before saving to .deps file
                if os.path.isabs(name) and name.startswith(self.build_dir):
                    name = name[len(self.build_dir):]
                    name = name.lstrip(os.path.sep)

                if (self._builder._is_relevant(name)
                    and not self.ignore(name)
                    and (os.path.isfile(name)
                         or os.path.isdir(name)
                         or not os.path.lexists(name))):
                    if is_output:
                        processes[pid].add_output(name)
                    else:
                        processes[pid].add_dep(name)

            match = self._chdir_re.match(line)
            if match:
                # NOTE(review): 'pid' here is whatever the previous match set,
                # not this chdir line's own pid group -- a chdir could be
                # credited to the wrong process; verify intended behaviour
                processes[pid].cwd = os.path.join(processes[pid].cwd, match.group('cwd'))

            match = self._exit_group_re.match(line)
            if match:
                # the last exit status seen becomes the command's status
                status = int(match.group('status'))

        # collect outputs and dependencies from all processes
        deps = set()
        outputs = set()
        for pid, process in processes.items():
            deps = deps.union(process.deps)
            outputs = outputs.union(process.outputs)

        return status, list(deps), list(outputs)

    def __call__(self, *args, **kwargs):
        """ Run command and return its dependencies and outputs, using strace
            to determine dependencies (by looking at what files are opened or
            modified). """
        ignore_status = kwargs.pop('ignore_status', False)
        if self.keep_temps:
            # keep numbered logs in the current directory for debugging
            outname = 'strace%03d.txt' % self.temp_count
            self.temp_count += 1
            handle = os.open(outname, os.O_CREAT)
        else:
            handle, outname = tempfile.mkstemp()

        try:
            try:
                # strace writes the log to outname; we only read it here
                outfile = os.fdopen(handle, 'r')
            except:
                os.close(handle)
                raise
            try:
                status, deps, outputs = self._do_strace(args, kwargs, outfile, outname)
                if status is None:
                    # _do_strace returns all-None when the process was killed
                    raise ExecutionError(
                        '%r was killed unexpectedly' % args[0], '', -1)
            finally:
                outfile.close()
        finally:
            if not self.keep_temps:
                os.remove(outname)

        if status and not ignore_status:
            raise ExecutionError('%r exited with status %d'
                                 % (os.path.basename(args[0]), status),
                                 '', status)
        return list(deps), list(outputs)
659
class AlwaysRunner(Runner):
    def __init__(self, builder):
        # nothing to probe or configure for this runner
        pass

    def __call__(self, *args, **kwargs):
        """ Runner that always runs given command, used as a backup in case
            a system doesn't have strace or atimes. """
        keywords = dict(silent=False)
        keywords.update(kwargs)
        shell(*args, **keywords)
        # no dependency tracking is possible, so report "unknown"
        return None, None
671
class SmartRunner(Runner):
    """ Smart command runner that uses StraceRunner if it can,
        otherwise AtimesRunner if available, otherwise AlwaysRunner. """
    def __init__(self, builder):
        self._builder = builder
        # try each runner in order of preference; AlwaysRunner never raises
        # RunnerUnsupportedException, so the loop always selects one
        for runner_class in (StraceRunner, AtimesRunner, AlwaysRunner):
            try:
                self._runner = runner_class(self._builder)
            except RunnerUnsupportedException:
                continue
            break

    def actual_runner(self):
        """ Return the runner that was actually selected. """
        return self._runner

    def __call__(self, *args, **kwargs):
        return self._runner(*args, **kwargs)
690
class _running(object):
    """ Represents a task put on the parallel pool
        and its results when complete """
    def __init__(self, async, command):
        """ "async" is the AsyncResult object returned from pool.apply_async
            "command" is the command that was run """
        self.async = async        # AsyncResult polled for completion
        self.command = command    # kept for error reporting
        self.results = None       # filled in once the task has finished
700       
class _after(object):
    """ Represents something waiting on completion of some previous commands """
    def __init__(self, afters, do):
        """ "afters" is a group id or a iterable of group ids to wait on
            "do" is either a tuple representing a command (group, command,
                arglist, kwargs) or a threading.Condition to be released """
        self.afters = afters  # group id(s) that must finish first
        self.do = do          # deferred command tuple or Condition to release
709       
class _Groups(object):
    """ Thread safe mapping object whose values are lists of _running
        or _after objects and a count of how many have *not* completed """

    class value(object):
        """ the value type in the map """
        def __init__(self, val=None):
            self.count = 0                             # items not yet completed
            self.items = [] if val is None else [val]  # items in this group
            self.ok = True                             # no errors in group so far

    def __init__(self):
        # the special key False holds _afters rather than _runnings
        self.groups = {False: self.value()}
        self.lock = threading.Lock()

    def item_list(self, id):
        """ Return copy of the value list """
        with self.lock:
            return list(self.groups[id].items)

    def remove(self, id):
        """ Remove the group """
        with self.lock:
            self.groups.pop(id)

    def remove_item(self, id, val):
        with self.lock:
            self.groups[id].items.remove(val)

    def add(self, id, val):
        with self.lock:
            group = self.groups.get(id)
            if group is None:
                group = self.groups[id] = self.value(val)
            else:
                group.items.append(val)
            group.count += 1

    def get_count(self, id):
        with self.lock:
            group = self.groups.get(id)
            return group.count if group is not None else 0

    def dec_count(self, id):
        with self.lock:
            remaining = self.groups[id].count - 1
            if remaining < 0:
                raise ValueError
            self.groups[id].count = remaining
            return remaining

    def get_ok(self, id):
        with self.lock:
            return self.groups[id].ok

    def set_ok(self, id, to):
        with self.lock:
            self.groups[id].ok = to

    def ids(self):
        with self.lock:
            return self.groups.keys()
773
# pool of processes to run parallel jobs, must not be part of any object that
# is pickled for transfer to these processes, ie it must be global
_pool = None
# object holding results, must also be global
_groups = _Groups()
# results collecting thread (started by Builder when parallel building is on)
_results = None
# event used to tell the results thread to stop polling and shut down
_stop_results = threading.Event()
782
783class _todo(object):
784    """ holds the parameters for commands waiting on others """
785    def __init__(self, group, command, arglist, kwargs):
786        self.group = group      # which group it should run as
787        self.command = command  # string command
788        self.arglist = arglist  # command arguments
789        self.kwargs = kwargs    # keywork args for the runner
790       
791def _results_handler( builder, delay=0.01):
792    """ Body of thread that stores results in .deps and handles 'after'
793        conditions
794       "builder" the builder used """
795    try:
796        while not _stop_results.isSet():
797            # go through the lists and check any results available
798            for id in _groups.ids():
799                if id is False: continue # key of False is _afters not _runnings
800                for r in _groups.item_list(id):
801                    if r.results is None and r.async.ready():
802                        try:
803                            d, o = r.async.get()
804                        except Exception, e:
805                            r.results = e
806                            _groups.set_ok(False)
807                        else:
808                            builder.done(r.command, d, o) # save deps
809                            r.results = (r.command, d, o)
810                        _groups.dec_count(id)
811            # check if can now schedule things waiting on the after queue
812            for a in _groups.item_list(False):
813                still_to_do = sum(_groups.get_count(g) for g in a.afters)
814                no_error = all(_groups.get_ok(g) for g in a.afters)
815                if False in a.afters:
816                    still_to_do -= 1 # don't count yourself of course
817                if still_to_do == 0:
818                    if isinstance(a.do, tuple):
819                        if no_error:
820                            async = _pool.apply_async(_call_strace, a.do.arglist,
821                                        a.do.kwargs)
822                            _groups.add(a.do.group, _running(async, a.do.command))
823                    else:
824                        a.do.acquire()
825                        a.do.notify()
826                        a.do.release()
827                    _groups.remove_item(False, a)
828                    _groups.dec_count(False)
829            _stop_results.wait(delay)
830    except Exception:
831        etype, eval, etb = sys.exc_info()
832        printerr("Error: exception " + repr(etype) + " at line " + str(etb.tb_lineno))
833    finally:
834        if not _stop_results.isSet():
835            # oh dear, I am about to die for unexplained reasons, stop the whole
836            # app otherwise the main thread hangs waiting on non-existant me,
837            # Note: sys.exit() only kills me
838            printerr("Error: unexpected results handler exit")
839            os._exit(1)
840       
841class Builder(object):
842    """ The Builder.
843
844        You may supply a "runner" class to change the way commands are run
845        or dependencies are determined. For an example, see:
846            http://code.google.com/p/fabricate/wiki/HowtoMakeYourOwnRunner
847
848        A "runner" must be a subclass of Runner and must have a __call__()
849        function that takes a command as a list of args and returns a tuple of
850        (deps, outputs), where deps is a list of rel-path'd dependency files
851        and outputs is a list of rel-path'd output files. The default runner
852        is SmartRunner, which automatically picks one of StraceRunner,
853        AtimesRunner, or AlwaysRunner depending on your system.
854        A "runner" class may have an __init__() function that takes the
855        builder as a parameter.
856    """
857
858    def __init__(self, runner=None, dirs=None, dirdepth=100, ignoreprefix='.',
859                 ignore=None, hasher=md5_hasher, depsname='.deps',
860                 quiet=False, debug=False, inputs_only=False, parallel_ok=False):
861        """ Initialise a Builder with the given options.
862
863        "runner" specifies how programs should be run.  It is either a
864            callable compatible with the Runner class, or a string selecting
865            one of the standard runners ("atimes_runner", "strace_runner",
866            "always_runner", or "smart_runner").
867        "dirs" is a list of paths to look for dependencies (or outputs) in
868            if using the strace or atimes runners.
869        "dirdepth" is the depth to recurse into the paths in "dirs" (default
870            essentially means infinitely). Set to 1 to just look at the
871            immediate paths in "dirs" and not recurse at all. This can be
872            useful to speed up the AtimesRunner if you're building in a large
873            tree and you don't care about all of the subdirectories.
874        "ignoreprefix" prevents recursion into directories that start with
875            prefix.  It defaults to '.' to ignore svn directories.
876            Change it to '_svn' if you use _svn hidden directories.
877        "ignore" is a regular expression.  Any dependency that contains a
878            regex match is ignored and not put into the dependency list.
879            Note that the regex may be VERBOSE (spaces are ignored and # line
880            comments allowed -- use \ prefix to insert these characters)
881        "hasher" is a function which returns a string which changes when
882            the contents of its filename argument changes, or None on error.
883            Default is md5_hasher, but can also be mtime_hasher.
884        "depsname" is the name of the JSON dependency file to load/save.
885        "quiet" set to True tells the builder to not display the commands being
886            executed (or other non-error output).
887        "debug" set to True makes the builder print debug output, such as why
888            particular commands are being executed
889        "inputs_only" set to True makes builder only re-build if input hashes
890            have changed (ignores output hashes); use with tools that touch
891            files that shouldn't cause a rebuild; e.g. g++ collect phase
892        "parallel_ok" set to True to indicate script is safe for parallel running
893        """
894        if dirs is None:
895            dirs = ['.']
896        self.dirs = dirs
897        self.dirdepth = dirdepth
898        self.ignoreprefix = ignoreprefix
899        if ignore is None:
900            ignore = r'$x^'         # something that can't match
901        self.ignore = re.compile(ignore, re.VERBOSE)
902        self.depsname = depsname
903        self.hasher = hasher
904        self.quiet = quiet
905        self.debug = debug
906        self.inputs_only = inputs_only
907        self.checking = False
908        self.hash_cache = {}
909
910        # instantiate runner after the above have been set in case it needs them
911        if runner is not None:
912            self.set_runner(runner)
913        elif hasattr(self, 'runner'):
914            # For backwards compatibility, if a derived class has
915            # defined a "runner" method then use it:
916            pass
917        else:
918            self.runner = SmartRunner(self)
919
920        is_strace = isinstance(self.runner.actual_runner(), StraceRunner)
921        self.parallel_ok = parallel_ok and is_strace and _pool is not None
922        if self.parallel_ok:
923            _results = threading.Thread(target=_results_handler,
924                                        args=[self])
925            _results.setDaemon(True)
926            _results.start()
927            StraceRunner.keep_temps = False # unsafe for parallel execution
928           
929    def echo(self, message):
930        """ Print message, but only if builder is not in quiet mode. """
931        if not self.quiet:
932            print message
933
934    def echo_command(self, command, echo=None):
935        """ Show a command being executed. Also passed run's "echo" arg
936            so you can override what's displayed.
937        """
938        if echo is not None:
939            command = str(echo)
940        self.echo(command)
941
942    def echo_delete(self, filename, error=None):
943        """ Show a file being deleted. For subclassing Builder and overriding
944            this function, the exception is passed in if an OSError occurs
945            while deleting a file. """
946        if error is None:
947            self.echo('deleting %s' % filename)
948
949    def echo_debug(self, message):
950        """ Print message, but only if builder is in debug mode. """
951        if self.debug:
952            print 'DEBUG:', message
953
954    def run(self, *args, **kwargs):
955        """ Run command given in args with kwargs per shell(), but only if its
956            dependencies or outputs have changed or don't exist. Return tuple
957            of (command_line, deps_list, outputs_list) so caller or subclass
958            can use them.
959
960            Parallel operation keyword args "after" specifies a group or
961            iterable of groups to wait for after they finish, "group" specifies
962            the group to add this command to.
963
964            Optional "echo" keyword arg is passed to echo_command() so you can
965            override its output if you want.
966        """
967        after = kwargs.pop('after', None)
968        group = kwargs.pop('group', True)
969        echo = kwargs.pop('echo', None)
970        arglist = args_to_list(args)
971        if not arglist:
972            raise TypeError('run() takes at least 1 argument (0 given)')
973        # we want a command line string for the .deps file key and for display
974        command = subprocess.list2cmdline(arglist)
975        if not self.cmdline_outofdate(command):
976            return command, None, None
977
978        # if just checking up-to-date-ness, set flag and do nothing more
979        self.outofdate_flag = True
980        if self.checking:
981            return command, None, None
982
983        # use runner to run command and collect dependencies
984        self.echo_command(command, echo=echo)
985        if self.parallel_ok:
986            arglist.insert(0, self.runner)
987            if after is not None:
988                if not hasattr(after, '__iter__'):
989                    after = [after]
990                _groups.add(False,
991                            _after(after, _todo(group, command, arglist,
992                                                kwargs)))
993            else:
994                async = _pool.apply_async(_call_strace, arglist, kwargs)
995                _groups.add(group, _running(async, command))
996            return None
997        else:
998            deps, outputs = self.runner(*arglist, **kwargs)
999            return self.done(command, deps, outputs)
1000       
1001    def done(self, command, deps, outputs):
1002        """ Store the results in the .deps file when they are available """
1003        if deps is not None or outputs is not None:
1004            deps_dict = {}
1005
1006            # hash the dependency inputs and outputs
1007            for dep in deps:
1008                if dep in self.hash_cache:
1009                    # already hashed so don't repeat hashing work
1010                    hashed = self.hash_cache[dep]
1011                else:
1012                    hashed = self.hasher(dep)
1013                if hashed is not None:
1014                    deps_dict[dep] = "input-" + hashed
1015                    # store hash in hash cache as it may be a new file
1016                    self.hash_cache[dep] = hashed
1017
1018            for output in outputs:
1019                hashed = self.hasher(output)
1020                if hashed is not None:
1021                    deps_dict[output] = "output-" + hashed
1022                    # update hash cache as this file should already be in
1023                    # there but has probably changed
1024                    self.hash_cache[output] = hashed
1025
1026            self.deps[command] = deps_dict
1027       
1028        return command, deps, outputs
1029
1030    def memoize(self, command, **kwargs):
1031        """ Run the given command, but only if its dependencies have changed --
1032            like run(), but returns the status code instead of raising an
1033            exception on error. If "command" is a string (as per memoize.py)
1034            it's split into args using shlex.split() in a POSIX/bash style,
1035            otherwise it's a list of args as per run().
1036
1037            This function is for compatiblity with memoize.py and is
1038            deprecated. Use run() instead. """
1039        if isinstance(command, basestring):
1040            args = shlex.split(command)
1041        else:
1042            args = args_to_list(command)
1043        try:
1044            self.run(args, **kwargs)
1045            return 0
1046        except ExecutionError, exc:
1047            message, data, status = exc
1048            return status
1049
1050    def outofdate(self, func):
1051        """ Return True if given build function is out of date. """
1052        self.checking = True
1053        self.outofdate_flag = False
1054        func()
1055        self.checking = False
1056        return self.outofdate_flag
1057
1058    def cmdline_outofdate(self, command):
1059        """ Return True if given command line is out of date. """
1060        if command in self.deps:
1061            # command has been run before, see if deps have changed
1062            for dep, oldhash in self.deps[command].items():
1063                assert oldhash.startswith('input-') or \
1064                       oldhash.startswith('output-'), \
1065                    "%s file corrupt, do a clean!" % self.depsname
1066                io_type, oldhash = oldhash.split('-', 1)
1067
1068                # make sure this dependency or output hasn't changed
1069                if dep in self.hash_cache:
1070                    # already hashed so don't repeat hashing work
1071                    newhash = self.hash_cache[dep]
1072                else:
1073                    # not in hash_cache so make sure this dependency or
1074                    # output hasn't changed
1075                    newhash = self.hasher(dep)
1076                    if newhash is not None:
1077                       # Add newhash to the hash cache
1078                       self.hash_cache[dep] = newhash
1079
1080                if newhash is None:
1081                    self.echo_debug("rebuilding %r, %s %s doesn't exist" %
1082                                    (command, io_type, dep))
1083                    break
1084                if newhash != oldhash and (not self.inputs_only or io_type == 'input'):
1085                    self.echo_debug("rebuilding %r, hash for %s %s (%s) != old hash (%s)" %
1086                                    (command, io_type, dep, newhash, oldhash))
1087                    break
1088            else:
1089                # all dependencies are unchanged
1090                return False
1091        else:
1092            self.echo_debug('rebuilding %r, no dependency data' % command)
1093        # command has never been run, or one of the dependencies didn't
1094        # exist or had changed
1095        return True
1096
1097    def autoclean(self):
1098        """ Automatically delete all outputs of this build as well as the .deps
1099            file. """
1100        # first build a list of all the outputs from the .deps file
1101        outputs = []
1102        for command, deps in self.deps.items():
1103            outputs.extend(dep for dep, hashed in deps.items()
1104                           if hashed.startswith('output-'))
1105        outputs.append(self.depsname)
1106        self._deps = None
1107        for output in outputs:
1108            try:
1109                os.remove(output)
1110            except OSError, e:
1111                self.echo_delete(output, e)
1112            else:
1113                self.echo_delete(output)
1114
1115    @property
1116    def deps(self):
1117        """ Lazy load .deps file so that instantiating a Builder is "safe". """
1118        if not hasattr(self, '_deps') or self._deps is None:
1119            self.read_deps()
1120            atexit.register(self.write_deps, depsname=os.path.abspath(self.depsname))
1121        return self._deps
1122
1123    def read_deps(self):
1124        """ Read dependency JSON file into deps object. """
1125        try:
1126            f = open(self.depsname)
1127            try:
1128                self._deps = json.load(f)
1129                # make sure the version is correct
1130                if self._deps.get('.deps_version', 0) != deps_version:
1131                    printerr('Bad %s dependency file version! Rebuilding.'
1132                             % self.depsname)
1133                    self._deps = {}
1134                self._deps.pop('.deps_version', None)
1135            finally:
1136                f.close()
1137        except IOError:
1138            self._deps = {}
1139
1140    def write_deps(self, depsname=None):
1141        """ Write out deps object into JSON dependency file. """
1142        if self._deps is None:
1143            return                      # we've cleaned so nothing to save
1144        self.deps['.deps_version'] = deps_version
1145        if depsname is None:
1146            depsname = self.depsname
1147        f = open(depsname, 'w')
1148        try:
1149            json.dump(self.deps, f, indent=4, sort_keys=True)
1150        finally:
1151            f.close()
1152            self._deps.pop('.deps_version', None)
1153
1154    _runner_map = {
1155        'atimes_runner' : AtimesRunner,
1156        'strace_runner' : StraceRunner,
1157        'always_runner' : AlwaysRunner,
1158        'smart_runner' : SmartRunner,
1159        }
1160
1161    def set_runner(self, runner):
1162        """Set the runner for this builder.  "runner" is either a Runner
1163           subclass (e.g. SmartRunner), or a string selecting one of the
1164           standard runners ("atimes_runner", "strace_runner",
1165           "always_runner", or "smart_runner")."""
1166        try:
1167            self.runner = self._runner_map[runner](self)
1168        except KeyError:
1169            if isinstance(runner, basestring):
1170                # For backwards compatibility, allow runner to be the
1171                # name of a method in a derived class:
1172                self.runner = getattr(self, runner)
1173            else:
1174                # pass builder to runner class to get a runner instance
1175                self.runner = runner(self)
1176
1177    def _is_relevant(self, fullname):
1178        """ Return True if file is in the dependency search directories. """
1179
1180        # need to abspath to compare rel paths with abs
1181        fullname = os.path.abspath(fullname)
1182        for path in self.dirs:
1183            path = os.path.abspath(path)
1184            if fullname.startswith(path):
1185                rest = fullname[len(path):]
1186                # files in dirs starting with ignoreprefix are not relevant
1187                if os.sep+self.ignoreprefix in os.sep+os.path.dirname(rest):
1188                    continue
1189                # files deeper than dirdepth are not relevant
1190                if rest.count(os.sep) > self.dirdepth:
1191                    continue
1192                return True
1193        return False
1194
# default Builder instance, used by helper run() and main() helper functions
default_builder = None
default_command = 'build'  # name of the build-script function run by default

# save the setup arguments for use by main()
_setup_builder = None
_setup_default = None
_setup_kwargs = {}
1203
def setup(builder=None, default=None, **kwargs):
    """ NOTE: setup functionality is now in main(), setup() is kept for
        backward compatibility and should not be used in new scripts.

        Record the Builder class to instantiate (or None for the default),
        the name of the default build function, and the Builder keyword
        arguments, all for later use by main(). """
    global _setup_builder, _setup_default, _setup_kwargs
    _setup_builder, _setup_default, _setup_kwargs = builder, default, kwargs
setup.__doc__ += '\n\n' + Builder.__init__.__doc__
1217
def _set_default_builder():
    """ Lazily create the module-level default Builder if none exists. """
    global default_builder
    if default_builder is not None:
        return
    default_builder = Builder()
1223
def run(*args, **kwargs):
    """ Run the given command via the default Builder, but only if its
        dependencies have changed; return value as per Builder.run().
        A single iterable positional argument is treated as a sequence of
        commands, and a list of Builder.run() results is returned. """
    _set_default_builder()
    is_command_list = len(args) == 1 and hasattr(args[0], '__iter__')
    if not is_command_list:
        return default_builder.run(*args, **kwargs)
    return [default_builder.run(*cmd, **kwargs) for cmd in args[0]]
1234
def after(*args):
    """ Block until the named command groups have all completed, then return
        their results; returns None when not building in parallel. """
    _set_default_builder()
    if not getattr(default_builder, 'parallel_ok', False):
        return None
    if len(args) == 0:
        args = _groups.ids()  # no groups given: wait on everything
    # queue a wakeup on the 'after' list, then sleep until the results
    # handler notifies us that all awaited groups are done
    cond = threading.Condition()
    cond.acquire()
    _groups.add(False, _after(args, cond))
    cond.wait()
    known = _groups.ids()
    results = []
    for group in args:
        if group in known and group is not False:
            group_results = [item.results for item in _groups.item_list(group)]
            results.append((group, group_results))
    return results
1257   
def autoclean():
    """ Delete every recorded output of the default build, including its
        .deps file. """
    _set_default_builder()
    default_builder.autoclean()
1262
def memoize(command, **kwargs):
    # thin wrapper: delegate to the default Builder's memoize()
    _set_default_builder()
    return default_builder.memoize(command, **kwargs)

# reuse Builder.memoize's docstring for this module-level wrapper
memoize.__doc__ = Builder.memoize.__doc__
1268
def outofdate(command):
    """ Return True if the given command is out of date (needs re-running),
        as judged by the default Builder. """
    _set_default_builder()
    return default_builder.outofdate(command)
1273
# save options for use by main() if parse_options called earlier by user script
_parsed_options = None

# default usage message shown by the option parser
_usage = '[options] build script functions to run'
1279
def parse_options(usage=_usage, extra_options=None):
    """ Parse command line options and return (parser, options, args).

        "usage" is the usage string shown in help output
        "extra_options" is an optional list of optparse options to add

        The result is also stored in the module-level _parsed_options so
        that main() can reuse options parsed early by a user script. """
    # must rebind the module global, otherwise main() never sees the
    # options when a user script calls parse_options() before main()
    global _parsed_options
    parser = optparse.OptionParser(usage='Usage: %prog '+usage,
                                   version='%prog '+__version__)
    parser.disable_interspersed_args()
    parser.add_option('-t', '--time', action='store_true',
                      help='use file modification times instead of MD5 sums')
    parser.add_option('-d', '--dir', action='append',
                      help='add DIR to list of relevant directories')
    parser.add_option('-c', '--clean', action='store_true',
                      help='autoclean build outputs before running')
    parser.add_option('-q', '--quiet', action='store_true',
                      help="don't echo commands, only print errors")
    parser.add_option('-D', '--debug', action='store_true',
                      help="show debug info (why commands are rebuilt)")
    parser.add_option('-k', '--keep', action='store_true',
                      help='keep temporary strace output files')
    parser.add_option('-j', '--jobs', type='int',
                      help='maximum number of parallel jobs')
    if extra_options:
        # add any user-specified options passed in via main()
        for option in extra_options:
            parser.add_option(option)
    options, args = parser.parse_args()
    _parsed_options = (parser, options, args)
    return _parsed_options
1306
def fabricate_version(min=None, max=None):
    """ Enforce a fabricate version requirement.

        If "min" is given, exit with an error message unless the running
        fabricate is at least that version; if "max" is given, likewise
        require at most that version.  Returns the current fabricate version
        string.  This function was introduced in v1.14; prior versions expose
        the version only as the module string fabricate.__version__ """
    if min is not None:
        if float(__version__) < min:
            sys.stderr.write(("fabricate is version %s.  This build script "
                "requires at least version %.2f") % (__version__, min))
            sys.exit()
    if max is not None:
        if float(__version__) > max:
            sys.stderr.write(("fabricate is version %s.  This build script "
                "requires at most version %.2f") % (__version__, max))
            sys.exit()
    return __version__
1324
1325def main(globals_dict=None, build_dir=None, extra_options=None, builder=None,
1326         default=None, jobs=1, **kwargs):
1327    """ Run the default function or the function(s) named in the command line
1328        arguments. Call this at the end of your build script. If one of the
1329        functions returns nonzero, main will exit with the last nonzero return
1330        value as its status code.
1331
1332        "extra_options" is an optional list of options created with
1333        optparse.make_option(). The pseudo-global variable main.options
1334        is set to the parsed options list.
1335        "builder" is the class of builder to create, default (None) is the
1336        normal builder
1337        "default" is the default user script function to call, None = 'build'
1338        "kwargs" is any other keyword arguments to pass to the builder """
1339    global default_builder, default_command, _pool
1340
1341    kwargs.update(_setup_kwargs)
1342    if _parsed_options is not None:
1343        parser, options, actions = _parsed_options
1344    else:
1345        parser, options, actions = parse_options(extra_options=extra_options)
1346    kwargs['quiet'] = options.quiet
1347    kwargs['debug'] = options.debug
1348    if options.time:
1349        kwargs['hasher'] = mtime_hasher
1350    if options.dir:
1351        kwargs['dirs'] = options.dir
1352    if options.keep:
1353        StraceRunner.keep_temps = options.keep
1354    main.options = options
1355    if options.jobs is not None:
1356        jobs = options.jobs
1357    if default is not None:
1358        default_command = default
1359    if default_command is None:
1360        default_command = _setup_default
1361    if not actions:
1362        actions = [default_command]
1363
1364    original_path = os.getcwd()
1365    if None in [globals_dict, build_dir]:
1366        try:
1367            frame = sys._getframe(1)
1368        except:
1369            printerr("Your Python version doesn't support sys._getframe(1),")
1370            printerr("call main(globals(), build_dir) explicitly")
1371            sys.exit(1)
1372        if globals_dict is None:
1373            globals_dict = frame.f_globals
1374        if build_dir is None:
1375            build_file = frame.f_globals.get('__file__', None)
1376            if build_file:
1377                build_dir = os.path.dirname(build_file)
1378    if build_dir:
1379        if not options.quiet and os.path.abspath(build_dir) != original_path:
1380            print "Entering directory '%s'" % build_dir
1381        os.chdir(build_dir)
1382    if _pool is None and jobs > 1:
1383        _pool = multiprocessing.Pool(jobs)
1384
1385    use_builder = Builder
1386    if _setup_builder is not None:
1387        use_builder = _setup_builder
1388    if builder is not None:
1389        use_builder = builder
1390    default_builder = use_builder(**kwargs)
1391
1392    if options.clean:
1393        default_builder.autoclean()
1394
1395    status = 0
1396    try:
1397        for action in actions:
1398            if '(' not in action:
1399                action = action.strip() + '()'
1400            name = action.split('(')[0].split('.')[0]
1401            if name in globals_dict:
1402                this_status = eval(action, globals_dict)
1403                if this_status:
1404                    status = int(this_status)
1405            else:
1406                printerr('%r command not defined!' % action)
1407                sys.exit(1)
1408        after() # wait till the build commands are finished
1409    except ExecutionError, exc:
1410        message, data, status = exc
1411        printerr('fabricate: ' + message)
1412    finally:
1413        _stop_results.set() # stop the results gatherer so I don't hang
1414        if not options.quiet and os.path.abspath(build_dir) != original_path:
1415            print "Leaving directory '%s' back to '%s'" % (build_dir, original_path)
1416        os.chdir(original_path)
1417    sys.exit(status)
1418
if __name__ == '__main__':
    # if called as a script, emulate memoize.py -- run() command line
    parser, options, args = parse_options('[options] command line to run')
    status = 0
    if args:
        status = memoize(args)
    elif not options.clean:
        # no command given and not cleaning: show usage and fail
        parser.print_help()
        status = 1
    # autoclean may have been used
    sys.exit(status)
# Note: See TracBrowser for help on using the repository browser.