Move into the 21st century by fixing some pylint warnings.

This commit is contained in:
Avery Pennarun 2018-12-02 23:15:37 -05:00
commit e1327540fb
22 changed files with 797 additions and 388 deletions

318
.pylintrc Normal file
View file

@ -0,0 +1,318 @@
[MASTER]
# Pickle collected data for later comparisons.
persistent=no
# Use multiple processes to speed up Pylint.
jobs=1
[MESSAGES CONTROL]
# We probably want to fix these eventually, but in the meantime, these
# ones are relatively harmless.
disable=redefined-builtin,multiple-imports,missing-docstring,wrong-import-position,locally-disabled,invalid-name,unused-argument,fixme,global-statement,redefined-variable-type,using-constant-test,unused-variable,file-ignored,simplifiable-if-statement
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]". This option is deprecated
# and it will be removed in Pylint 2.0.
files-output=no
# Tells whether to display a full report or only the messages
reports=no
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=20
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=80
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=LF
[BASIC]
# Good variable names which should always be accepted, separated by a comma
good-names=i,j,k,ex,Run,_
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=yes
# List of decorators that produce properties, such as abc.abstractproperty. Add
# to this list to register other decorators that produce valid properties.
property-classes=abc.abstractproperty
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for function names
function-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{0,30}$
# Naming hint for variable names
variable-name-hint=[a-z_][a-z0-9_]{0,30}$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for attribute names
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for argument names
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct module names
module-rgx=(([a-z_][-a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for method names
method-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# Minimum line length for functions/classes that require docstrings; shorter
# ones are exempt.
docstring-min-length=10
[ELIF]
# Maximum number of nested blocks for function / method body
max-nested-blocks=100
[MISCELLANEOUS]
# List of note tags to take into consideration, separated by a comma.
notes=FIXME,XXX,TODO
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=
# List of class names for which member attributes should not be checked (useful
# for classes with dynamically set attributes). This supports the use of
# qualified names.
ignored-classes=optparse.Values,thread._local,_thread._local
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=
# List of decorators that produce context managers, such as
# contextlib.contextmanager. Add to this list to register other decorators that
# produce valid context managers.
contextmanager-decorators=contextlib.contextmanager
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=(_+[a-zA-Z0-9]*?$)|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid defining new builtins when possible.
additional-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
# List of qualified module names which can have objects that can redefine
# builtins.
redefining-builtins-modules=six.moves,future.builtins
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make,_exit
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
# Force import order to recognize a module as part of the standard
# compatibility libraries.
known-standard-library=
# Force import order to recognize a module as part of a third party library.
known-third-party=enchant
# Analyse import fallback blocks. This can be used to support both Python 2 and
# 3 compatible code, which means that the block might have code that exists
# only in one or another interpreter, leading to false positives when analysed.
analyse-fallback-blocks=no
[DESIGN]
# Maximum number of arguments for function / method
max-args=100
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=100
# Maximum number of return / yield for function / method body
max-returns=100
# Maximum number of branch for function / method body
max-branches=100
# Maximum number of statements in function / method body
max-statements=500
# Maximum number of parents for a class (see R0901).
max-parents=7
# Maximum number of attributes for a class (see R0902).
max-attributes=100
# Minimum number of public methods for a class (see R0903).
min-public-methods=0
# Maximum number of public methods for a class (see R0904).
max-public-methods=100
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

View file

@ -1,8 +1,8 @@
import sys, os, errno, random, stat, signal, time import sys, os, errno, stat, signal, time
import vars, jwack, state, paths import vars, jwack, state, paths
from helpers import unlink, close_on_exec, join from helpers import unlink, close_on_exec
import logs import logs
from logs import debug, debug2, err, warn, meta, check_tty from logs import debug2, err, warn, meta, check_tty
def _nice(t): def _nice(t):
@ -81,15 +81,15 @@ def start_stdin_log_reader(status, details, pretty, color,
argv.append('--color' if color >= 2 else '--no-color') argv.append('--color' if color >= 2 else '--no-color')
argv.append('-') argv.append('-')
os.execvp(argv[0], argv) os.execvp(argv[0], argv)
except Exception, e: except Exception, e: # pylint: disable=broad-except
sys.stderr.write('redo-log: exec: %s\n' % e) sys.stderr.write('redo-log: exec: %s\n' % e)
finally: finally:
os._exit(99) os._exit(99)
def await_log_reader(): def await_log_reader():
if not vars.LOG: return if not vars.LOG:
global log_reader_pid return
if log_reader_pid > 0: if log_reader_pid > 0:
# never actually close fd#1 or fd#2; insanity awaits. # never actually close fd#1 or fd#2; insanity awaits.
# replace it with something else instead. # replace it with something else instead.
@ -108,14 +108,14 @@ class ImmediateReturn(Exception):
self.rv = rv self.rv = rv
class BuildJob: class BuildJob(object):
def __init__(self, t, sf, lock, shouldbuildfunc, donefunc): def __init__(self, t, sf, lock, shouldbuildfunc, donefunc):
self.t = t # original target name, not relative to vars.BASE self.t = t # original target name, not relative to vars.BASE
self.sf = sf self.sf = sf
tmpbase = t tmpbase = t
while not os.path.isdir(os.path.dirname(tmpbase) or '.'): while not os.path.isdir(os.path.dirname(tmpbase) or '.'):
ofs = tmpbase.rfind('/') ofs = tmpbase.rfind('/')
assert(ofs >= 0) assert ofs >= 0
tmpbase = tmpbase[:ofs] + '__' + tmpbase[ofs+1:] tmpbase = tmpbase[:ofs] + '__' + tmpbase[ofs+1:]
self.tmpname1 = '%s.redo1.tmp' % tmpbase self.tmpname1 = '%s.redo1.tmp' % tmpbase
self.tmpname2 = '%s.redo2.tmp' % tmpbase self.tmpname2 = '%s.redo2.tmp' % tmpbase
@ -125,7 +125,7 @@ class BuildJob:
self.before_t = _try_stat(self.t) self.before_t = _try_stat(self.t)
def start(self): def start(self):
assert(self.lock.owned) assert self.lock.owned
try: try:
try: try:
is_target, dirty = self.shouldbuildfunc(self.t) is_target, dirty = self.shouldbuildfunc(self.t)
@ -140,29 +140,29 @@ class BuildJob:
except ImmediateReturn, e: except ImmediateReturn, e:
return self._after2(e.rv) return self._after2(e.rv)
if vars.NO_OOB or dirty == True: if vars.NO_OOB or dirty == True: # pylint: disable=singleton-comparison
self._start_do() self._start_do()
else: else:
self._start_unlocked(dirty) self._start_unlocked(dirty)
def _start_do(self): def _start_do(self):
assert(self.lock.owned) assert self.lock.owned
t = self.t t = self.t
sf = self.sf sf = self.sf
newstamp = sf.read_stamp() newstamp = sf.read_stamp()
if (sf.is_generated and if (sf.is_generated and
newstamp != state.STAMP_MISSING and newstamp != state.STAMP_MISSING and
(sf.is_override or state.detect_override(sf.stamp, newstamp))): (sf.is_override or state.detect_override(sf.stamp, newstamp))):
state.warn_override(_nice(t)) state.warn_override(_nice(t))
if not sf.is_override: if not sf.is_override:
warn('%s - old: %r\n' % (_nice(t), sf.stamp)) warn('%s - old: %r\n' % (_nice(t), sf.stamp))
warn('%s - new: %r\n' % (_nice(t), newstamp)) warn('%s - new: %r\n' % (_nice(t), newstamp))
sf.set_override() sf.set_override()
sf.set_checked() sf.set_checked()
sf.save() sf.save()
return self._after2(0) return self._after2(0)
if (os.path.exists(t) and not os.path.isdir(t + '/.') if (os.path.exists(t) and not os.path.isdir(t + '/.')
and not sf.is_generated): and not sf.is_generated):
# an existing source file that was not generated by us. # an existing source file that was not generated by us.
# This step is mentioned by djb in his notes. # This step is mentioned by djb in his notes.
# For example, a rule called default.c.do could be used to try # For example, a rule called default.c.do could be used to try
@ -173,7 +173,7 @@ class BuildJob:
sf.save() sf.save()
return self._after2(0) return self._after2(0)
sf.zap_deps1() sf.zap_deps1()
(dodir, dofile, basedir, basename, ext) = paths.find_do_file(sf) (dodir, dofile, _, basename, ext) = paths.find_do_file(sf)
if not dofile: if not dofile:
if os.path.exists(t): if os.path.exists(t):
sf.set_static() sf.set_static()
@ -186,6 +186,7 @@ class BuildJob:
unlink(self.tmpname2) unlink(self.tmpname2)
ffd = os.open(self.tmpname1, os.O_CREAT|os.O_RDWR|os.O_EXCL, 0666) ffd = os.open(self.tmpname1, os.O_CREAT|os.O_RDWR|os.O_EXCL, 0666)
close_on_exec(ffd, True) close_on_exec(ffd, True)
# pylint: disable=attribute-defined-outside-init
self.f = os.fdopen(ffd, 'w+') self.f = os.fdopen(ffd, 'w+')
# this will run in the dofile's directory, so use only basenames here # this will run in the dofile's directory, so use only basenames here
arg1 = basename + ext # target name (including extension) arg1 = basename + ext # target name (including extension)
@ -196,15 +197,21 @@ class BuildJob:
arg2, arg2,
# temp output file name # temp output file name
state.relpath(os.path.abspath(self.tmpname2), dodir), state.relpath(os.path.abspath(self.tmpname2), dodir),
] ]
if vars.VERBOSE: argv[1] += 'v' if vars.VERBOSE:
if vars.XTRACE: argv[1] += 'x' argv[1] += 'v'
if vars.XTRACE:
argv[1] += 'x'
firstline = open(os.path.join(dodir, dofile)).readline().strip() firstline = open(os.path.join(dodir, dofile)).readline().strip()
if firstline.startswith('#!/'): if firstline.startswith('#!/'):
argv[0:2] = firstline[2:].split(' ') argv[0:2] = firstline[2:].split(' ')
# make sure to create the logfile *before* writing the meta() about it. # make sure to create the logfile *before* writing the meta() about it.
# that way redo-log won't trace into an obsolete logfile. # that way redo-log won't trace into an obsolete logfile.
if vars.LOG: open(state.logname(self.sf.id), 'w') if vars.LOG:
open(state.logname(self.sf.id), 'w')
# FIXME: put these variables somewhere else, instead of on-the-fly
# extending this class!
# pylint: disable=attribute-defined-outside-init
self.dodir = dodir self.dodir = dodir
self.basename = basename self.basename = basename
self.ext = ext self.ext = ext
@ -236,9 +243,10 @@ class BuildJob:
state.commit() state.commit()
def run(): def run():
os.environ['REDO_DEPTH'] = vars.DEPTH + ' ' os.environ['REDO_DEPTH'] = vars.DEPTH + ' '
signal.signal(signal.SIGPIPE, signal.SIG_DFL) # python ignores SIGPIPE # python ignores SIGPIPE
signal.signal(signal.SIGPIPE, signal.SIG_DFL)
os.execvp(argv[0], argv) os.execvp(argv[0], argv)
assert(0) assert 0
# returns only if there's an exception # returns only if there's an exception
def after(t, rv): def after(t, rv):
return self._after2(rv) return self._after2(rv)
@ -250,7 +258,7 @@ class BuildJob:
# redo-ifchange, and it might have done it from a different directory # redo-ifchange, and it might have done it from a different directory
# than we started it in. So os.getcwd() might be != REDO_PWD right # than we started it in. So os.getcwd() might be != REDO_PWD right
# now. # now.
assert(state.is_flushed()) assert state.is_flushed()
dn = self.dodir dn = self.dodir
newp = os.path.realpath(dn) newp = os.path.realpath(dn)
os.environ['REDO_PWD'] = state.relpath(newp, vars.STARTDIR) os.environ['REDO_PWD'] = state.relpath(newp, vars.STARTDIR)
@ -288,7 +296,7 @@ class BuildJob:
# FIXME: it would be nice to log the exit code to logf. # FIXME: it would be nice to log the exit code to logf.
# But that would have to happen in the parent process, which doesn't # But that would have to happen in the parent process, which doesn't
# have logf open. # have logf open.
assert(0) assert 0
# returns only if there's an exception # returns only if there's an exception
def _after(self, t, rv): def _after(self, t, rv):
@ -306,8 +314,8 @@ class BuildJob:
st1 = os.fstat(f.fileno()) st1 = os.fstat(f.fileno())
st2 = _try_stat(self.tmpname2) st2 = _try_stat(self.tmpname2)
if (after_t and if (after_t and
(not before_t or before_t.st_mtime != after_t.st_mtime) and (not before_t or before_t.st_mtime != after_t.st_mtime) and
not stat.S_ISDIR(after_t.st_mode)): not stat.S_ISDIR(after_t.st_mode)):
err('%s modified %s directly!\n' % (self.argv[2], t)) err('%s modified %s directly!\n' % (self.argv[2], t))
err('...you should update $3 (a temp file) or stdout, not $1.\n') err('...you should update $3 (a temp file) or stdout, not $1.\n')
rv = 206 rv = 206
@ -315,7 +323,7 @@ class BuildJob:
err('%s wrote to stdout *and* created $3.\n' % self.argv[2]) err('%s wrote to stdout *and* created $3.\n' % self.argv[2])
err('...you should write status messages to stderr, not stdout.\n') err('...you should write status messages to stderr, not stdout.\n')
rv = 207 rv = 207
if rv==0: if rv == 0:
# FIXME: race condition here between updating stamp/is_generated # FIXME: race condition here between updating stamp/is_generated
# and actually renaming the files into place. There needs to # and actually renaming the files into place. There needs to
# be some kind of two-stage commit, I guess. # be some kind of two-stage commit, I guess.
@ -372,7 +380,7 @@ class BuildJob:
def _after2(self, rv): def _after2(self, rv):
try: try:
self.donefunc(self.t, rv) self.donefunc(self.t, rv)
assert(self.lock.owned) assert self.lock.owned
finally: finally:
self.lock.unlock() self.lock.unlock()
@ -398,7 +406,8 @@ def main(targets, shouldbuildfunc):
selflock = myfile = me = None selflock = myfile = me = None
def cheat(): def cheat():
if not selflock: return 0 if not selflock:
return 0
selflock.trylock() selflock.trylock()
if not selflock.owned: if not selflock.owned:
# redo-log already owns it: let's cheat. # redo-log already owns it: let's cheat.
@ -419,7 +428,7 @@ def main(targets, shouldbuildfunc):
err('cannot build the empty target ("").\n') err('cannot build the empty target ("").\n')
retcode[0] = 204 retcode[0] = 204
break break
assert(state.is_flushed()) assert state.is_flushed()
if t in seen: if t in seen:
continue continue
seen[t] = 1 seen[t] = 1
@ -440,7 +449,7 @@ def main(targets, shouldbuildfunc):
lock.trylock() lock.trylock()
if not lock.owned: if not lock.owned:
meta('locked', state.target_relpath(t)) meta('locked', state.target_relpath(t))
locked.append((f.id,t,f.name)) locked.append((f.id, t, f.name))
else: else:
# We had to create f before we had a lock, because we need f.id # We had to create f before we had a lock, because we need f.id
# to make the lock. But someone may have updated the state # to make the lock. But someone may have updated the state
@ -450,7 +459,7 @@ def main(targets, shouldbuildfunc):
f.refresh() f.refresh()
BuildJob(t, f, lock, shouldbuildfunc, done).start() BuildJob(t, f, lock, shouldbuildfunc, done).start()
state.commit() state.commit()
assert(state.is_flushed()) assert state.is_flushed()
lock = None lock = None
del lock del lock
@ -465,7 +474,7 @@ def main(targets, shouldbuildfunc):
while locked or jwack.running(): while locked or jwack.running():
state.commit() state.commit()
jwack.wait_all() jwack.wait_all()
assert jwack._mytokens == 0 assert jwack._mytokens == 0 # pylint: disable=protected-access
jwack.ensure_token_or_cheat('self', cheat) jwack.ensure_token_or_cheat('self', cheat)
# at this point, we don't have any children holding any tokens, so # at this point, we don't have any children holding any tokens, so
# it's okay to block below. # it's okay to block below.
@ -476,7 +485,7 @@ def main(targets, shouldbuildfunc):
err('.redo directory disappeared; cannot continue.\n') err('.redo directory disappeared; cannot continue.\n')
retcode[0] = 205 retcode[0] = 205
break break
fid,t,fname = locked.pop(0) fid, t, _ = locked.pop(0)
lock = state.Lock(fid) lock = state.Lock(fid)
backoff = 0.01 backoff = 0.01
lock.trylock() lock.trylock()
@ -505,7 +514,7 @@ def main(targets, shouldbuildfunc):
lock.unlock() lock.unlock()
jwack.ensure_token_or_cheat(t, cheat) jwack.ensure_token_or_cheat(t, cheat)
lock.trylock() lock.trylock()
assert(lock.owned) assert lock.owned
meta('unlocked', state.target_relpath(t)) meta('unlocked', state.target_relpath(t))
if state.File(name=t).is_failed(): if state.File(name=t).is_failed():
err('%s: failed in another thread\n' % _nice(t)) err('%s: failed in another thread\n' % _nice(t))

26
deps.py
View file

@ -1,5 +1,5 @@
import sys, os import os
import vars, state, builder import vars, state
from logs import debug from logs import debug
CLEAN = 0 CLEAN = 0
@ -17,16 +17,18 @@ def isdirty(f, depth, max_changed,
already_checked = list(already_checked) + [f.id] already_checked = list(already_checked) + [f.id]
if vars.DEBUG >= 1: if vars.DEBUG >= 1:
debug('%s?%s %r,%r\n' % (depth, f.nicename(), f.is_generated, f.is_override)) debug('%s?%s %r,%r\n'
% (depth, f.nicename(), f.is_generated, f.is_override))
if f.failed_runid: if f.failed_runid:
debug('%s-- DIRTY (failed last time)\n' % depth) debug('%s-- DIRTY (failed last time)\n' % depth)
return DIRTY return DIRTY
if f.changed_runid == None: if f.changed_runid is None:
debug('%s-- DIRTY (never built)\n' % depth) debug('%s-- DIRTY (never built)\n' % depth)
return DIRTY return DIRTY
if f.changed_runid > max_changed: if f.changed_runid > max_changed:
debug('%s-- DIRTY (built %d > %d; %d)\n' % (depth, f.changed_runid, max_changed, vars.RUNID)) debug('%s-- DIRTY (built %d > %d; %d)\n'
% (depth, f.changed_runid, max_changed, vars.RUNID))
return DIRTY # has been built more recently than parent return DIRTY # has been built more recently than parent
if is_checked(f): if is_checked(f):
if vars.DEBUG >= 1: if vars.DEBUG >= 1:
@ -60,16 +62,16 @@ def isdirty(f, depth, max_changed,
return DIRTY return DIRTY
must_build = [] must_build = []
for mode,f2 in f.deps(): for mode, f2 in f.deps():
dirty = CLEAN dirty = CLEAN
if mode == 'c': if mode == 'c':
if os.path.exists(os.path.join(vars.BASE, f2.name)): if os.path.exists(os.path.join(vars.BASE, f2.name)):
debug('%s-- DIRTY (created)\n' % depth) debug('%s-- DIRTY (created)\n' % depth)
dirty = DIRTY dirty = DIRTY
elif mode == 'm': elif mode == 'm':
sub = isdirty(f2, depth = depth + ' ', sub = isdirty(f2, depth=depth + ' ',
max_changed = max(f.changed_runid, max_changed=max(f.changed_runid,
f.checked_runid), f.checked_runid),
already_checked=already_checked, already_checked=already_checked,
is_checked=is_checked, is_checked=is_checked,
set_checked=set_checked, set_checked=set_checked,
@ -78,14 +80,14 @@ def isdirty(f, depth, max_changed,
debug('%s-- DIRTY (sub)\n' % depth) debug('%s-- DIRTY (sub)\n' % depth)
dirty = sub dirty = sub
else: else:
assert(mode in ('c','m')) assert mode in ('c', 'm')
if not f.csum: if not f.csum:
# f is a "normal" target: dirty f2 means f is instantly dirty # f is a "normal" target: dirty f2 means f is instantly dirty
if dirty == DIRTY: if dirty == DIRTY:
# f2 is definitely dirty, so f definitely needs to # f2 is definitely dirty, so f definitely needs to
# redo. # redo.
return DIRTY return DIRTY
elif isinstance(dirty,list): elif isinstance(dirty, list):
# our child f2 might be dirty, but it's not sure yet. It's # our child f2 might be dirty, but it's not sure yet. It's
# given us a list of targets we have to redo in order to # given us a list of targets we have to redo in order to
# be sure. # be sure.
@ -99,7 +101,7 @@ def isdirty(f, depth, max_changed,
# redo. However, after that, f might turn out to be # redo. However, after that, f might turn out to be
# unchanged. # unchanged.
return [f] return [f]
elif isinstance(dirty,list): elif isinstance(dirty, list):
# our child f2 might be dirty, but it's not sure yet. It's # our child f2 might be dirty, but it's not sure yet. It's
# given us a list of targets we have to redo in order to # given us a list of targets we have to redo in order to
# be sure. # be sure.

View file

@ -1,5 +1,4 @@
import os, errno, fcntl import os, errno, fcntl
from atoi import atoi
def join(between, l): def join(between, l):
@ -25,5 +24,3 @@ def close_on_exec(fd, yes):
if yes: if yes:
fl |= fcntl.FD_CLOEXEC fl |= fcntl.FD_CLOEXEC
fcntl.fcntl(fd, fcntl.F_SETFD, fl) fcntl.fcntl(fd, fcntl.F_SETFD, fl)

View file

@ -74,7 +74,8 @@
# simpler :) # simpler :)
# #
import sys, os, errno, select, fcntl, signal import sys, os, errno, select, fcntl, signal
from helpers import atoi, close_on_exec from atoi import atoi
from helpers import close_on_exec
import state, vars import state, vars
_toplevel = 0 _toplevel = 0
@ -87,7 +88,7 @@ _waitfds = {}
def _debug(s): def _debug(s):
if 0: if 0:
sys.stderr.write('jwack#%d: %s' % (os.getpid(),s)) sys.stderr.write('jwack#%d: %s' % (os.getpid(), s))
def _create_tokens(n): def _create_tokens(n):
@ -132,7 +133,6 @@ def _release_except_mine():
def release_mine(): def release_mine():
global _mytokens
assert _mytokens >= 1 assert _mytokens >= 1
_debug('%d,%d -> release_mine()\n' % (_mytokens, _cheats)) _debug('%d,%d -> release_mine()\n' % (_mytokens, _cheats))
_release(1) _release(1)
@ -146,9 +146,9 @@ def _timeout(sig, frame):
# This makes it easier to differentiate different kinds of pipes when using # This makes it easier to differentiate different kinds of pipes when using
# strace. # strace.
def _make_pipe(startfd): def _make_pipe(startfd):
(a,b) = os.pipe() (a, b) = os.pipe()
fds = (fcntl.fcntl(a, fcntl.F_DUPFD, startfd), fds = (fcntl.fcntl(a, fcntl.F_DUPFD, startfd),
fcntl.fcntl(b, fcntl.F_DUPFD, startfd+1)) fcntl.fcntl(b, fcntl.F_DUPFD, startfd + 1))
os.close(a) os.close(a)
os.close(b) os.close(b)
return fds return fds
@ -162,7 +162,7 @@ def _try_read(fd, n):
# socket: http://cr.yp.to/unix/nonblock.html # socket: http://cr.yp.to/unix/nonblock.html
# We can't just make the socket non-blocking, because we want to be # We can't just make the socket non-blocking, because we want to be
# compatible with GNU Make, and they can't handle it. # compatible with GNU Make, and they can't handle it.
r,w,x = select.select([fd], [], [], 0) r, w, x = select.select([fd], [], [], 0)
if not r: if not r:
return None # try again return None # try again
# ok, the socket is readable - but some other process might get there # ok, the socket is readable - but some other process might get there
@ -206,12 +206,12 @@ def setup(maxjobs):
FIND = FIND1 FIND = FIND1
ofs = flags.find(FIND1) ofs = flags.find(FIND1)
if ofs < 0: if ofs < 0:
FIND = FIND2 FIND = FIND2
ofs = flags.find(FIND2) ofs = flags.find(FIND2)
if ofs >= 0: if ofs >= 0:
s = flags[ofs+len(FIND):] s = flags[ofs+len(FIND):]
(arg,junk) = s.split(' ', 1) (arg, junk) = s.split(' ', 1)
(a,b) = arg.split(',', 1) (a, b) = arg.split(',', 1)
a = atoi(a) a = atoi(a)
b = atoi(b) b = atoi(b)
if a <= 0 or b <= 0: if a <= 0 or b <= 0:
@ -221,19 +221,20 @@ def setup(maxjobs):
fcntl.fcntl(b, fcntl.F_GETFL) fcntl.fcntl(b, fcntl.F_GETFL)
except IOError, e: except IOError, e:
if e.errno == errno.EBADF: if e.errno == errno.EBADF:
raise ValueError('broken --jobserver-auth from make; prefix your Makefile rule with a "+"') raise ValueError('broken --jobserver-auth from make; ' +
'prefix your Makefile rule with a "+"')
else: else:
raise raise
_tokenfds = (a,b) _tokenfds = (a, b)
cheats = os.getenv('REDO_CHEATFDS', '') cheats = os.getenv('REDO_CHEATFDS', '')
if cheats: if cheats:
(a,b) = cheats.split(',', 1) (a, b) = cheats.split(',', 1)
a = atoi(a) a = atoi(a)
b = atoi(b) b = atoi(b)
if a <= 0 or b <= 0: if a <= 0 or b <= 0:
raise ValueError('invalid REDO_CHEATFDS: %r' % cheats) raise ValueError('invalid REDO_CHEATFDS: %r' % cheats)
_cheatfds = (a,b) _cheatfds = (a, b)
if not _tokenfds: if not _tokenfds:
# need to start a new server # need to start a new server
@ -256,7 +257,7 @@ def _wait(want_token, max_delay):
rfds.append(_tokenfds[0]) rfds.append(_tokenfds[0])
assert rfds assert rfds
assert state.is_flushed() assert state.is_flushed()
r,w,x = select.select(rfds, [], [], max_delay) r, w, x = select.select(rfds, [], [], max_delay)
_debug('_tokenfds=%r; wfds=%r; readable: %r\n' % (_tokenfds, _waitfds, r)) _debug('_tokenfds=%r; wfds=%r; readable: %r\n' % (_tokenfds, _waitfds, r))
for fd in r: for fd in r:
if fd == _tokenfds[0]: if fd == _tokenfds[0]:
@ -270,7 +271,7 @@ def _wait(want_token, max_delay):
# now need to recreate it. # now need to recreate it.
b = _try_read(_cheatfds[0], 1) b = _try_read(_cheatfds[0], 1)
_debug('GOT cheatfd\n') _debug('GOT cheatfd\n')
if b == None: if b is None:
_create_tokens(1) _create_tokens(1)
if has_token(): if has_token():
_release_except_mine() _release_except_mine()
@ -383,7 +384,7 @@ def force_return_tokens():
n = len(_waitfds) n = len(_waitfds)
_debug('%d,%d -> %d jobs left in force_return_tokens\n' _debug('%d,%d -> %d jobs left in force_return_tokens\n'
% (_mytokens, _cheats, n)) % (_mytokens, _cheats, n))
for k in _waitfds.keys(): for k in list(_waitfds):
del _waitfds[k] del _waitfds[k]
_create_tokens(n) _create_tokens(n)
if has_token(): if has_token():
@ -405,7 +406,7 @@ def _pre_job(r, w, pfn):
pfn() pfn()
class Job: class Job(object):
def __init__(self, name, pid, donefunc): def __init__(self, name, pid, donefunc):
self.name = name self.name = name
self.pid = pid self.pid = pid
@ -418,13 +419,12 @@ class Job:
def start_job(reason, jobfunc, donefunc): def start_job(reason, jobfunc, donefunc):
assert state.is_flushed() assert state.is_flushed()
global _mytokens
assert _mytokens <= 1 assert _mytokens <= 1
assert _mytokens == 1 assert _mytokens == 1
# Subprocesses always start with 1 token, so we have to destroy ours # Subprocesses always start with 1 token, so we have to destroy ours
# in order for the universe to stay in balance. # in order for the universe to stay in balance.
_destroy_tokens(1) _destroy_tokens(1)
r,w = _make_pipe(50) r, w = _make_pipe(50)
pid = os.fork() pid = os.fork()
if pid == 0: if pid == 0:
# child # child
@ -433,8 +433,8 @@ def start_job(reason, jobfunc, donefunc):
try: try:
try: try:
rv = jobfunc() or 0 rv = jobfunc() or 0
_debug('jobfunc completed (%r, %r)\n' % (jobfunc,rv)) _debug('jobfunc completed (%r, %r)\n' % (jobfunc, rv))
except Exception: except Exception: # pylint: disable=broad-except
import traceback import traceback
traceback.print_exc() traceback.print_exc()
finally: finally:

27
logs.py
View file

@ -1,12 +1,15 @@
import os, re, sys, time import os, re, sys, time
import vars import vars
RED = GREEN = YELLOW = BOLD = PLAIN = None
def check_tty(file, color): def check_tty(file, color):
global RED, GREEN, YELLOW, BOLD, PLAIN global RED, GREEN, YELLOW, BOLD, PLAIN
color_ok = file.isatty() and (os.environ.get('TERM') or 'dumb') != 'dumb' color_ok = file.isatty() and (os.environ.get('TERM') or 'dumb') != 'dumb'
if (color and color_ok) or color >= 2: if (color and color_ok) or color >= 2:
# ...use ANSI formatting codes. # ...use ANSI formatting codes.
# pylint: disable=bad-whitespace
RED = "\x1b[31m" RED = "\x1b[31m"
GREEN = "\x1b[32m" GREEN = "\x1b[32m"
YELLOW = "\x1b[33m" YELLOW = "\x1b[33m"
@ -25,7 +28,7 @@ class RawLog(object):
self.file = file self.file = file
def write(self, s): def write(self, s):
assert('\n' not in s) assert '\n' not in s
sys.stdout.flush() sys.stdout.flush()
sys.stderr.flush() sys.stderr.flush()
self.file.write(s + '\n') self.file.write(s + '\n')
@ -45,11 +48,12 @@ class PrettyLog(object):
redo = '%-6d redo ' % pid redo = '%-6d redo ' % pid
else: else:
redo = 'redo ' redo = 'redo '
self.file.write(''.join([color, redo, vars.DEPTH, self.file.write(
BOLD if color else '', s, PLAIN, '\n'])) ''.join([color, redo, vars.DEPTH,
BOLD if color else '', s, PLAIN, '\n']))
def write(self, s): def write(self, s):
assert('\n' not in s) assert '\n' not in s
sys.stdout.flush() sys.stdout.flush()
sys.stderr.flush() sys.stderr.flush()
g = REDO_RE.match(s) g = REDO_RE.match(s)
@ -58,7 +62,7 @@ class PrettyLog(object):
self.file.write(s[:-len(all)]) self.file.write(s[:-len(all)])
words = g.group(1).split(':') words = g.group(1).split(':')
text = g.group(2) text = g.group(2)
kind, pid, when = words[0:3] kind, pid, _ = words[0:3]
pid = int(pid) pid = int(pid)
if kind == 'unchanged': if kind == 'unchanged':
self._pretty(pid, '', '%s (unchanged)' % text) self._pretty(pid, '', '%s (unchanged)' % text)
@ -85,10 +89,10 @@ class PrettyLog(object):
self._pretty(pid, GREEN, '%s (...unlocked!)' % text) self._pretty(pid, GREEN, '%s (...unlocked!)' % text)
elif kind == 'error': elif kind == 'error':
self.file.write(''.join([RED, 'redo: ', self.file.write(''.join([RED, 'redo: ',
BOLD, text, PLAIN, '\n'])) BOLD, text, PLAIN, '\n']))
elif kind == 'warning': elif kind == 'warning':
self.file.write(''.join([YELLOW, 'redo: ', self.file.write(''.join([YELLOW, 'redo: ',
BOLD, text, PLAIN, '\n'])) BOLD, text, PLAIN, '\n']))
elif kind == 'debug': elif kind == 'debug':
self._pretty(pid, '', text) self._pretty(pid, '', text)
else: else:
@ -118,10 +122,11 @@ def write(s):
def meta(kind, s, pid=None): def meta(kind, s, pid=None):
assert(':' not in kind) assert ':' not in kind
assert('@' not in kind) assert '@' not in kind
assert('\n' not in s) assert '\n' not in s
if pid == None: pid = os.getpid() if pid is None:
pid = os.getpid()
write('@@REDO:%s:%d:%.4f@@ %s' write('@@REDO:%s:%d:%.4f@@ %s'
% (kind, pid, time.time(), s)) % (kind, pid, time.time(), s))

View file

@ -1,3 +1,5 @@
# pylint: skip-file
#
# Copyright 2011 Avery Pennarun and options.py contributors. # Copyright 2011 Avery Pennarun and options.py contributors.
# All rights reserved. # All rights reserved.
# #

View file

@ -1,19 +1,20 @@
import os import os
import vars import vars
from logs import err, debug2 from logs import debug2
def _default_do_files(filename): def _default_do_files(filename):
l = filename.split('.') l = filename.split('.')
for i in range(1,len(l)+1): for i in range(1, len(l)+1):
basename = '.'.join(l[:i]) basename = '.'.join(l[:i])
ext = '.'.join(l[i:]) ext = '.'.join(l[i:])
if ext: ext = '.' + ext if ext:
ext = '.' + ext
yield ("default%s.do" % ext), basename, ext yield ("default%s.do" % ext), basename, ext
def possible_do_files(t): def possible_do_files(t):
dirname,filename = os.path.split(t) dirname, filename = os.path.split(t)
yield (os.path.join(vars.BASE, dirname), "%s.do" % filename, yield (os.path.join(vars.BASE, dirname), "%s.do" % filename,
'', filename, '') '', filename, '')
@ -24,25 +25,25 @@ def possible_do_files(t):
# into theirs as a subdir. When they do, my rules should still be used # into theirs as a subdir. When they do, my rules should still be used
# for building my project in *all* cases. # for building my project in *all* cases.
t = os.path.normpath(os.path.join(vars.BASE, t)) t = os.path.normpath(os.path.join(vars.BASE, t))
dirname,filename = os.path.split(t) dirname, filename = os.path.split(t)
dirbits = dirname.split('/') dirbits = dirname.split('/')
# since t is an absolute path, dirbits[0] is always '', so we don't # since t is an absolute path, dirbits[0] is always '', so we don't
# need to count all the way down to i=0. # need to count all the way down to i=0.
for i in range(len(dirbits), 0, -1): for i in range(len(dirbits), 0, -1):
basedir = '/'.join(dirbits[:i]) basedir = '/'.join(dirbits[:i])
subdir = '/'.join(dirbits[i:]) subdir = '/'.join(dirbits[i:])
for dofile,basename,ext in _default_do_files(filename): for dofile, basename, ext in _default_do_files(filename):
yield (basedir, dofile, yield (basedir, dofile,
subdir, os.path.join(subdir, basename), ext) subdir, os.path.join(subdir, basename), ext)
def find_do_file(f): def find_do_file(f):
for dodir,dofile,basedir,basename,ext in possible_do_files(f.name): for dodir, dofile, basedir, basename, ext in possible_do_files(f.name):
dopath = os.path.join(dodir, dofile) dopath = os.path.join(dodir, dofile)
debug2('%s: %s:%s ?\n' % (f.name, dodir, dofile)) debug2('%s: %s:%s ?\n' % (f.name, dodir, dofile))
if os.path.exists(dopath): if os.path.exists(dopath):
f.add_dep('m', dopath) f.add_dep('m', dopath)
return dodir,dofile,basedir,basename,ext return dodir, dofile, basedir, basename, ext
else: else:
f.add_dep('c', dopath) f.add_dep('c', dopath)
return None,None,None,None,None return None, None, None, None, None

View file

@ -1,18 +1,22 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
import sys, os import sys, os
import vars, state import vars, state
from logs import err
try: def main():
me = os.path.join(vars.STARTDIR, try:
os.path.join(vars.PWD, vars.TARGET)) me = os.path.join(vars.STARTDIR,
f = state.File(name=me) os.path.join(vars.PWD, vars.TARGET))
f.add_dep('m', state.ALWAYS) f = state.File(name=me)
always = state.File(name=state.ALWAYS) f.add_dep('m', state.ALWAYS)
always.stamp = state.STAMP_MISSING always = state.File(name=state.ALWAYS)
always.set_changed() always.stamp = state.STAMP_MISSING
always.save() always.set_changed()
state.commit() always.save()
except KeyboardInterrupt: state.commit()
sys.exit(200) except KeyboardInterrupt:
sys.exit(200)
if __name__ == '__main__':
main()

View file

@ -5,56 +5,62 @@ import vars_init
vars_init.init(sys.argv[1:]) vars_init.init(sys.argv[1:])
import vars, state, builder, jwack, deps import vars, state, builder, jwack, deps
from helpers import unlink from logs import debug2, err
from logs import debug, debug2, err
def should_build(t): def should_build(t):
f = state.File(name=t) f = state.File(name=t)
if f.is_failed(): if f.is_failed():
raise builder.ImmediateReturn(32) raise builder.ImmediateReturn(32)
dirty = deps.isdirty(f, depth = '', max_changed = vars.RUNID, dirty = deps.isdirty(f, depth='', max_changed=vars.RUNID,
already_checked=[]) already_checked=[])
return f.is_generated, dirty==[f] and deps.DIRTY or dirty return f.is_generated, dirty == [f] and deps.DIRTY or dirty
rv = 202 def main():
try: rv = 202
if vars_init.is_toplevel and vars.LOG:
builder.close_stdin()
builder.start_stdin_log_reader(status=True, details=True,
pretty=True, color=True, debug_locks=False, debug_pids=False)
if vars.TARGET and not vars.UNLOCKED:
me = os.path.join(vars.STARTDIR,
os.path.join(vars.PWD, vars.TARGET))
f = state.File(name=me)
debug2('TARGET: %r %r %r\n' % (vars.STARTDIR, vars.PWD, vars.TARGET))
else:
f = me = None
debug2('redo-ifchange: not adding depends.\n')
jwack.setup(1)
try: try:
targets = sys.argv[1:] if vars_init.is_toplevel and vars.LOG:
if f: builder.close_stdin()
for t in targets: builder.start_stdin_log_reader(
f.add_dep('m', t) status=True, details=True,
f.save() pretty=True, color=True, debug_locks=False, debug_pids=False)
state.commit() if vars.TARGET and not vars.UNLOCKED:
rv = builder.main(targets, should_build) me = os.path.join(vars.STARTDIR,
finally: os.path.join(vars.PWD, vars.TARGET))
f = state.File(name=me)
debug2('TARGET: %r %r %r\n'
% (vars.STARTDIR, vars.PWD, vars.TARGET))
else:
f = me = None
debug2('redo-ifchange: not adding depends.\n')
jwack.setup(1)
try: try:
state.rollback() targets = sys.argv[1:]
if f:
for t in targets:
f.add_dep('m', t)
f.save()
state.commit()
rv = builder.main(targets, should_build)
finally: finally:
try: try:
jwack.force_return_tokens() state.rollback()
except Exception, e: finally:
traceback.print_exc(100, sys.stderr) try:
err('unexpected error: %r\n' % e) jwack.force_return_tokens()
rv = 1 except Exception, e: # pylint: disable=broad-except
except KeyboardInterrupt: traceback.print_exc(100, sys.stderr)
err('unexpected error: %r\n' % e)
rv = 1
except KeyboardInterrupt:
if vars_init.is_toplevel:
builder.await_log_reader()
sys.exit(200)
state.commit()
if vars_init.is_toplevel: if vars_init.is_toplevel:
builder.await_log_reader() builder.await_log_reader()
sys.exit(200) sys.exit(rv)
state.commit()
if vars_init.is_toplevel:
builder.await_log_reader() if __name__ == '__main__':
sys.exit(rv) main()

View file

@ -4,19 +4,24 @@ import vars, state
from logs import err from logs import err
try: def main():
me = os.path.join(vars.STARTDIR, try:
os.path.join(vars.PWD, vars.TARGET)) me = os.path.join(vars.STARTDIR,
f = state.File(name=me) os.path.join(vars.PWD, vars.TARGET))
for t in sys.argv[1:]: f = state.File(name=me)
if not t: for t in sys.argv[1:]:
err('cannot build the empty target ("").\n') if not t:
sys.exit(204) err('cannot build the empty target ("").\n')
if os.path.exists(t): sys.exit(204)
err('redo-ifcreate: error: %r already exists\n' % t) if os.path.exists(t):
sys.exit(1) err('redo-ifcreate: error: %r already exists\n' % t)
else: sys.exit(1)
f.add_dep('c', t) else:
state.commit() f.add_dep('c', t)
except KeyboardInterrupt: state.commit()
sys.exit(200) except KeyboardInterrupt:
sys.exit(200)
if __name__ == '__main__':
main()

View file

@ -1,5 +1,6 @@
#!/usr/bin/env python2 #!/usr/bin/env python2
import errno, fcntl, os, re, struct, sys, termios, time import errno, fcntl, os, re, struct, sys, time
import termios
from atoi import atoi from atoi import atoi
import options import options
@ -28,7 +29,6 @@ import vars, logs, state
topdir = os.getcwd() topdir = os.getcwd()
already = set() already = set()
queue = []
depth = [] depth = []
total_lines = 0 total_lines = 0
status = None status = None
@ -53,11 +53,10 @@ def _atoi(s):
def _tty_width(): def _tty_width():
s = struct.pack("HHHH", 0, 0, 0, 0) s = struct.pack("HHHH", 0, 0, 0, 0)
try: try:
import fcntl, termios
s = fcntl.ioctl(sys.stderr.fileno(), termios.TIOCGWINSZ, s) s = fcntl.ioctl(sys.stderr.fileno(), termios.TIOCGWINSZ, s)
except (IOError, ImportError): except (IOError, ImportError):
return _atoi(os.environ.get('WIDTH')) or 70 return _atoi(os.environ.get('WIDTH')) or 70
(ysize,xsize,ypix,xpix) = struct.unpack('HHHH', s) (ysize, xsize, ypix, xpix) = struct.unpack('HHHH', s)
return xsize or 70 return xsize or 70
@ -69,8 +68,8 @@ def _fix_depth():
vars.DEPTH = len(depth) * ' ' vars.DEPTH = len(depth) * ' '
def _rel(topdir, mydir, path): def _rel(top, mydir, path):
return os.path.relpath(os.path.join(topdir, mydir, path), topdir) return os.path.relpath(os.path.join(top, mydir, path), topdir)
def catlog(t): def catlog(t):
@ -92,7 +91,7 @@ def catlog(t):
sf = state.File(name=t, allow_add=False) sf = state.File(name=t, allow_add=False)
except KeyError: except KeyError:
sys.stderr.write('redo-log: [%s] %r: not known to redo.\n' sys.stderr.write('redo-log: [%s] %r: not known to redo.\n'
% (os.getcwd(), t,)) % (os.getcwd(), t,))
sys.exit(24) sys.exit(24)
fid = sf.id fid = sf.id
del sf del sf
@ -148,9 +147,10 @@ def catlog(t):
tail = n + ' ' + tail tail = n + ' ' + tail
status = head + tail status = head + tail
if len(status) > width: if len(status) > width:
sys.stderr.write('\nOVERSIZE STATUS (%d):\n%r\n' % sys.stderr.write(
(len(status), status)) '\nOVERSIZE STATUS (%d):\n%r\n'
assert(len(status) <= width) % (len(status), status))
assert len(status) <= width
sys.stdout.flush() sys.stdout.flush()
sys.stderr.write('\r%-*.*s\r' % (width, width, status)) sys.stderr.write('\r%-*.*s\r' % (width, width, status))
time.sleep(min(delay, 1.0)) time.sleep(min(delay, 1.0))
@ -184,9 +184,11 @@ def catlog(t):
elif fixname not in already: elif fixname not in already:
logs.meta('do', relname, pid=pid) logs.meta('do', relname, pid=pid)
if opt.recursive: if opt.recursive:
if loglock: loglock.unlock() if loglock:
loglock.unlock()
catlog(os.path.join(mydir, text)) catlog(os.path.join(mydir, text))
if loglock: loglock.waitlock(shared=True) if loglock:
loglock.waitlock(shared=True)
already.add(fixname) already.add(fixname)
elif kind in ('do', 'waiting', 'locked', 'unlocked'): elif kind in ('do', 'waiting', 'locked', 'unlocked'):
if opt.debug_locks: if opt.debug_locks:
@ -196,9 +198,11 @@ def catlog(t):
logs.meta('do', relname, pid=pid) logs.meta('do', relname, pid=pid)
if opt.recursive: if opt.recursive:
assert text assert text
if loglock: loglock.unlock() if loglock:
loglock.unlock()
catlog(os.path.join(mydir, text)) catlog(os.path.join(mydir, text))
if loglock: loglock.waitlock(shared=True) if loglock:
loglock.waitlock(shared=True)
already.add(fixname) already.add(fixname)
elif kind == 'done': elif kind == 'done':
rv, name = text.split(' ', 1) rv, name = text.split(' ', 1)
@ -218,40 +222,49 @@ def catlog(t):
# partial line never got terminated # partial line never got terminated
print line_head print line_head
if t != '-': if t != '-':
assert(depth[-1] == t) assert depth[-1] == t
depth.pop(-1) depth.pop(-1)
_fix_depth() _fix_depth()
try:
if not targets: def main():
sys.stderr.write('redo-log: give at least one target; maybe "all"?\n') queue = []
sys.exit(1) try:
if opt.status < 2 and not os.isatty(2): if not targets:
opt.status = False sys.stderr.write(
logs.setup(file=sys.stdout, pretty=opt.pretty, color=opt.color) 'redo-log: give at least one target; ' +
if opt.debug_locks: 'maybe "all"?\n')
vars.DEBUG_LOCKS = 1 sys.exit(1)
if opt.debug_pids: if opt.status < 2 and not os.isatty(2):
vars.DEBUG_PIDS = 1 opt.status = False
if opt.ack_fd: logs.setup(file=sys.stdout, pretty=opt.pretty, color=opt.color)
# Write back to owner, to let them know we started up okay and if opt.debug_locks:
# will be able to see their error output, so it's okay to close vars.DEBUG_LOCKS = 1
# their old stderr. if opt.debug_pids:
ack_fd = int(opt.ack_fd) vars.DEBUG_PIDS = 1
assert(ack_fd > 2) if opt.ack_fd:
if os.write(ack_fd, 'REDO-OK\n') != 8: # Write back to owner, to let them know we started up okay and
raise Exception('write to ack_fd returned wrong length') # will be able to see their error output, so it's okay to close
os.close(ack_fd) # their old stderr.
queue += targets ack_fd = int(opt.ack_fd)
while queue: assert ack_fd > 2
t = queue.pop(0) if os.write(ack_fd, 'REDO-OK\n') != 8:
if t != '-': raise Exception('write to ack_fd returned wrong length')
logs.meta('do', _rel(topdir, '.', t), pid=0) os.close(ack_fd)
catlog(t) queue += targets
except KeyboardInterrupt: while queue:
sys.exit(200) t = queue.pop(0)
except IOError, e: if t != '-':
if e.errno == errno.EPIPE: logs.meta('do', _rel(topdir, '.', t), pid=0)
pass catlog(t)
else: except KeyboardInterrupt:
raise sys.exit(200)
except IOError, e:
if e.errno == errno.EPIPE:
pass
else:
raise
if __name__ == '__main__':
main()

View file

@ -27,14 +27,19 @@ def log_override(name):
pass pass
cwd = os.getcwd() def main():
for f in state.files(): cwd = os.getcwd()
if f.is_target(): for f in state.files():
if deps.isdirty(f, if f.is_target():
depth='', if deps.isdirty(f,
max_changed=vars.RUNID, depth='',
already_checked=[], max_changed=vars.RUNID,
is_checked=is_checked, already_checked=[],
set_checked=set_checked, is_checked=is_checked,
log_override=log_override): set_checked=set_checked,
print state.relpath(os.path.join(vars.BASE, f.name), cwd) log_override=log_override):
print state.relpath(os.path.join(vars.BASE, f.name), cwd)
if __name__ == '__main__':
main()

View file

@ -7,11 +7,17 @@ vars_init.init([])
import state, vars import state, vars
from logs import err from logs import err
if len(sys.argv[1:]) != 0:
err('%s: no arguments expected.\n' % sys.argv[0])
sys.exit(1)
cwd = os.getcwd() def main():
for f in state.files(): if len(sys.argv[1:]) != 0:
if f.is_source(): err('%s: no arguments expected.\n' % sys.argv[0])
print state.relpath(os.path.join(vars.BASE, f.name), cwd) sys.exit(1)
cwd = os.getcwd()
for f in state.files():
if f.is_source():
print state.relpath(os.path.join(vars.BASE, f.name), cwd)
if __name__ == '__main__':
main()

View file

@ -3,51 +3,58 @@ import sys, os
import vars, state import vars, state
from logs import err, debug2 from logs import err, debug2
if len(sys.argv) > 1:
err('%s: no arguments expected.\n' % sys.argv[0])
sys.exit(1)
if os.isatty(0): def main():
err('%s: you must provide the data to stamp on stdin\n' % sys.argv[0]) if len(sys.argv) > 1:
sys.exit(1) err('%s: no arguments expected.\n' % sys.argv[0])
sys.exit(1)
# hashlib is only available in python 2.5 or higher, but the 'sha' module if os.isatty(0):
# produces a DeprecationWarning in python 2.6 or higher. We want to support err('%s: you must provide the data to stamp on stdin\n' % sys.argv[0])
# python 2.4 and above without any stupid warnings, so let's try using hashlib sys.exit(1)
# first, and downgrade if it fails.
try:
import hashlib
except ImportError:
import sha
sh = sha.sha()
else:
sh = hashlib.sha1()
while 1: # hashlib is only available in python 2.5 or higher, but the 'sha'
b = os.read(0, 4096) # module produces a DeprecationWarning in python 2.6 or higher. We want
sh.update(b) # to support python 2.4 and above without any stupid warnings, so let's
if not b: break # try using hashlib first, and downgrade if it fails.
try:
import hashlib
except ImportError:
import sha
sh = sha.sha()
else:
sh = hashlib.sha1()
csum = sh.hexdigest() while 1:
b = os.read(0, 4096)
sh.update(b)
if not b:
break
if not vars.TARGET: csum = sh.hexdigest()
sys.exit(0)
me = os.path.join(vars.STARTDIR, if not vars.TARGET:
os.path.join(vars.PWD, vars.TARGET)) sys.exit(0)
f = state.File(name=me)
changed = (csum != f.csum) me = os.path.join(vars.STARTDIR,
debug2('%s: old = %s\n' % (f.name, f.csum)) os.path.join(vars.PWD, vars.TARGET))
debug2('%s: sum = %s (%s)\n' % (f.name, csum, f = state.File(name=me)
changed and 'changed' or 'unchanged')) changed = (csum != f.csum)
f.is_generated = True debug2('%s: old = %s\n' % (f.name, f.csum))
f.is_override = False debug2('%s: sum = %s (%s)\n' % (f.name, csum,
f.failed_runid = None changed and 'changed' or 'unchanged'))
if changed: f.is_generated = True
f.set_changed() # update_stamp might not do this if the mtime is identical f.is_override = False
f.csum = csum f.failed_runid = None
else: if changed:
# unchanged f.set_changed() # update_stamp might skip this if mtime is identical
f.set_checked() f.csum = csum
f.save() else:
state.commit() # unchanged
f.set_checked()
f.save()
state.commit()
if __name__ == '__main__':
main()

View file

@ -7,11 +7,17 @@ vars_init.init([])
import state, vars import state, vars
from logs import err from logs import err
if len(sys.argv[1:]) != 0:
err('%s: no arguments expected.\n' % sys.argv[0])
sys.exit(1)
cwd = os.getcwd() def main():
for f in state.files(): if len(sys.argv[1:]) != 0:
if f.is_target(): err('%s: no arguments expected.\n' % sys.argv[0])
print state.relpath(os.path.join(vars.BASE, f.name), cwd) sys.exit(1)
cwd = os.getcwd()
for f in state.files():
if f.is_target():
print state.relpath(os.path.join(vars.BASE, f.name), cwd)
if __name__ == '__main__':
main()

View file

@ -3,33 +3,39 @@ import sys, os
import state import state
from logs import err from logs import err
if len(sys.argv[1:]) < 2:
err('%s: at least 2 arguments expected.\n' % sys.argv[0])
sys.exit(1)
target = sys.argv[1] def main():
deps = sys.argv[2:] if len(sys.argv[1:]) < 2:
err('%s: at least 2 arguments expected.\n' % sys.argv[0])
sys.exit(1)
for d in deps: target = sys.argv[1]
assert(d != target) deps = sys.argv[2:]
me = state.File(name=target) for d in deps:
assert d != target
# Build the known dependencies of our primary target. This *does* require me = state.File(name=target)
# grabbing locks.
os.environ['REDO_NO_OOB'] = '1'
argv = ['redo-ifchange'] + deps
rv = os.spawnvp(os.P_WAIT, argv[0], argv)
if rv:
sys.exit(rv)
# We know our caller already owns the lock on target, so we don't have to # Build the known dependencies of our primary target. This *does* require
# acquire another one; tell redo-ifchange about that. Also, REDO_NO_OOB # grabbing locks.
# persists from up above, because we don't want to do OOB now either. os.environ['REDO_NO_OOB'] = '1'
# (Actually it's most important for the primary target, since it's the one argv = ['redo-ifchange'] + deps
# who initiated the OOB in the first place.) rv = os.spawnvp(os.P_WAIT, argv[0], argv)
os.environ['REDO_UNLOCKED'] = '1' if rv:
argv = ['redo-ifchange', target] sys.exit(rv)
rv = os.spawnvp(os.P_WAIT, argv[0], argv)
if rv: # We know our caller already owns the lock on target, so we don't have to
sys.exit(rv) # acquire another one; tell redo-ifchange about that. Also, REDO_NO_OOB
# persists from up above, because we don't want to do OOB now either.
# (Actually it's most important for the primary target, since it's the one
# who initiated the OOB in the first place.)
os.environ['REDO_UNLOCKED'] = '1'
argv = ['redo-ifchange', target]
rv = os.spawnvp(os.P_WAIT, argv[0], argv)
if rv:
sys.exit(rv)
if __name__ == '__main__':
main()

View file

@ -7,22 +7,29 @@ vars_init.init_no_state()
import paths import paths
from logs import err from logs import err
if len(sys.argv[1:]) != 1:
err('%s: exactly one argument expected.\n' % sys.argv[0])
sys.exit(1)
want = sys.argv[1] def main():
if not want: if len(sys.argv[1:]) != 1:
err('cannot build the empty target ("").\n') err('%s: exactly one argument expected.\n' % sys.argv[0])
sys.exit(204) sys.exit(1)
abswant = os.path.abspath(want) want = sys.argv[1]
for dodir,dofile,basedir,basename,ext in paths.possible_do_files(abswant): if not want:
dopath = os.path.join('/', dodir, dofile) err('cannot build the empty target ("").\n')
relpath = os.path.relpath(dopath, '.') sys.exit(204)
exists = os.path.exists(dopath)
assert('\n' not in relpath) abswant = os.path.abspath(want)
print relpath pdf = paths.possible_do_files(abswant)
if exists: for dodir, dofile, basedir, basename, ext in pdf:
sys.exit(0) dopath = os.path.join('/', dodir, dofile)
sys.exit(1) # no appropriate dofile found relpath = os.path.relpath(dopath, '.')
exists = os.path.exists(dopath)
assert '\n' not in relpath
print relpath
if exists:
sys.exit(0)
sys.exit(1) # no appropriate dofile found
if __name__ == '__main__':
main()

11
redo.py
View file

@ -16,7 +16,7 @@
# #
import sys, os, traceback import sys, os, traceback
import options import options
from helpers import atoi from atoi import atoi
optspec = """ optspec = """
redo [targets...] redo [targets...]
@ -83,7 +83,8 @@ try:
if vars_init.is_toplevel and (vars.LOG or j > 1): if vars_init.is_toplevel and (vars.LOG or j > 1):
builder.close_stdin() builder.close_stdin()
if vars_init.is_toplevel and vars.LOG: if vars_init.is_toplevel and vars.LOG:
builder.start_stdin_log_reader(status=opt.status, details=opt.details, builder.start_stdin_log_reader(
status=opt.status, details=opt.details,
pretty=opt.pretty, color=opt.color, pretty=opt.pretty, color=opt.color,
debug_locks=opt.debug_locks, debug_pids=opt.debug_pids) debug_locks=opt.debug_locks, debug_pids=opt.debug_pids)
for t in targets: for t in targets:
@ -98,16 +99,16 @@ try:
err('invalid --jobs value: %r\n' % opt.jobs) err('invalid --jobs value: %r\n' % opt.jobs)
jwack.setup(j) jwack.setup(j)
try: try:
assert(state.is_flushed()) assert state.is_flushed()
retcode = builder.main(targets, lambda t: (True, True)) retcode = builder.main(targets, lambda t: (True, True))
assert(state.is_flushed()) assert state.is_flushed()
finally: finally:
try: try:
state.rollback() state.rollback()
finally: finally:
try: try:
jwack.force_return_tokens() jwack.force_return_tokens()
except Exception, e: except Exception, e: # pylint: disable=broad-except
traceback.print_exc(100, sys.stderr) traceback.print_exc(100, sys.stderr)
err('unexpected error: %r\n' % e) err('unexpected error: %r\n' % e)
retcode = 1 retcode = 1

View file

@ -1,7 +1,7 @@
import sys, os, errno, glob, stat, fcntl, sqlite3 import sys, os, errno, stat, fcntl, sqlite3
import vars import vars
from helpers import unlink, close_on_exec, join from helpers import unlink, close_on_exec, join
from logs import warn, err, debug2, debug3 from logs import warn, debug2, debug3
# When the module is imported, change the process title. # When the module is imported, change the process title.
# We do it here because this module is imported by all the scripts. # We do it here because this module is imported by all the scripts.
@ -14,17 +14,18 @@ else:
cmdline[0] = os.path.splitext(os.path.basename(cmdline[0]))[0] cmdline[0] = os.path.splitext(os.path.basename(cmdline[0]))[0]
setproctitle(" ".join(cmdline)) setproctitle(" ".join(cmdline))
SCHEMA_VER=2 SCHEMA_VER = 2
TIMEOUT=60 TIMEOUT = 60
ALWAYS='//ALWAYS' # an invalid filename that is always marked as dirty ALWAYS = '//ALWAYS' # an invalid filename that is always marked as dirty
STAMP_DIR='dir' # the stamp of a directory; mtime is unhelpful STAMP_DIR = 'dir' # the stamp of a directory; mtime is unhelpful
STAMP_MISSING='0' # the stamp of a nonexistent file STAMP_MISSING = '0' # the stamp of a nonexistent file
LOG_LOCK_MAGIC=0x10000000 # fid offset for "log locks" LOG_LOCK_MAGIC = 0x10000000 # fid offset for "log locks"
class CyclicDependencyError(Exception): pass class CyclicDependencyError(Exception):
pass
def _connect(dbfile): def _connect(dbfile):
@ -74,7 +75,8 @@ def db():
if ver != SCHEMA_VER: if ver != SCHEMA_VER:
# Don't use err() here because this might happen before # Don't use err() here because this might happen before
# redo-log spawns. # redo-log spawns.
sys.stderr.write('redo: %s: found v%s (expected v%s)\n' sys.stderr.write(
'redo: %s: found v%s (expected v%s)\n'
% (dbfile, ver, SCHEMA_VER)) % (dbfile, ver, SCHEMA_VER))
sys.stderr.write('redo: manually delete .redo dir to start over.\n') sys.stderr.write('redo: manually delete .redo dir to start over.\n')
sys.exit(1) sys.exit(1)
@ -155,7 +157,7 @@ def is_flushed():
_insane = None _insane = None
def check_sane(): def check_sane():
global _insane, _writable global _insane
if not _insane: if not _insane:
_insane = not os.path.exists('%s/.redo' % vars.BASE) _insane = not os.path.exists('%s/.redo' % vars.BASE)
return not _insane return not _insane
@ -170,7 +172,7 @@ def relpath(t, base):
base = os.path.normpath(base) base = os.path.normpath(base)
tparts = t.split('/') tparts = t.split('/')
bparts = base.split('/') bparts = base.split('/')
for tp,bp in zip(tparts,bparts): for tp, bp in zip(tparts, bparts):
if tp != bp: if tp != bp:
break break
tparts.pop(0) tparts.pop(0)
@ -225,13 +227,16 @@ class File(object):
# use this mostly to avoid accidentally assigning to typos # use this mostly to avoid accidentally assigning to typos
__slots__ = ['id'] + _file_cols[1:] __slots__ = ['id'] + _file_cols[1:]
# These warnings are a result of the weird way this class is
# initialized, which we should fix, and then re-enable warning.
# pylint: disable=attribute-defined-outside-init
def _init_from_idname(self, id, name, allow_add): def _init_from_idname(self, id, name, allow_add):
q = ('select %s from Files ' % join(', ', _file_cols)) q = ('select %s from Files ' % join(', ', _file_cols))
if id != None: if id != None:
q += 'where rowid=?' q += 'where rowid=?'
l = [id] l = [id]
elif name != None: elif name != None:
name = (name==ALWAYS) and ALWAYS or relpath(name, vars.BASE) name = (name == ALWAYS) and ALWAYS or relpath(name, vars.BASE)
q += 'where name=?' q += 'where name=?'
l = [name] l = [name]
else: else:
@ -250,7 +255,7 @@ class File(object):
# big deal. # big deal.
pass pass
row = d.execute(q, l).fetchone() row = d.execute(q, l).fetchone()
assert(row) assert row
return self._init_from_cols(row) return self._init_from_cols(row)
def _init_from_cols(self, cols): def _init_from_cols(self, cols):
@ -262,9 +267,9 @@ class File(object):
def __init__(self, id=None, name=None, cols=None, allow_add=True): def __init__(self, id=None, name=None, cols=None, allow_add=True):
if cols: if cols:
return self._init_from_cols(cols) self._init_from_cols(cols)
else: else:
return self._init_from_idname(id, name, allow_add=allow_add) self._init_from_idname(id, name, allow_add=allow_add)
def __repr__(self): def __repr__(self):
return "File(%r)" % (self.nicename(),) return "File(%r)" % (self.nicename(),)
@ -337,13 +342,13 @@ class File(object):
return False # special name, ignore return False # special name, ignore
newstamp = self.read_stamp() newstamp = self.read_stamp()
if (self.is_generated and if (self.is_generated and
(not self.is_failed() or newstamp != STAMP_MISSING) and (not self.is_failed() or newstamp != STAMP_MISSING) and
not self.is_override and not self.is_override and
self.stamp == newstamp): self.stamp == newstamp):
# target is as we left it # target is as we left it
return False return False
if ((not self.is_generated or self.stamp != newstamp) and if ((not self.is_generated or self.stamp != newstamp) and
newstamp == STAMP_MISSING): newstamp == STAMP_MISSING):
# target has gone missing after the last build. # target has gone missing after the last build.
# It's not usefully a source *or* a target. # It's not usefully a source *or* a target.
return False return False
@ -375,8 +380,8 @@ class File(object):
for row in db().execute(q, [self.id]).fetchall(): for row in db().execute(q, [self.id]).fetchall():
mode = row[0] mode = row[0]
cols = row[1:] cols = row[1:]
assert(mode in ('c', 'm')) assert mode in ('c', 'm')
yield mode,File(cols=cols) yield mode, File(cols=cols)
def zap_deps1(self): def zap_deps1(self):
debug2('zap-deps1: %r\n' % self.name) debug2('zap-deps1: %r\n' % self.name)
@ -389,7 +394,7 @@ class File(object):
def add_dep(self, mode, dep): def add_dep(self, mode, dep):
src = File(name=dep) src = File(name=dep)
debug3('add-dep: "%s" < %s "%s"\n' % (self.name, mode, src.name)) debug3('add-dep: "%s" < %s "%s"\n' % (self.name, mode, src.name))
assert(self.id != src.id) assert self.id != src.id
_write("insert or replace into Deps " _write("insert or replace into Deps "
" (target, mode, source, delete_me) values (?,?,?,?)", " (target, mode, source, delete_me) values (?,?,?,?)",
[self.id, mode, src.id, False]) [self.id, mode, src.id, False])
@ -404,10 +409,12 @@ class File(object):
return False, STAMP_DIR return False, STAMP_DIR
else: else:
# a "unique identifier" stamp for a regular file # a "unique identifier" stamp for a regular file
return (stat.S_ISLNK(st.st_mode), return (
stat.S_ISLNK(st.st_mode),
'-'.join(str(s) for s in '-'.join(str(s) for s in
('%.6f' % st.st_mtime, st.st_size, st.st_ino, ('%.6f' % st.st_mtime, st.st_size, st.st_ino,
st.st_mode, st.st_uid, st.st_gid))) st.st_mode, st.st_uid, st.st_gid))
)
def read_stamp(self): def read_stamp(self):
is_link, pre = self._read_stamp_st(os.lstat) is_link, pre = self._read_stamp_st(os.lstat)
@ -444,12 +451,12 @@ def logname(fid):
# ok, but it doesn't have F_GETLK, so we can't report which pid owns the lock. # ok, but it doesn't have F_GETLK, so we can't report which pid owns the lock.
# The makes debugging a bit harder. When we someday port to C, we can do that. # The makes debugging a bit harder. When we someday port to C, we can do that.
_locks = {} _locks = {}
class Lock: class Lock(object):
def __init__(self, fid): def __init__(self, fid):
self.owned = False self.owned = False
self.fid = fid self.fid = fid
assert(_lockfile >= 0) assert _lockfile >= 0
assert(_locks.get(fid,0) == 0) assert _locks.get(fid, 0) == 0
_locks[fid] = 1 _locks[fid] = 1
def __del__(self): def __del__(self):
@ -458,7 +465,7 @@ class Lock:
self.unlock() self.unlock()
def check(self): def check(self):
assert(not self.owned) assert not self.owned
if str(self.fid) in vars.get_locks(): if str(self.fid) in vars.get_locks():
# Lock already held by parent: cyclic dependence # Lock already held by parent: cyclic dependence
raise CyclicDependencyError() raise CyclicDependencyError()
@ -480,7 +487,8 @@ class Lock:
def waitlock(self, shared=False): def waitlock(self, shared=False):
self.check() self.check()
assert not self.owned assert not self.owned
fcntl.lockf(_lockfile, fcntl.lockf(
_lockfile,
fcntl.LOCK_SH if shared else fcntl.LOCK_EX, fcntl.LOCK_SH if shared else fcntl.LOCK_EX,
1, self.fid) 1, self.fid)
self.owned = True self.owned = True
@ -488,6 +496,6 @@ class Lock:
def unlock(self): def unlock(self):
if not self.owned: if not self.owned:
raise Exception("can't unlock %r - we don't own it" raise Exception("can't unlock %r - we don't own it"
% self.lockname) % self.fid)
fcntl.lockf(_lockfile, fcntl.LOCK_UN, 1, self.fid) fcntl.lockf(_lockfile, fcntl.LOCK_UN, 1, self.fid)
self.owned = False self.owned = False

12
vars.py
View file

@ -36,11 +36,11 @@ os.environ['REDO_NO_OOB'] = '' # not inheritable by subprocesses
def get_locks(): def get_locks():
"""Get the list of held locks.""" """Get the list of held locks."""
return os.environ.get('REDO_LOCKS', '').split(':') return os.environ.get('REDO_LOCKS', '').split(':')
def add_lock(name): def add_lock(name):
"""Add a lock to the list of held locks.""" """Add a lock to the list of held locks."""
locks = set(get_locks()) locks = set(get_locks())
locks.add(name) locks.add(name)
os.environ['REDO_LOCKS'] = ':'.join(list(locks)) os.environ['REDO_LOCKS'] = ':'.join(list(locks))

View file

@ -5,6 +5,7 @@ is_toplevel = False
def init_no_state(): def init_no_state():
global is_toplevel
if not os.environ.get('REDO'): if not os.environ.get('REDO'):
os.environ['REDO'] = 'NOT_DEFINED' os.environ['REDO'] = 'NOT_DEFINED'
is_toplevel = True is_toplevel = True
@ -13,9 +14,9 @@ def init_no_state():
def init(targets): def init(targets):
global is_toplevel
if not os.environ.get('REDO'): if not os.environ.get('REDO'):
# toplevel call to redo # toplevel call to redo
global is_toplevel
is_toplevel = True is_toplevel = True
if len(targets) == 0: if len(targets) == 0:
targets.append('all') targets.append('all')