redo-whichdo: fix a bug where the last dir was checked twice, and add tests.

When we can't find a .do file, we walk all the way back to the root
directory.  When that happens, the root directory is actually searched
twice.  This is harmless (since a .do file doesn't exist there anyway)
but causes redo-whichdo to produce the wrong output.

Also, add a test, which I forgot to do when writing whichdo in the
first place.

To make the test work from the root directory, we need a way to
initialize redo without actually creating a .redo directory.  Add an
init_no_state() function for that purpose, and split the necessary path
functions into their own module so we can avoid importing builder.py.
This commit is contained in:
Avery Pennarun 2018-10-30 23:23:04 -04:00
commit e40dc5bad2
10 changed files with 133 additions and 50 deletions

View file

@ -1,52 +1,9 @@
import sys, os, errno, random, stat, signal, time import sys, os, errno, random, stat, signal, time
import vars, jwack, state import vars, jwack, state, paths
from helpers import unlink, close_on_exec, join from helpers import unlink, close_on_exec, join
from log import log, log_, debug, debug2, err, warn from log import log, log_, debug, debug2, err, warn
def _default_do_files(filename):
l = filename.split('.')
for i in range(1,len(l)+1):
basename = join('.', l[:i])
ext = join('.', l[i:])
if ext: ext = '.' + ext
yield ("default%s.do" % ext), basename, ext
def possible_do_files(t):
dirname,filename = os.path.split(t)
yield (os.path.join(vars.BASE, dirname), "%s.do" % filename,
'', filename, '')
# It's important to try every possibility in a directory before resorting
# to a parent directory. Think about nested projects: I don't want
# ../../default.o.do to take precedence over ../default.do, because
# the former one might just be an artifact of someone embedding my project
# into theirs as a subdir. When they do, my rules should still be used
# for building my project in *all* cases.
t = os.path.normpath(os.path.join(vars.BASE, t))
dirname,filename = os.path.split(t)
dirbits = dirname.split('/')
for i in range(len(dirbits), -1, -1):
basedir = join('/', dirbits[:i])
subdir = join('/', dirbits[i:])
for dofile,basename,ext in _default_do_files(filename):
yield (basedir, dofile,
subdir, os.path.join(subdir, basename), ext)
def find_do_file(f):
for dodir,dofile,basedir,basename,ext in possible_do_files(f.name):
dopath = os.path.join(dodir, dofile)
debug2('%s: %s:%s ?\n' % (f.name, dodir, dofile))
if os.path.exists(dopath):
f.add_dep('m', dopath)
return dodir,dofile,basedir,basename,ext
else:
f.add_dep('c', dopath)
return None,None,None,None,None
def _nice(t): def _nice(t):
return state.relpath(t, vars.STARTDIR) return state.relpath(t, vars.STARTDIR)
@ -130,7 +87,7 @@ class BuildJob:
sf.save() sf.save()
return self._after2(0) return self._after2(0)
sf.zap_deps1() sf.zap_deps1()
(dodir, dofile, basedir, basename, ext) = find_do_file(sf) (dodir, dofile, basedir, basename, ext) = paths.find_do_file(sf)
if not dofile: if not dofile:
if os.path.exists(t): if os.path.exists(t):
sf.set_static() sf.set_static()

2
log.py
View file

@ -45,5 +45,3 @@ def debug2(s):
def debug3(s): def debug3(s):
if vars.DEBUG >= 3: if vars.DEBUG >= 3:
log_('redo: %s%s' % (vars.DEPTH, s)) log_('redo: %s%s' % (vars.DEPTH, s))

48
paths.py Normal file
View file

@ -0,0 +1,48 @@
import os
import vars
from log import err, debug2
def _default_do_files(filename):
l = filename.split('.')
for i in range(1,len(l)+1):
basename = '.'.join(l[:i])
ext = '.'.join(l[i:])
if ext: ext = '.' + ext
yield ("default%s.do" % ext), basename, ext
def possible_do_files(t):
    """Yield (dodir, dofile, basedir, basename, ext) candidates for target t.

    The first candidate is the exact "<target>.do" next to the target
    itself; after that we try every default*.do pattern in the target's
    directory, then in each ancestor directory up to the filesystem root.
    """
    dirname, filename = os.path.split(t)
    yield (os.path.join(vars.BASE, dirname), "%s.do" % filename,
           '', filename, '')

    # Exhaust every candidate in one directory before falling back to its
    # parent.  With nested projects, ../../default.o.do must not take
    # precedence over ../default.do: the former might only exist because
    # someone embedded this project as a subdir of theirs, and our own
    # rules should still win for our project in *all* cases.
    t = os.path.normpath(os.path.join(vars.BASE, t))
    dirname, filename = os.path.split(t)
    dirbits = dirname.split('/')
    # The default*.do sequence is the same at every level, so compute it once.
    defaults = list(_default_do_files(filename))
    # t is absolute, so dirbits[0] is always ''.  Stopping the countdown at
    # i=1 (not 0) avoids searching the root directory twice.
    for i in range(len(dirbits), 0, -1):
        basedir = '/'.join(dirbits[:i])
        subdir = '/'.join(dirbits[i:])
        for dofile, basename, ext in defaults:
            yield (basedir, dofile, subdir,
                   os.path.join(subdir, basename), ext)
def find_do_file(f):
    """Locate the .do file that builds target f, recording dependencies.

    Walks the candidates from possible_do_files().  Every candidate that
    does not exist is recorded as a 'c' (created-would-invalidate) dep;
    the first one that exists is recorded as an 'm' (modified) dep and its
    (dodir, dofile, basedir, basename, ext) tuple is returned.  If nothing
    matches, returns a tuple of five Nones.
    """
    for dodir, dofile, basedir, basename, ext in possible_do_files(f.name):
        candidate = os.path.join(dodir, dofile)
        debug2('%s: %s:%s ?\n' % (f.name, dodir, dofile))
        if not os.path.exists(candidate):
            f.add_dep('c', candidate)
            continue
        f.add_dep('m', candidate)
        return dodir, dofile, basedir, basename, ext
    return None, None, None, None, None

View file

@ -2,9 +2,9 @@
import sys, os import sys, os
import vars_init import vars_init
vars_init.init([]) vars_init.init_no_state()
import builder import paths
from log import err from log import err
if len(sys.argv[1:]) != 1: if len(sys.argv[1:]) != 1:
@ -13,7 +13,7 @@ if len(sys.argv[1:]) != 1:
want = sys.argv[1] want = sys.argv[1]
abswant = os.path.abspath(want) abswant = os.path.abspath(want)
for dodir,dofile,basedir,basename,ext in builder.possible_do_files(abswant): for dodir,dofile,basedir,basename,ext in paths.possible_do_files(abswant):
dopath = os.path.join('/', dodir, dofile) dopath = os.path.join('/', dodir, dofile)
relpath = os.path.relpath(dopath, '.') relpath = os.path.relpath(dopath, '.')
exists = os.path.exists(dopath) exists = os.path.exists(dopath)

1
t/260-whichdo/all.do Normal file
View file

@ -0,0 +1 @@
# Build both test targets; each runs one of the redo-whichdo checks.
redo exists nonexists

View file

@ -0,0 +1 @@
:

25
t/260-whichdo/exists.do Normal file
View file

@ -0,0 +1,25 @@
exec >&2
# redo-whichdo should succeed for this target; under the runner's "set -e"
# the script aborts right here if it doesn't, so reaching the comparison
# below already proves a .do file was found.
got=$(cd fakesub && redo-whichdo ../a/b/x.y.z)

# Note: we expect redo-whichdo to return paths relative to $PWD at the time
# it's run, which in this case is fakesub.
# Likely bugs would be to return paths relative to the start dir, the .redo
# dir, the current target dir, the requested target dir, etc.
expected=$(cat <<EOF
../a/b/x.y.z.do
../a/b/default.y.z.do
../a/b/default.z.do
../a/b/default.do
../a/default.y.z.do
../a/default.z.do
../a/default.do
../default.y.z.do
EOF
)
if [ "$got" != "$expected" ]; then
	printf 'redo-whichdo mismatch.\n\ngot:\n%s\n\nexpected:\n%s\n' "$got" "$expected"
	exit 11
fi

View file

View file

@ -0,0 +1,45 @@
exec >&2
# Checking the search path for a nonexistent .do file is a little tricky:
# we can't be sure how deep our current directory is, so we don't know how
# far up the tree redo would need to search.
#
# Dodge the problem by running from "/" so the target is relative to a
# known location (the root directory).
for f in /default.do /default.z.do /default.y.z.do; do
	if [ -e "$f" ]; then
		echo "Weird: /default.*.do exists; can't run this test."
		exit 99
	fi
done

# redo-whichdo *should* fail here, so don't abort the script for that.
set +e
got=$(cd / && redo-whichdo __nonexist/a/x.y.z)
rv=$?
set -e
if [ "$rv" -eq 0 ]; then
	echo "redo-whichdo should return nonzero for a missing .do file."
	exit 10
fi

expected=$(cat <<EOF
__nonexist/a/x.y.z.do
__nonexist/a/default.y.z.do
__nonexist/a/default.z.do
__nonexist/a/default.do
__nonexist/default.y.z.do
__nonexist/default.z.do
__nonexist/default.do
default.y.z.do
default.z.do
default.do
EOF
)
if [ "$got" != "$expected" ]; then
	printf 'redo-whichdo mismatch.\n\ngot:\n%s\n\nexpected:\n%s\n' "$got" "$expected"
	exit 11
fi

View file

@ -1,5 +1,13 @@
import sys, os import sys, os
def init_no_state():
    """Mark redo as initialized without creating any on-disk state.

    Tools like redo-whichdo need the REDO/REDO_BASE environment markers to
    be set, but should not create a .redo directory.  Any marker already
    set by a parent redo process is left untouched.
    """
    for key in ('REDO', 'REDO_BASE'):
        if not os.environ.get(key):
            os.environ[key] = 'NOT_DEFINED'
def init(targets): def init(targets):
if not os.environ.get('REDO'): if not os.environ.get('REDO'):
# toplevel call to redo # toplevel call to redo