Mirror of https://gitlab.com/ita1024/waf.git (synced 2025-01-20 23:40:21 +01:00)
Cleanup: Break multiple statements on multiple lines
parent cfe9158664
commit 5ac6123e5e
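The cleanup applies a single mechanical pattern across the tree: compound one-line statements such as "if cond: action", "for x in xs: action" and one-line "try:/except:" clauses are split so that every statement sits on its own line, without changing behaviour. A minimal, runnable sketch of the before/after shape (hypothetical helper functions used only to illustrate the pattern; they are not taken from waf itself):

# Before the cleanup, bodies shared a line with their clause:
#   def to_list(value):
#       if isinstance(value, str): return [value]
#       return value
#   def lookup(table, key, default=None):
#       try: return table[key]
#       except KeyError: return default

def to_list(value):
	# one statement per line after the cleanup
	if isinstance(value, str):
		return [value]
	return value

def lookup(table, key, default=None):
	# the try/except bodies move onto their own lines
	try:
		return table[key]
	except KeyError:
		return default

print(to_list('x'), lookup({'a': 1}, 'b'))  # ['x'] None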
@@ -370,11 +370,13 @@ class BuildContext(Context.Context):
 		:param funs: unused variable
 		"""
 		if isinstance(tool, list):
-			for i in tool: self.setup(i, tooldir)
+			for i in tool:
+				self.setup(i, tooldir)
 			return
 
 		module = Context.load_tool(tool, tooldir)
-		if hasattr(module, "setup"): module.setup(self)
+		if hasattr(module, "setup"):
+			module.setup(self)
 
 	def get_env(self):
 		"""Getter for the env property"""
@@ -514,7 +516,8 @@ class BuildContext(Context.Context):
 		right = '][%s%s%s]' % (col1, self.timer, col2)
 
 		cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
-		if cols < 7: cols = 7
+		if cols < 7:
+			cols = 7
 
 		ratio = ((cols * idx)//total) - 1
@@ -44,9 +44,12 @@ class ConfigSet(object):
 		if 'foo' in env:
 			print(env['foo'])
 		"""
-		if key in self.table: return True
-		try: return self.parent.__contains__(key)
-		except AttributeError: return False # parent may not exist
+		if key in self.table:
+			return True
+		try:
+			return self.parent.__contains__(key)
+		except AttributeError:
+			return False # parent may not exist
 
 	def keys(self):
 		"""Dict interface"""
@@ -180,7 +183,8 @@ class ConfigSet(object):
 		:type key: string
 		"""
 		s = self[key]
-		if isinstance(s, str): return s
+		if isinstance(s, str):
+			return s
 		return ' '.join(s)
 
 	def _get_list_value_for_modification(self, key):
@@ -264,8 +268,10 @@ class ConfigSet(object):
 		env = self
 		while 1:
 			table_list.insert(0, env.table)
-			try: env = env.parent
-			except AttributeError: break
+			try:
+				env = env.parent
+			except AttributeError:
+				break
 		merged_table = {}
 		for table in table_list:
 			merged_table.update(table)
@@ -238,7 +238,8 @@ class ConfigurationContext(Context.Context):
 		"""
 
 		tools = Utils.to_list(input)
-		if tooldir: tooldir = Utils.to_list(tooldir)
+		if tooldir:
+			tooldir = Utils.to_list(tooldir)
 		for tool in tools:
 			# avoid loading the same tool more than once with the same functions
 			# used by composite projects
@@ -265,8 +266,10 @@ class ConfigurationContext(Context.Context):
 			else:
 				func = getattr(module, 'configure', None)
 				if func:
-					if type(func) is type(Utils.readf): func(self)
-					else: self.eval_rules(func)
+					if type(func) is type(Utils.readf):
+						func(self)
+					else:
+						self.eval_rules(func)
 
 			self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
 
@@ -707,7 +707,8 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
 		Context.tools[tool] = ret
 		return ret
 	else:
-		if not with_sys_path: sys.path.insert(0, waf_dir)
+		if not with_sys_path:
+			sys.path.insert(0, waf_dir)
 		try:
 			for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
 				try:
@@ -718,7 +719,8 @@ def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
 			else: # raise an exception
 				__import__(tool)
 		finally:
-			if not with_sys_path: sys.path.remove(waf_dir)
+			if not with_sys_path:
+				sys.path.remove(waf_dir)
 		ret = sys.modules[x % tool]
 		Context.tools[tool] = ret
 		return ret
@@ -50,7 +50,8 @@ class BuildError(WafError):
 		lst = ['Build failed']
 		for tsk in self.tasks:
 			txt = tsk.format_error()
-			if txt: lst.append(txt)
+			if txt:
+				lst.append(txt)
 		return '\n'.join(lst)
 
 class ConfigurationError(WafError):
@@ -275,7 +275,8 @@ def error(*k, **kw):
 				buf.append(' File %r, line %d, in %s' % (filename, lineno, name))
 				if line:
 					buf.append(' %s' % line.strip())
-			if buf: log.error('\n'.join(buf))
+			if buf:
+				log.error('\n'.join(buf))
 
 def warn(*k, **kw):
 	"""
@@ -460,8 +460,10 @@ class Task(evil):
 
 		src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
 		tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
-		if self.outputs: sep = ' -> '
-		else: sep = ''
+		if self.outputs:
+			sep = ' -> '
+		else:
+			sep = ''
 		return '%s: %s%s%s' % (self.__class__.__name__, src_str, sep, tgt_str)
 
 	def keyword(self):
@@ -522,8 +524,10 @@ class Task(evil):
 		:param inp: input nodes
 		:type inp: node or list of nodes
 		"""
-		if isinstance(inp, list): self.inputs += inp
-		else: self.inputs.append(inp)
+		if isinstance(inp, list):
+			self.inputs += inp
+		else:
+			self.inputs.append(inp)
 
 	def set_outputs(self, out):
 		"""
@@ -532,8 +536,10 @@ class Task(evil):
 		:param out: output nodes
 		:type out: node or list of nodes
 		"""
-		if isinstance(out, list): self.outputs += out
-		else: self.outputs.append(out)
+		if isinstance(out, list):
+			self.outputs += out
+		else:
+			self.outputs.append(out)
 
 	def set_run_after(self, task):
 		"""
@@ -990,11 +996,15 @@ def compile_fun_shell(line):
 	app = parm.append
 	for (var, meth) in extr:
 		if var == 'SRC':
-			if meth: app('tsk.inputs%s' % meth)
-			else: app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
+			if meth:
+				app('tsk.inputs%s' % meth)
+			else:
+				app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
 		elif var == 'TGT':
-			if meth: app('tsk.outputs%s' % meth)
-			else: app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
+			if meth:
+				app('tsk.outputs%s' % meth)
+			else:
+				app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
 		elif meth:
 			if meth.startswith(':'):
 				if var not in dvars:
@@ -1022,8 +1032,10 @@ def compile_fun_shell(line):
 			if var not in dvars:
 				dvars.append(var)
 			app("p('%s')" % var)
-	if parm: parm = "%% (%s) " % (',\n\t\t'.join(parm))
-	else: parm = ''
+	if parm:
+		parm = "%% (%s) " % (',\n\t\t'.join(parm))
+	else:
+		parm = ''
 
 	c = COMPILE_TEMPLATE_SHELL % (line, parm)
 	Logs.debug('action: %s', c.strip().splitlines())
@@ -186,7 +186,8 @@ class task_gen(object):
 		tmp = []
 		for a in keys:
 			for x in prec.values():
-				if a in x: break
+				if a in x:
+					break
 			else:
 				tmp.append(a)
 
@@ -762,8 +763,10 @@ class subst_pc(Task.Task):
 		self.generator.bld.raw_deps[self.uid()] = lst
 
 		# make sure the signature is updated
-		try: delattr(self, 'cache_sig')
-		except AttributeError: pass
+		try:
+			delattr(self, 'cache_sig')
+		except AttributeError:
+			pass
 
 		self.force_permissions()
 
@@ -898,7 +898,8 @@ def write_config_header(self, configfile='', guard='', top=False, defines=True,
 	:type define_prefix: string
 	:param define_prefix: prefix all the defines in the file with a particular prefix
 	"""
-	if not configfile: configfile = WAF_CONFIG_H
+	if not configfile:
+		configfile = WAF_CONFIG_H
 	waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile)
 
 	node = top and self.bldnode or self.path.get_bld()
@@ -170,32 +170,56 @@ def reduce_nums(val_1, val_2, val_op):
 	#print val_1, val_2, val_op
 
 	# now perform the operation, make certain a and b are numeric
-	try: a = 0 + val_1
-	except TypeError: a = int(val_1)
-	try: b = 0 + val_2
-	except TypeError: b = int(val_2)
+	try:
+		a = 0 + val_1
+	except TypeError:
+		a = int(val_1)
+	try:
+		b = 0 + val_2
+	except TypeError:
+		b = int(val_2)
 
 	d = val_op
-	if d == '%': c = a%b
-	elif d=='+': c = a+b
-	elif d=='-': c = a-b
-	elif d=='*': c = a*b
-	elif d=='/': c = a/b
-	elif d=='^': c = a^b
-	elif d=='==': c = int(a == b)
-	elif d=='|' or d == 'bitor': c = a|b
-	elif d=='||' or d == 'or' : c = int(a or b)
-	elif d=='&' or d == 'bitand': c = a&b
-	elif d=='&&' or d == 'and': c = int(a and b)
-	elif d=='!=' or d == 'not_eq': c = int(a != b)
-	elif d=='^' or d == 'xor': c = int(a^b)
-	elif d=='<=': c = int(a <= b)
-	elif d=='<': c = int(a < b)
-	elif d=='>': c = int(a > b)
-	elif d=='>=': c = int(a >= b)
-	elif d=='<<': c = a<<b
-	elif d=='>>': c = a>>b
-	else: c = 0
+	if d == '%':
+		c = a % b
+	elif d=='+':
+		c = a + b
+	elif d=='-':
+		c = a - b
+	elif d=='*':
+		c = a * b
+	elif d=='/':
+		c = a / b
+	elif d=='^':
+		c = a ^ b
+	elif d=='==':
+		c = int(a == b)
+	elif d=='|' or d == 'bitor':
+		c = a | b
+	elif d=='||' or d == 'or' :
+		c = int(a or b)
+	elif d=='&' or d == 'bitand':
+		c = a & b
+	elif d=='&&' or d == 'and':
+		c = int(a and b)
+	elif d=='!=' or d == 'not_eq':
+		c = int(a != b)
+	elif d=='^' or d == 'xor':
+		c = int(a^b)
+	elif d=='<=':
+		c = int(a <= b)
+	elif d=='<':
+		c = int(a < b)
+	elif d=='>':
+		c = int(a > b)
+	elif d=='>=':
+		c = int(a >= b)
+	elif d=='<<':
+		c = a << b
+	elif d=='>>':
+		c = a >> b
+	else:
+		c = 0
 	return c
 
 def get_num(lst):
@@ -207,7 +231,8 @@ def get_num(lst):
 	:return: a pair containing the number and the rest of the list
 	:rtype: tuple(value, list)
 	"""
-	if not lst: raise PreprocError('empty list for get_num')
+	if not lst:
+		raise PreprocError('empty list for get_num')
 	(p, v) = lst[0]
 	if p == OP:
 		if v == '(':
@@ -263,7 +288,8 @@ def get_term(lst):
 	:rtype: value, list
 	"""
 
-	if not lst: raise PreprocError('empty list for get_term')
+	if not lst:
+		raise PreprocError('empty list for get_term')
 	num, lst = get_num(lst)
 	if not lst:
 		return (num, [])
@@ -446,18 +472,22 @@ def reduce_tokens(lst, defs, ban=[]):
 							one_param.append((p2, v2))
 							count_paren += 1
 						elif v2 == ')':
-							if one_param: args.append(one_param)
+							if one_param:
+								args.append(one_param)
 							break
 						elif v2 == ',':
-							if not one_param: raise PreprocError('empty param in funcall %r' % v)
+							if not one_param:
+								raise PreprocError('empty param in funcall %r' % v)
 							args.append(one_param)
 							one_param = []
 						else:
 							one_param.append((p2, v2))
 					else:
 						one_param.append((p2, v2))
-						if v2 == '(': count_paren += 1
-						elif v2 == ')': count_paren -= 1
+						if v2 == '(':
+							count_paren += 1
+						elif v2 == ')':
+							count_paren -= 1
 				else:
 					raise PreprocError('malformed macro')
 
@@ -502,7 +532,8 @@ def reduce_tokens(lst, defs, ban=[]):
 									for x in args[pt-st+1:]:
 										va_toks.extend(x)
 										va_toks.append((OP, ','))
-									if va_toks: va_toks.pop() # extra comma
+									if va_toks:
+										va_toks.pop() # extra comma
 								if len(accu)>1:
 									(p3, v3) = accu[-1]
 									(p4, v4) = accu[-2]
@@ -550,7 +581,8 @@ def eval_macro(lst, defs):
 	:rtype: int
 	"""
 	reduce_tokens(lst, defs, [])
-	if not lst: raise PreprocError('missing tokens to evaluate')
+	if not lst:
+		raise PreprocError('missing tokens to evaluate')
 
 	if lst:
 		p, v = lst[0]
@@ -577,7 +609,8 @@ def extract_macro(txt):
 		p, name = t[0]
 
 		p, v = t[1]
-		if p != OP: raise PreprocError('expected (')
+		if p != OP:
+			raise PreprocError('expected (')
 
 		i = 1
 		pindex = 0
@@ -680,16 +713,20 @@ def parse_char(txt):
 		return ord(txt)
 	c = txt[1]
 	if c == 'x':
-		if len(txt) == 4 and txt[3] in string.hexdigits: return int(txt[2:], 16)
+		if len(txt) == 4 and txt[3] in string.hexdigits:
+			return int(txt[2:], 16)
 		return int(txt[2:], 16)
 	elif c.isdigit():
-		if c == '0' and len(txt)==2: return 0
+		if c == '0' and len(txt)==2:
+			return 0
 		for i in 3, 2, 1:
 			if len(txt) > i and txt[1:1+i].isdigit():
 				return (1+i, int(txt[1:1+i], 8))
 	else:
-		try: return chr_esc[c]
-		except KeyError: raise PreprocError('could not parse char literal %r' % txt)
+		try:
+			return chr_esc[c]
+		except KeyError:
+			raise PreprocError('could not parse char literal %r' % txt)
 
 def tokenize(s):
 	"""
@@ -722,16 +759,23 @@ def tokenize_private(s):
 						v = 0
 						name = NUM
 				elif name == NUM:
-					if m('oct'): v = int(v, 8)
-					elif m('hex'): v = int(m('hex'), 16)
-					elif m('n0'): v = m('n0')
+					if m('oct'):
+						v = int(v, 8)
+					elif m('hex'):
+						v = int(m('hex'), 16)
+					elif m('n0'):
+						v = m('n0')
 					else:
 						v = m('char')
-						if v: v = parse_char(v)
-						else: v = m('n2') or m('n4')
+						if v:
+							v = parse_char(v)
+						else:
+							v = m('n2') or m('n4')
 				elif name == OP:
-					if v == '%:': v = '#'
-					elif v == '%:%:': v = '##'
+					if v == '%:':
+						v = '#'
+					elif v == '%:%:':
+						v = '##'
 				elif name == STR:
 					# remove the quotes around the string
 					v = v[1:-1]
@@ -867,7 +911,8 @@ class c_parser(object):
 		# return a list of tuples : keyword, line
 		code = node.read()
 		if use_trigraphs:
-			for (a, b) in trig_def: code = code.split(a).join(b)
+			for (a, b) in trig_def:
+				code = code.split(a).join(b)
 		code = re_nl.sub('', code)
 		code = re_cpp.sub(repl, code)
 		return re_lines.findall(code)
@@ -944,7 +989,8 @@ class c_parser(object):
 
 			try:
 				ve = Logs.verbose
-				if ve: Logs.debug('preproc: line is %s - %s state is %s', token, line, self.state)
+				if ve:
+					Logs.debug('preproc: line is %s - %s state is %s', token, line, self.state)
 				state = self.state
 
 				# make certain we define the state if we are about to enter in an if block
@@ -960,19 +1006,26 @@ class c_parser(object):
 
 				if token == 'if':
 					ret = eval_macro(tokenize(line), self.defs)
-					if ret: state[-1] = accepted
-					else: state[-1] = ignored
+					if ret:
+						state[-1] = accepted
+					else:
+						state[-1] = ignored
 				elif token == 'ifdef':
 					m = re_mac.match(line)
-					if m and m.group() in self.defs: state[-1] = accepted
-					else: state[-1] = ignored
+					if m and m.group() in self.defs:
+						state[-1] = accepted
+					else:
+						state[-1] = ignored
 				elif token == 'ifndef':
 					m = re_mac.match(line)
-					if m and m.group() in self.defs: state[-1] = ignored
-					else: state[-1] = accepted
+					if m and m.group() in self.defs:
+						state[-1] = ignored
+					else:
+						state[-1] = accepted
 				elif token == 'include' or token == 'import':
 					(kind, inc) = extract_include(line, self.defs)
-					if ve: Logs.debug('preproc: include found %s (%s) ', inc, kind)
+					if ve:
+						Logs.debug('preproc: include found %s (%s) ', inc, kind)
 					if kind == '"' or not strict_quotes:
 						self.current_file = self.tryfind(inc)
 						if token == 'import':
@@ -984,8 +1037,10 @@ class c_parser(object):
 						if eval_macro(tokenize(line), self.defs):
 							state[-1] = accepted
 				elif token == 'else':
-					if state[-1] == accepted: state[-1] = skipped
-					elif state[-1] == ignored: state[-1] = accepted
+					if state[-1] == accepted:
+						state[-1] = skipped
+					elif state[-1] == ignored:
+						state[-1] = accepted
 				elif token == 'define':
 					try:
 						self.defs[self.define_name(line)] = line
@@ -227,8 +227,10 @@ class stlink_task(link_task):
 def rm_tgt(cls):
 	old = cls.run
 	def wrap(self):
-		try: os.remove(self.outputs[0].abspath())
-		except OSError: pass
+		try:
+			os.remove(self.outputs[0].abspath())
+		except OSError:
+			pass
 		return old(self)
 	setattr(cls, 'run', wrap)
 rm_tgt(stlink_task)
@@ -29,7 +29,8 @@ def filter_comments(filename):
 			i += 1
 			while i < max:
 				c = txt[i]
-				if c == delim: break
+				if c == delim:
+					break
 				elif c == '\\': # skip the character following backslash
 					i += 1
 				i += 1
@@ -38,7 +39,8 @@ def filter_comments(filename):
 		elif c == '/': # try to replace a comment with whitespace
 			buf.append(txt[begin:i])
 			i += 1
-			if i == max: break
+			if i == max:
+				break
 			c = txt[i]
 			if c == '+': # eat nesting /+ +/ comment
 				i += 1
@@ -52,7 +54,8 @@ def filter_comments(filename):
 						c = None
 					elif prev == '+' and c == '/':
 						nesting -= 1
-						if nesting == 0: break
+						if nesting == 0:
+							break
 						c = None
 					i += 1
 			elif c == '*': # eat /* */ comment
@@ -61,7 +64,8 @@ def filter_comments(filename):
 				while i < max:
 					prev = c
 					c = txt[i]
-					if prev == '*' and c == '/': break
+					if prev == '*' and c == '/':
+						break
 					i += 1
 			elif c == '/': # eat // comment
 				i += 1
@@ -188,7 +192,8 @@ class d_parser(object):
 		names = self.get_strings(code) # obtain the import strings
 		for x in names:
 			# optimization
-			if x in self.allnames: continue
+			if x in self.allnames:
+				continue
 			self.allnames.append(x)
 
 			# for each name, see if it is like a node or not
@@ -22,7 +22,8 @@ def flexfun(tsk):
 	bld = tsk.generator.bld
 	wd = bld.variant_dir
 	def to_list(xx):
-		if isinstance(xx, str): return [xx]
+		if isinstance(xx, str):
+			return [xx]
 		return xx
 	tsk.last_cmd = lst = []
 	lst.extend(to_list(env.FLEX))
@@ -50,8 +50,10 @@ def get_gfortran_version(conf, fc):
 	version_re = re.compile(r"GNU\s*Fortran", re.I).search
 	cmd = fc + ['--version']
 	out, err = fc_config.getoutput(conf, cmd, stdin=False)
-	if out: match = version_re(out)
-	else: match = version_re(err)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
 	if not match:
 		conf.fatal('Could not determine the compiler type')
 
@@ -70,7 +70,8 @@ class glib_genmarshal(Task.Task):
 		)
 
 		ret = bld.exec_command(cmd1)
-		if ret: return ret
+		if ret:
+			return ret
 
 		#print self.outputs[1].abspath()
 		c = '''#include "%s"\n''' % self.outputs[0].name
@@ -126,8 +126,10 @@ def gather_ifort_versions(conf, versions):
 			continue
 		targets = {}
 		for target,arch in all_ifort_platforms:
-			if target=='intel64': targetDir='EM64T_NATIVE'
-			else: targetDir=target
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
 			try:
 				Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
 				icl_version=Utils.winreg.OpenKey(all_versions,version)
@@ -333,7 +335,8 @@ def find_ifort_win32(conf):
 
 	# before setting anything, check if the compiler is really intel fortran
 	env = dict(conf.environ)
-	if path: env.update(PATH = ';'.join(path))
+	if path:
+		env.update(PATH = ';'.join(path))
 	if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
 		conf.fatal('not intel fortran compiler could not be identified')
 
@@ -86,8 +86,10 @@ def apply_intltool_in_f(self):
 	:param install_path: installation path
 	:type install_path: string
 	"""
-	try: self.meths.remove('process_source')
-	except ValueError: pass
+	try:
+		self.meths.remove('process_source')
+	except ValueError:
+		pass
 
 	self.ensure_localedir()
 
@@ -141,8 +143,10 @@ def apply_intltool_po(self):
 
 	The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
 	"""
-	try: self.meths.remove('process_source')
-	except ValueError: pass
+	try:
+		self.meths.remove('process_source')
+	except ValueError:
+		pass
 
 	self.ensure_localedir()
 
@@ -500,8 +500,10 @@ def gather_icl_versions(conf, versions):
 			continue
 		targets = {}
 		for target,arch in all_icl_platforms:
-			if target=='intel64': targetDir='EM64T_NATIVE'
-			else: targetDir=target
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
 			try:
 				Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
 				icl_version=Utils.winreg.OpenKey(all_versions,version)
@@ -552,8 +554,10 @@ def gather_intel_composer_versions(conf, versions):
 			continue
 		targets = {}
 		for target,arch in all_icl_platforms:
-			if target=='intel64': targetDir='EM64T_NATIVE'
-			else: targetDir=target
+			if target=='intel64':
+				targetDir='EM64T_NATIVE'
+			else:
+				targetDir=target
 			try:
 				try:
 					defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
@@ -796,7 +800,8 @@ def find_msvc(conf):
 
 	# before setting anything, check if the compiler is really msvc
 	env = dict(conf.environ)
-	if path: env.update(PATH = ';'.join(path))
+	if path:
+		env.update(PATH = ';'.join(path))
 	if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env):
 		conf.fatal('the msvc compiler could not be identified')
 
@@ -36,7 +36,8 @@ def init_perlext(self):
 	*lib* prefix from library names.
 	"""
 	self.uselib = self.to_list(getattr(self, 'uselib', []))
-	if not 'PERLEXT' in self.uselib: self.uselib.append('PERLEXT')
+	if not 'PERLEXT' in self.uselib:
+		self.uselib.append('PERLEXT')
 	self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN
 
 @extension('.xs')
@@ -328,7 +328,8 @@ def apply_qt5(self):
 
 	lst = []
 	for flag in self.to_list(self.env.CXXFLAGS):
-		if len(flag) < 2: continue
+		if len(flag) < 2:
+			continue
 		f = flag[0:2]
 		if f in ('-D', '-I', '/D', '/I'):
 			if (f[0] == '/'):
@@ -375,8 +376,10 @@ class rcc(Task.Task):
 		root = self.inputs[0].parent
 		for x in curHandler.files:
 			nd = root.find_resource(x)
-			if nd: nodes.append(nd)
-			else: names.append(x)
+			if nd:
+				nodes.append(nd)
+			else:
+				names.append(x)
 		return (nodes, names)
 
 class moc(Task.Task):
@@ -48,7 +48,8 @@ def bibunitscan(self):
 	node = self.inputs[0]
 
 	nodes = []
-	if not node: return nodes
+	if not node:
+		return nodes
 
 	code = node.read()
 	for match in re_bibunit.finditer(code):
@@ -158,7 +159,8 @@ class tex(Task.Task):
 		nodes = []
 		names = []
 		seen = []
-		if not node: return (nodes, names)
+		if not node:
+			return (nodes, names)
 
 		def parse_node(node):
 			if node in seen:
@@ -38,7 +38,8 @@ class rc_parser(c_preproc.c_parser):
 		"""
 		code = node.read()
 		if c_preproc.use_trigraphs:
-			for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
+			for (a, b) in c_preproc.trig_def:
+				code = code.split(a).join(b)
 		code = c_preproc.re_nl.sub('', code)
 		code = c_preproc.re_cpp.sub(c_preproc.repl, code)
 		ret = []
@@ -33,7 +33,8 @@ re_inc = re.compile(
 def lines_includes(node):
 	code = node.read()
 	if c_preproc.use_trigraphs:
-		for (a, b) in c_preproc.trig_def: code = code.split(a).join(b)
+		for (a, b) in c_preproc.trig_def:
+			code = code.split(a).join(b)
 	code = c_preproc.re_nl.sub('', code)
 	code = c_preproc.re_cpp.sub(c_preproc.repl, code)
 	return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
@@ -29,8 +29,10 @@ def get_sxc_version(conf, fc):
 	p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
 	out, err = p.communicate()
 
-	if out: match = version_re(out)
-	else: match = version_re(err)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
 	if not match:
 		conf.fatal('Could not determine the NEC C compiler version.')
 	k = match.groupdict()
@@ -41,7 +43,8 @@ def sxc_common_flags(conf):
 	v=conf.env
 	v['CC_SRC_F']=[]
 	v['CC_TGT_F']=['-c','-o']
-	if not v['LINK_CC']:v['LINK_CC']=v['CC']
+	if not v['LINK_CC']:
+		v['LINK_CC']=v['CC']
 	v['CCLNK_SRC_F']=[]
 	v['CCLNK_TGT_F']=['-o']
 	v['CPPPATH_ST']='-I%s'
@@ -244,7 +244,8 @@ def compile_template(line):
 	extr = []
 	def repl(match):
 		g = match.group
-		if g('dollar'): return "$"
+		if g('dollar'):
+			return "$"
 		elif g('backslash'):
 			return "\\"
 		elif g('subst'):
@@ -872,8 +873,3 @@ class codelite_generator(BuildContext):
 			p.iter_path = p.tg.path
 			make_parents(p)
 
-
-
-def options(ctx):
-	pass
-
@@ -316,10 +316,12 @@ def apply_objdeps(self):
 			lst = y.to_list(y.add_objects)
 			lst.reverse()
 			for u in lst:
-				if u in seen: continue
+				if u in seen:
+					continue
 				added = 1
 				names = [u]+names
-			if added: continue # list of names modified, loop
+			if added:
+				continue # list of names modified, loop
 
 		# safe to process the current object
 		y.post()
@@ -341,8 +343,10 @@ def add_obj_file(self, file):
 	"""Small example on how to link object files as if they were source
 	obj = bld.create_obj('cc')
 	obj.add_obj_file('foo.o')"""
-	if not hasattr(self, 'obj_files'): self.obj_files = []
-	if not 'process_obj_files' in self.meths: self.meths.append('process_obj_files')
+	if not hasattr(self, 'obj_files'):
+		self.obj_files = []
+	if not 'process_obj_files' in self.meths:
+		self.meths.append('process_obj_files')
 	self.obj_files.append(file)
 
 
@@ -33,7 +33,8 @@ def dcc_common_flags(conf):
 	v['CC_TGT_F'] = ['-c', '-o']
 
 	# linker
-	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
+	if not v['LINK_CC']:
+		v['LINK_CC'] = v['CC']
 	v['CCLNK_SRC_F'] = []
 	v['CCLNK_TGT_F'] = ['-o']
 	v['CPPPATH_ST'] = '-I%s'
@@ -33,8 +33,10 @@ def get_crayftn_version(conf, fc):
 	version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
 	cmd = fc + ['-V']
 	out,err = fc_config.getoutput(conf, cmd, stdin=False)
-	if out: match = version_re(out)
-	else: match = version_re(err)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
 	if not match:
 		conf.fatal('Could not determine the Cray Fortran compiler version.')
 	k = match.groupdict()
@@ -33,12 +33,16 @@ def get_sxfc_version(conf, fc):
 	version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
 	cmd = fc + ['-V']
 	out,err = fc_config.getoutput(conf, cmd, stdin=False)
-	if out: match = version_re(out)
-	else: match = version_re(err)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
 	if not match:
 		version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P<major>\S*)\s*\(c\)\s*(?P<minor>\d*)",re.I).search
-		if out: match = version_re(out)
-		else: match = version_re(err)
+		if out:
+			match = version_re(out)
+		else:
+			match = version_re(err)
 		if not match:
 			conf.fatal('Could not determine the NEC Fortran compiler version.')
 	k = match.groupdict()
@@ -39,8 +39,10 @@ def get_open64_version(conf, fc):
 	cmd = fc + ['-version']
 
 	out, err = fc_config.getoutput(conf,cmd,stdin=False)
-	if out: match = version_re(out)
-	else: match = version_re(err)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
 	if not match:
 		conf.fatal('Could not determine the Open64 version.')
 	k = match.groupdict()
@@ -30,8 +30,10 @@ def get_pgfortran_version(conf,fc):
 	version_re = re.compile(r"The Portland Group", re.I).search
 	cmd = fc + ['-V']
 	out,err = fc_config.getoutput(conf, cmd, stdin=False)
-	if out: match = version_re(out)
-	else: match = version_re(err)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
 	if not match:
 		conf.fatal('Could not verify PGI signature')
 	cmd = fc + ['-help=variable']
@@ -45,10 +47,12 @@ def get_pgfortran_version(conf,fc):
 		lst = line.partition('=')
 		if lst[1] == '=':
 			key = lst[0].rstrip()
-			if key == '': key = prevk
+			if key == '':
+				key = prevk
 			val = lst[2].rstrip()
 			k[key] = val
-		else: prevk = line.partition(' ')[0]
+		else:
+			prevk = line.partition(' ')[0]
 	def isD(var):
 		return var in k
 	def isT(var):
@@ -43,8 +43,10 @@ def get_solstudio_version(conf, fc):
 	cmd = fc + ['-V']
 
 	out, err = fc_config.getoutput(conf,cmd,stdin=False)
-	if out: match = version_re(out)
-	else: match = version_re(err)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
 	if not match:
 		conf.fatal('Could not determine the Sun Studio Fortran version.')
 	k = match.groupdict()
@@ -89,7 +89,8 @@ def halide(self):
 		# Return a node with a new extension, in an appropriate folder
 		name = src.name
 		xpos = src.name.rfind('.')
-		if xpos == -1: xpos = len(src.name)
+		if xpos == -1:
+			xpos = len(src.name)
 		newname = name[:xpos] + ext
 		if src.is_child_of(bld.bldnode):
 			node = src.get_src().parent.find_or_declare(newname)
@@ -53,11 +53,14 @@ def configure(self):
 	kdeconfig = self.find_program('kde4-config')
 	prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
 	fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
-	try: os.stat(fname)
+	try:
+		os.stat(fname)
 	except OSError:
 		fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
-		try: os.stat(fname)
-		except OSError: self.fatal('could not open %s' % fname)
+		try:
+			os.stat(fname)
+		except OSError:
+			self.fatal('could not open %s' % fname)
 
 	try:
 		txt = Utils.readf(fname)
@@ -301,7 +301,8 @@ def compile_template(line):
 	extr = []
 	def repl(match):
 		g = match.group
-		if g('dollar'): return "$"
+		if g('dollar'):
+			return "$"
 		elif g('backslash'):
 			return "\\"
 		elif g('subst'):
@@ -20,9 +20,12 @@ foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*
 def filter_comments(txt):
 	meh = [0]
 	def repl(m):
-		if m.group(1): meh[0] += 1
-		elif m.group(2): meh[0] -= 1
-		elif not meh[0]: return m.group()
+		if m.group(1):
+			meh[0] += 1
+		elif m.group(2):
+			meh[0] -= 1
+		elif not meh[0]:
+			return m.group()
 		return ''
 	return foo.sub(repl, txt)
 
@@ -42,7 +45,8 @@ def scan(self):
 		nd = None
 		for x in self.incpaths:
 			nd = x.find_resource(name.lower()+'.ml')
-			if not nd: nd = x.find_resource(name+'.ml')
+			if not nd:
+				nd = x.find_resource(name+'.ml')
 			if nd:
 				found_lst.append(nd)
 				break
@@ -83,12 +87,14 @@ def init_envs_ml(self):
 	self.native_env = None
 	if self.type in native_lst:
 		self.native_env = self.env.derive()
-		if self.islibrary: self.native_env['OCALINKFLAGS'] = '-a'
+		if self.islibrary:
+			self.native_env['OCALINKFLAGS'] = '-a'
 
 	self.bytecode_env = None
 	if self.type in bytecode_lst:
 		self.bytecode_env = self.env.derive()
-		if self.islibrary: self.bytecode_env['OCALINKFLAGS'] = '-a'
+		if self.islibrary:
+			self.bytecode_env['OCALINKFLAGS'] = '-a'
 
 	if self.type == 'c_object':
 		self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
@@ -126,8 +132,10 @@ def apply_vars_ml(self):
 		for vname in varnames:
 			cnt = self.env[vname+'_'+name]
 			if cnt:
-				if self.bytecode_env: self.bytecode_env.append_value(vname, cnt)
-				if self.native_env: self.native_env.append_value(vname, cnt)
+				if self.bytecode_env:
+					self.bytecode_env.append_value(vname, cnt)
+				if self.native_env:
+					self.native_env.append_value(vname, cnt)
 
 @feature('ocaml')
 @after_method('process_source')
@@ -143,9 +151,12 @@ def apply_link_ml(self):
 		self.linktasks.append(linktask)
 
 	if self.native_env:
-		if self.type == 'c_object': ext = '.o'
-		elif self.islibrary: ext = '.cmxa'
-		else: ext = ''
+		if self.type == 'c_object':
+			ext = '.o'
+		elif self.islibrary:
+			ext = '.cmxa'
+		else:
+			ext = ''
 
 		linktask = self.create_task('ocalinkx')
 		linktask.set_outputs(self.path.find_or_declare(self.target + ext))
@@ -207,8 +218,10 @@ def compile_may_start(self):
 
 	# the evil part is that we can only compute the dependencies after the
 	# source files can be read (this means actually producing the source files)
-	if getattr(self, 'bytecode', ''): alltasks = self.generator.bytecode_tasks
-	else: alltasks = self.generator.native_tasks
+	if getattr(self, 'bytecode', ''):
+		alltasks = self.generator.bytecode_tasks
+	else:
+		alltasks = self.generator.native_tasks
 
 	self.signature() # ensure that files are scanned - unfortunately
 	tree = self.generator.bld
@@ -216,7 +229,8 @@ def compile_may_start(self):
 	lst = tree.node_deps[self.uid()]
 	for depnode in lst:
 		for t in alltasks:
-			if t == self: continue
+			if t == self:
+				continue
 			if depnode in t.inputs:
 				self.set_run_after(t)
 
@@ -270,8 +284,10 @@ class ocamlyacc(Task.Task):
 
 def link_may_start(self):
 
-	if getattr(self, 'bytecode', 0): alltasks = self.generator.bytecode_tasks
-	else: alltasks = self.generator.native_tasks
+	if getattr(self, 'bytecode', 0):
+		alltasks = self.generator.bytecode_tasks
+	else:
+		alltasks = self.generator.native_tasks
 
 	for x in alltasks:
 		if not x.hasrun:
@@ -286,7 +302,8 @@ def link_may_start(self):
 	pendant = []+alltasks
 	while pendant:
 		task = pendant.pop(0)
-		if task in seen: continue
+		if task in seen:
+			continue
 		for x in task.run_after:
 			if not x in seen:
 				pendant.append(task)
@@ -13,8 +13,10 @@ a file named pdebug.svg in the source directory::
 """
 
 import time, sys, re, threading
-try: from Queue import Queue
-except: from queue import Queue
+try:
+	from Queue import Queue
+except:
+	from queue import Queue
 from waflib import Runner, Options, Utils, Task, Logs, Errors
 
 #import random
@@ -125,7 +127,8 @@ def compile_template(line):
 	extr = []
 	def repl(match):
 		g = match.group
-		if g('dollar'): return "$"
+		if g('dollar'):
+			return "$"
 		elif g('backslash'):
 			return "\\"
 		elif g('subst'):
@@ -21,10 +21,14 @@ def find_pgi_compiler(conf, var, name):
 
 	v = conf.env
	 cc = None
-	if v[var]: cc = v[var]
-	elif var in conf.environ: cc = conf.environ[var]
-	if not cc: cc = conf.find_program(name, var=var)
-	if not cc: conf.fatal('PGI Compiler (%s) was not found' % name)
+	if v[var]:
+		cc = v[var]
+	elif var in conf.environ:
+		cc = conf.environ[var]
+	if not cc:
+		cc = conf.find_program(name, var=var)
+	if not cc:
+		conf.fatal('PGI Compiler (%s) was not found' % name)
 
 	v[var + '_VERSION'] = conf.get_pgi_version(cc)
 	v[var] = cc
@@ -41,8 +45,10 @@ def get_pgi_version(conf, cc):
 	except Exception:
 		conf.fatal('Could not find pgi compiler %r' % cmd)
 
-	if out: match = version_re(out)
-	else: match = version_re(err)
+	if out:
+		match = version_re(out)
+	else:
+		match = version_re(err)
 
 	if not match:
 		conf.fatal('Could not verify PGI signature')
@@ -49,7 +49,8 @@ class protoc(Task):
 		names = []
 		seen = []
 
-		if not node: return (nodes, names)
+		if not node:
+			return (nodes, names)
 
 		def parse_node(node):
 			if node in seen:
@@ -163,8 +163,10 @@ class pyrcc(Task.Task):
 		root = self.inputs[0].parent
 		for x in curHandler.files:
 			nd = root.find_resource(x)
-			if nd: nodes.append(nd)
-			else: names.append(x)
+			if nd:
+				nodes.append(nd)
+			else:
+				names.append(x)
 		return (nodes, names)
 
 
@@ -321,7 +321,8 @@ def apply_qt4(self):
 
 	lst = []
 	for flag in self.to_list(self.env['CXXFLAGS']):
-		if len(flag) < 2: continue
+		if len(flag) < 2:
+			continue
 		f = flag[0:2]
 		if f in ('-D', '-I', '/D', '/I'):
 			if (f[0] == '/'):
@@ -368,8 +369,10 @@ class rcc(Task.Task):
 		root = self.inputs[0].parent
 		for x in curHandler.files:
 			nd = root.find_resource(x)
-			if nd: nodes.append(nd)
-			else: names.append(x)
+			if nd:
+				nodes.append(nd)
+			else:
+				names.append(x)
 		return (nodes, names)
 
 class moc(Task.Task):
@@ -242,7 +242,8 @@ class ReviewContext(Context.Context):
 		"""
 		Return true if the review sets specified are equal.
 		"""
-		if len(set1.keys()) != len(set2.keys()): return False
+		if len(set1.keys()) != len(set2.keys()):
+			return False
 		for key in set1.keys():
 			if not key in set2 or set1[key] != set2[key]:
 				return False
@@ -259,7 +260,8 @@ class ReviewContext(Context.Context):
 			name = ", ".join(opt._short_opts + opt._long_opts)
 			help = opt.help
 			actual = None
-			if dest in review_set: actual = review_set[dest]
+			if dest in review_set:
+				actual = review_set[dest]
 			default = review_defaults[dest]
 			lines.append(self.format_option(name, help, actual, default, term_width))
 		return "Configuration:\n\n" + "\n\n".join(lines) + "\n"
@@ -278,7 +280,8 @@ class ReviewContext(Context.Context):
 
 		w = textwrap.TextWrapper()
 		w.width = term_width - 1
-		if w.width < 60: w.width = 60
+		if w.width < 60:
+			w.width = 60
 
 		out = ""
 
@@ -54,9 +54,9 @@ def apply_run_py_script(tg):
 	Attributes:
 
 		* source -- A **single** source node or string. (required)
-		* target -- A single target or list of targets (nodes or strings).
+		* target -- A single target or list of targets (nodes or strings)
 		* deps -- A single dependency or list of dependencies (nodes or strings)
-		* add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable.
+		* add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable
 
 	If the build environment has an attribute "PROJECT_PATHS" with
 	a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
@@ -64,7 +64,8 @@ def apply_run_py_script(tg):
 
 	# Set the Python version to use, default to 3.
 	v = getattr(tg, 'version', 3)
-	if v not in (2, 3): raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v)
+	if v not in (2, 3):
+		raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v)
 
 	# Convert sources and targets to nodes
 	src_node = tg.path.find_resource(tg.source)
|
@ -53,9 +53,12 @@ def apply_sas(self):
|
||||
deps = self.to_list(self.deps)
|
||||
for filename in deps:
|
||||
n = self.path.find_resource(filename)
|
||||
if not n: n = self.bld.root.find_resource(filename)
|
||||
if not n: raise Errors.WafError('cannot find input file %s for processing' % filename)
|
||||
if not n in deps_lst: deps_lst.append(n)
|
||||
if not n:
|
||||
n = self.bld.root.find_resource(filename)
|
||||
if not n:
|
||||
raise Errors.WafError('cannot find input file %s for processing' % filename)
|
||||
if not n in deps_lst:
|
||||
deps_lst.append(n)
|
||||
|
||||
for node in self.to_nodes(self.source):
|
||||
if self.type == 'sas':
|
||||
|
@ -87,7 +87,8 @@ class scalac(javaw.javac):
|
||||
bld = gen.bld
|
||||
wd = bld.bldnode.abspath()
|
||||
def to_list(xx):
|
||||
if isinstance(xx, str): return [xx]
|
||||
if isinstance(xx, str):
|
||||
return [xx]
|
||||
return xx
|
||||
self.last_cmd = lst = []
|
||||
lst.extend(to_list(env['SCALAC']))
|
||||
@ -122,5 +123,6 @@ def configure(self):
|
||||
v['CLASSPATH'] = self.environ['CLASSPATH']
|
||||
|
||||
v.SCALACFLAGS = ['-verbose']
|
||||
if not v['SCALAC']: self.fatal('scalac is required for compiling scala classes')
|
||||
if not v['SCALAC']:
|
||||
self.fatal('scalac is required for compiling scala classes')
|
||||
|
||||
|
@ -48,9 +48,11 @@ class sll_finder(Task):
|
||||
result.seek(0)
|
||||
for line in result.readlines():
|
||||
words = line.split()
|
||||
if len(words) < 3 or words[1] != '=>': continue
|
||||
if len(words) < 3 or words[1] != '=>':
|
||||
continue
|
||||
lib = words[2]
|
||||
if lib == 'not': continue
|
||||
if lib == 'not':
|
||||
continue
|
||||
if any([lib.startswith(p) for p in
|
||||
[bld.bldnode.abspath(), '('] +
|
||||
self.env.SOFTLINK_EXCLUDE]):
|
||||
|
@@ -60,7 +60,8 @@ def find_tiar(conf):
 def ticc_common_flags(conf):
 	v = conf.env
 
-	if not v['LINK_CC']: v['LINK_CC'] = v['CC']
+	if not v['LINK_CC']:
+		v['LINK_CC'] = v['CC']
 	v['CCLNK_SRC_F'] = []
 	v['CCLNK_TGT_F'] = ['-o']
 	v['CPPPATH_ST'] = '-I%s'
@@ -17,8 +17,10 @@ from waflib import Task, Utils, Logs, Errors
 
 def signature(self):
 	# compute the result one time, and suppose the scan_signature will give the good result
-	try: return self.cache_sig
-	except AttributeError: pass
+	try:
+		return self.cache_sig
+	except AttributeError:
+		pass
 
 	self.m = Utils.md5()
 	self.m.update(self.hcode)