Mirror of https://gitlab.com/ita1024/waf.git
Warn and force --nostrip when f-strings are present in waf files
parent 61ff2ac19c
commit 4b317058de
wscript | 11 +++++++----
1 file changed, 7 insertions(+), 4 deletions(-)
@@ -116,7 +116,7 @@ def options(opt):
 	opt.add_option('--namesfrom', action='store', help='Obtain the file names from a model archive', dest='namesfrom', default=None)
 	opt.load('python')
 
-def process_tokens(tokens):
+def process_tokens(tokens, filename):
 	accu = []
 	prev = tokenize.NEWLINE
 
@@ -147,7 +147,10 @@ def process_tokens(tokens):
 			line_buf.append(token)
 		elif type == tokenize.STRING:
 			if not line_buf and token.startswith('"'): pass
-			else: line_buf.append(token)
+			else:
+				if token.lower().startswith('f'):
+					raise ValueError('Found f-strings in %s which require Python >= 3.6, use "waf-light --nostrip"' % filename)
+				line_buf.append(token)
 		elif type == tokenize.COMMENT:
 			pass
 		elif type == tokenize.OP:
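For context on the new STRING branch: before the Python 3.12 tokenizer rework, CPython's tokenize module reports an f-string as a single STRING token whose text keeps the literal prefix, which is why a plain startswith('f') test is enough to flag it. A minimal sketch (not part of the commit) of what the branch sees:

import io
import tokenize

# Tokenize a one-line source containing an f-string; on Python < 3.12 the
# whole literal arrives as one STRING token, prefix included.
src = 'greeting = f"hello {name}"\n'
for tok in tokenize.generate_tokens(io.StringIO(src).readline):
	if tok.type == tokenize.STRING:
		print(repr(tok.string), tok.string.lower().startswith('f'))
		# prints: 'f"hello {name}"' True

Note that the lowered-prefix test catches f"..." and fr"..." but not rf"..."; that looks like an accepted limitation of the check rather than an oversight worth extra code.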
@@ -189,10 +192,10 @@ def sfilter(path):
 			with open(path, 'rb') as f:
 				tk = tokenize.tokenize(f.readline)
 				next(tk) # the first one is always tokenize.ENCODING for Python 3, ignore it
-				cnt = process_tokens(tk)
+				cnt = process_tokens(tk, path)
 		else:
 			with open(path, 'r') as f:
-				cnt = process_tokens(tokenize.generate_tokens(f.readline))
+				cnt = process_tokens(tokenize.generate_tokens(f.readline), path)
 	else:
 		with open(path, 'r') as f:
 			cnt = f.read()
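Taken together, the change threads the source file name from sfilter() down into the token loop so the ValueError can name the offending file. A self-contained sketch of the same scan, assuming Python 3; check_no_fstrings is a hypothetical name, not something the commit adds:

import tokenize

def check_no_fstrings(path):
	# Mirrors the updated sfilter()/process_tokens() flow: tokenize the
	# file in binary mode and reject any STRING token with an 'f' prefix.
	with open(path, 'rb') as f:
		tk = tokenize.tokenize(f.readline)
		next(tk)  # the first token is always tokenize.ENCODING on Python 3
		for type, token, start, end, line in tk:
			if type == tokenize.STRING and token.lower().startswith('f'):
				raise ValueError('Found f-strings in %s which require Python >= 3.6, use "waf-light --nostrip"' % path)

check_no_fstrings('wscript')  # raises if the build script uses f-strings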