
Do not try to tokenize non-python files when using Python 3: Issue 1265.
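For background (an illustrative sketch, not part of the commit): under Python 3, tokenize.tokenize() expects UTF-8 encoded Python source, so handing it an arbitrary packed file (for example a latin-1 encoded data file) raises SyntaxError or UnicodeDecodeError instead of returning tokens. The .py extension check introduced below keeps such files away from the tokenizer. The byte string here is made up for the demonstration:

	import io
	import tokenize

	# hypothetical non-Python, non-UTF-8 payload standing in for a packed data file
	raw = b"caf\xe9 \xe0 la carte\n"

	try:
		# tokenize first tries to detect the encoding, then tokenizes line by line
		list(tokenize.tokenize(io.BytesIO(raw).readline))
	except (SyntaxError, UnicodeDecodeError, tokenize.TokenError) as exc:
		print("tokenize rejected the file:", exc)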

Author: Harald Klimach
Date:   2013-03-11 09:35:54 +01:00
parent 0f64aea102
commit 7bfe947b31

wscript (48 lines changed)

@@ -204,28 +204,29 @@ def process_decorators(body):
 	return "\n".join(accu+all_deco)
 def sfilter(path):
-	if sys.version_info[0] >= 3 and Options.options.strip_comments:
-		f = open(path, "rb")
-		try:
-			tk = tokenize.tokenize(f.readline)
-			next(tk) # the first one is always tokenize.ENCODING for Python 3, ignore it
-			cnt = process_tokens(tk)
-		finally:
-			f.close()
-	elif Options.options.strip_comments and path.endswith('.py'):
-		f = open(path, "r")
-		try:
-			cnt = process_tokens(tokenize.generate_tokens(f.readline))
-		finally:
-			f.close()
-	else:
-		f = open(path, "r")
-		try:
-			cnt = f.read()
-		finally:
-			f.close()
+	if path.endswith('.py') :
+		if Options.options.strip_comments:
+			if sys.version_info[0] >= 3:
+				f = open(path, "rb")
+				try:
+					tk = tokenize.tokenize(f.readline)
+					next(tk) # the first one is always tokenize.ENCODING for Python 3, ignore it
+					cnt = process_tokens(tk)
+				finally:
+					f.close()
+			else:
+				f = open(path, "r")
+				try:
+					cnt = process_tokens(tokenize.generate_tokens(f.readline))
+				finally:
+					f.close()
+		else:
+			f = open(path, "r")
+			try:
+				cnt = f.read()
+			finally:
+				f.close()
 	# WARNING: since we now require python 2.4, we do not process the decorators anymore
 	# if you need such a thing, uncomment the code below:
 	#cnt = process_decorators(cnt)
@@ -233,6 +234,13 @@ def sfilter(path):
 	#	cnt = 'import sys\nif sys.hexversion < 0x020400f0: from sets import Set as set\n' + cnt
 		cnt = '#! /usr/bin/env python\n# encoding: utf-8\n# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file\n\n' + cnt
+	else:
+		f = open(path, "r")
+		try:
+			cnt = f.read()
+		finally:
+			f.close()
 	if sys.hexversion > 0x030000f0:
 		return (io.BytesIO(cnt.encode('utf-8')), len(cnt), cnt)
 	return (io.BytesIO(cnt), len(cnt), cnt)