Do not try to tokenize non-python files when using Python 3: Issue 1265.

This commit is contained in:
Harald Klimach 2013-03-11 09:35:54 +01:00
parent 0f64aea102
commit 7bfe947b31
1 changed file with 28 additions and 20 deletions

16
wscript
View File

@@ -204,7 +204,10 @@ def process_decorators(body):
return "\n".join(accu+all_deco)
def sfilter(path):
if sys.version_info[0] >= 3 and Options.options.strip_comments:
if path.endswith('.py') :
if Options.options.strip_comments:
if sys.version_info[0] >= 3:
f = open(path, "rb")
try:
tk = tokenize.tokenize(f.readline)
@@ -212,7 +215,7 @@ def sfilter(path):
cnt = process_tokens(tk)
finally:
f.close()
elif Options.options.strip_comments and path.endswith('.py'):
else:
f = open(path, "r")
try:
cnt = process_tokens(tokenize.generate_tokens(f.readline))
@@ -224,8 +227,6 @@ def sfilter(path):
cnt = f.read()
finally:
f.close()
if path.endswith('.py') :
# WARNING: since we now require python 2.4, we do not process the decorators anymore
# if you need such a thing, uncomment the code below:
#cnt = process_decorators(cnt)
@@ -233,6 +234,13 @@ def sfilter(path):
# cnt = 'import sys\nif sys.hexversion < 0x020400f0: from sets import Set as set\n' + cnt
cnt = '#! /usr/bin/env python\n# encoding: utf-8\n# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file\n\n' + cnt
else:
f = open(path, "r")
try:
cnt = f.read()
finally:
f.close()
if sys.hexversion > 0x030000f0:
return (io.BytesIO(cnt.encode('utf-8')), len(cnt), cnt)
return (io.BytesIO(cnt), len(cnt), cnt)