Issue 1115

This commit is contained in:
Thomas Nagy 2012-02-18 00:53:18 +01:00
parent 652d42f0ff
commit 35e7a05ffa
1 changed file with 4 additions and 2 deletions

View File

@ -712,7 +712,6 @@ def parse_char(txt):
	try: return chr_esc[c]
	except KeyError: raise PreprocError("could not parse char literal '%s'" % txt)
@Utils.run_once
def tokenize(s):
	"""
	Convert a string into a list of tokens (shlex.split does not apply to c/c++/d)
@ -722,7 +721,10 @@ def tokenize(s):
	:return: a list of tokens
	:rtype: list of tuple(token, value)
	"""
	# the same headers are read again and again - 10% improvement on preprocessing the samba headers
	return tokenize_private(s)[:] # force a copy of the results
@Utils.run_once
def tokenize_private(s):
	ret = []
	for match in re_clexer.finditer(s):
		m = match.group