[fix] tokenize_add_tokens respects specified length

This commit is contained in:
Al
2016-01-17 20:51:43 -05:00
parent 10cadc67d7
commit de240d2b94
2 changed files with 10 additions and 2 deletions

View File

@@ -440360,7 +440360,9 @@ void tokenize_add_tokens(token_array *tokens, const char *input, size_t len, boo
 size_t token_start, token_length;
 uint16_t token_type;
-while ( ( token_type = scan_token(&scanner)) != END ) {
+size_t consumed = 0;
+while (consumed < len && (token_type = scan_token(&scanner)) != END) {
 token_start = scanner.start - scanner.src;
 token_length = scanner.cursor - scanner.start;
@@ -440374,6 +440376,8 @@ void tokenize_add_tokens(token_array *tokens, const char *input, size_t len, boo
 token.type = token_type;
 token_array_push(tokens, token);
+consumed += token_length;
 }
 }

View File

@@ -232,7 +232,9 @@ void tokenize_add_tokens(token_array *tokens, const char *input, size_t len, boo
 size_t token_start, token_length;
 uint16_t token_type;
-while ( ( token_type = scan_token(&scanner)) != END ) {
+size_t consumed = 0;
+while (consumed < len && (token_type = scan_token(&scanner)) != END) {
 token_start = scanner.start - scanner.src;
 token_length = scanner.cursor - scanner.start;
@@ -246,6 +248,8 @@ void tokenize_add_tokens(token_array *tokens, const char *input, size_t len, boo
 token.type = token_type;
 token_array_push(tokens, token);
+consumed += token_length;
 }
 }