[fix] better allocation sizes for tokenized strings

This commit is contained in:
Al
2015-04-05 22:02:31 -04:00
parent 198e51b8a3
commit 53844067b1


@@ -22,7 +22,7 @@ void tokenized_string_add_token(tokenized_string_t *self, const char *src, size_
 tokenized_string_t *tokenized_string_from_tokens(char *src, token_array *tokens) {
     tokenized_string_t *self = malloc(sizeof(tokenized_string_t));
-    self->str = cstring_array_new_size(strlen(src));
+    self->str = cstring_array_new_size(strlen(src) + tokens->n);
     self->tokens = tokens;
     token_t token;
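
Note on the size change: the string storage presumably keeps each token as its own NUL-terminated entry, so a buffer sized only for strlen(src) leaves no room for the per-token terminators. Adding tokens->n reserves one extra byte per token. The following is a minimal standalone sketch of that arithmetic under this assumption; it is illustration only, not libpostal code, and the names in it are hypothetical.

#include <stdio.h>
#include <stdlib.h>
#include <string.h>

int main(void) {
    const char *src = "hello world foo";
    const char *tokens[] = {"hello", "world", "foo"};
    size_t n_tokens = sizeof(tokens) / sizeof(tokens[0]);

    /* Token characters are substrings of src, so they never exceed strlen(src).
       Each stored token also needs one '\0', hence the + n_tokens term. */
    size_t alloc_size = strlen(src) + n_tokens;

    char *buf = malloc(alloc_size);
    if (buf == NULL) return 1;

    size_t offset = 0;
    for (size_t i = 0; i < n_tokens; i++) {
        size_t len = strlen(tokens[i]);
        memcpy(buf + offset, tokens[i], len);
        buf[offset + len] = '\0';  /* terminator covered by the + n_tokens bytes */
        offset += len + 1;
    }

    printf("used %zu of %zu bytes\n", offset, alloc_size);
    free(buf);
    return 0;
}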