diff --git a/src/tokens.c b/src/tokens.c
index 3fd6a949..6c7ae94b 100644
--- a/src/tokens.c
+++ b/src/tokens.c
@@ -6,7 +6,6 @@ tokenized_string_t *tokenized_string_new(void) {
     self->str = NULL;
     self->strings = cstring_array_new();
     self->tokens = token_array_new();
-
     return self;
 }
 
@@ -21,7 +20,7 @@ tokenized_string_t *tokenized_string_new_size(size_t len, size_t num_tokens) {
 
 inline tokenized_string_t *tokenized_string_new_from_str_size(char *src, size_t len, size_t num_tokens) {
     tokenized_string_t *self = tokenized_string_new_size(len, num_tokens);
-    self->str = src;
+    self->str = strndup(src, len);
     return self;
 }
 
@@ -38,7 +37,7 @@ void tokenized_string_add_token(tokenized_string_t *self, const char *src, size_
 
 tokenized_string_t *tokenized_string_from_tokens(char *src, token_array *tokens, bool copy_tokens) {
     tokenized_string_t *self = malloc(sizeof(tokenized_string_t));
-    self->str = src;
+    self->str = strdup(src);
     self->strings = cstring_array_new_size(strlen(src) + tokens->n);
     if (copy_tokens) {
         self->tokens = token_array_new_copy(tokens, tokens->n);
@@ -48,7 +47,7 @@ tokenized_string_t *tokenized_string_from_tokens(char *src, token_array *tokens,
 
     token_t token;
 
-    for (int i = 0; i < tokens->n; i++) {
+    for (size_t i = 0; i < tokens->n; i++) {
         token = tokens->a[i];
         cstring_array_add_string_len(self->strings, src + token.offset, token.len);
     }
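
For context on the ownership change above: once `tokenized_string_from_tokens` duplicates `src` (and copies the token array when `copy_tokens` is true), the caller can free or reuse its own buffers after construction. Below is a minimal caller-side sketch, assuming `tokens.h` exposes the generated `token_array_new`/`token_array_push`/`token_array_destroy` helpers and a `tokenized_string_destroy` that releases the duplicated string; those names are assumptions, not shown in this diff.

```c
/* Hypothetical caller: illustrates that, after this patch, the tokenized
 * string owns a private copy of the input text. Helper names below are
 * assumed from tokens.h and do not appear in the diff itself. */
#include <stdlib.h>
#include <string.h>

#include "tokens.h"

int main(void) {
    /* Caller-owned input buffer. */
    char *src = strdup("123 Main St");

    /* Two tokens referencing byte ranges of src: "123" and "Main". */
    token_array *tokens = token_array_new();
    token_array_push(tokens, (token_t){.offset = 0, .len = 3});
    token_array_push(tokens, (token_t){.offset = 4, .len = 4});

    /* copy_tokens = true: the constructor copies the token array, and
     * with this patch it also strdup's src. */
    tokenized_string_t *ts = tokenized_string_from_tokens(src, tokens, true);

    /* Safe now that ts->str is a private copy: release the caller's data. */
    free(src);
    token_array_destroy(tokens);

    /* ... use ts->strings / ts->tokens ... */

    tokenized_string_destroy(ts);   /* assumed to free str, strings, tokens */
    return 0;
}
```

The trade-off is one extra allocation and copy per tokenized string, in exchange for not tying its lifetime to the caller's buffer.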