Skip to content

Commit

Permalink
fix: Fix lexer bug on file ending with a directive
Browse files Browse the repository at this point in the history
  • Loading branch information
keyvank committed Nov 30, 2024
1 parent acba071 commit d98714f
Show file tree
Hide file tree
Showing 3 changed files with 17 additions and 7 deletions.
19 changes: 12 additions & 7 deletions lexer.c
Original file line number Diff line number Diff line change
Expand Up @@ -183,17 +183,18 @@ typed_token *next_op(char **inp_ptr, int is_newline)
}
continue;
}
if (*inp == '\n' || *inp == '\0')
{
line[sz] = '\0';
*inp_ptr = inp;
typed_token *dir_tkns = tokenize(line);
return new_tkn(TKN_DIRECTIVE, dir_tkns, directive_tkn_debug);
}

if (*inp == '\n')
break;

line[sz] = *inp;
sz++;
inp++;
}
line[sz] = '\0';
*inp_ptr = inp;
typed_token *dir_tkns = tokenize(line);
return new_tkn(TKN_DIRECTIVE, dir_tkns, directive_tkn_debug);
}
if (*inp == '?')
{
Expand Down Expand Up @@ -625,4 +626,8 @@ typed_token *tokenize_file(char *path)
return tokenize(data);
ret:
return NULL;
}

/* Build a fresh end-of-file token (TKN_EOF).
 *
 * Caller owns the returned token (allocated by new_simp_tkn). Used by the
 * preprocessor's chain_tokens to terminate a re-chained token list.
 *
 * Note: declared with (void) — an empty parameter list `()` in a C
 * definition declares no prototype and disables argument checking. */
typed_token *eof_token(void) {
    return new_simp_tkn(TKN_EOF);
}
1 change: 1 addition & 0 deletions lexer.h
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ typed_token *new_tkn(int tkn_id, void *data, void (*debug)(typed_token *));
void str_tkn_debug(typed_token *tkn);
typed_token *tokenize_file(char *path);
typed_token *tokenize(char *inp);
typed_token *eof_token();

#define TKN_EOF 0
#define TKN_VOID 1
Expand Down
4 changes: 4 additions & 0 deletions preprocess/preprocess.c
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,10 @@ typed_token *chain_tokens(linked_list *tkns)
curr_tkn->next = (typed_token *)curr->value;
curr_tkn = curr_tkn->next;
}
else
{
curr_tkn->next = eof_token();
}
}
return res_first;
}
Expand Down

0 comments on commit d98714f

Please sign in to comment.