feat(lexer): destroy_lexer_context
This commit is contained in:
parent
927e4ea25a
commit
d59b72dc45
2 changed files with 27 additions and 7 deletions
|
|
@ -10,6 +10,8 @@
|
||||||
#include "io_backend/io_backend.h"
|
#include "io_backend/io_backend.h"
|
||||||
#include "utils/string_utils/string_utils.h"
|
#include "utils/string_utils/string_utils.h"
|
||||||
|
|
||||||
|
// ######## STATIC FUNCTIONS ##############
|
||||||
|
|
||||||
/* @brief: sets the ctx->current_token to [tok].
|
/* @brief: sets the ctx->current_token to [tok].
|
||||||
* this function is called by token_peek().
|
* this function is called by token_peek().
|
||||||
*/
|
*/
|
||||||
|
|
@ -37,10 +39,10 @@ static void update_previous_token(struct token *tok, struct lexer_context *ctx)
|
||||||
static void save_state(char *stream, ssize_t i, struct lexer_context *ctx)
|
static void save_state(char *stream, ssize_t i, struct lexer_context *ctx)
|
||||||
{
|
{
|
||||||
ctx->remaining_chars -= i;
|
ctx->remaining_chars -= i;
|
||||||
ctx->end_ctx->previous_token = stream + i;
|
ctx->end_previous_token = stream + i;
|
||||||
|
|
||||||
update_ctx->previous_token(ctx->current_token);
|
update_previous_token(ctx->current_token);
|
||||||
update_ctx->current_token(NULL);
|
update_current_token(NULL);
|
||||||
}
|
}
|
||||||
|
|
||||||
/* @return: true if a special character from the grammar was found,
|
/* @return: true if a special character from the grammar was found,
|
||||||
|
|
@ -176,6 +178,20 @@ static void set_token_word(struct token *tok, char *begin, ssize_t size)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
void destroy_lexer_context(struct lexer_context **ctx)
|
||||||
|
{
|
||||||
|
if (ctx == NULL || *ctx == NULL)
|
||||||
|
return;
|
||||||
|
if (ctx->end_previous_token != NULL)
|
||||||
|
free(ctx->end_previous_token);
|
||||||
|
if (ctx->previous_token != NULL)
|
||||||
|
free(ctx->previous_token);
|
||||||
|
if (ctx->current_token != NULL)
|
||||||
|
free(ctx->current_token);
|
||||||
|
free(*ctx);
|
||||||
|
*ctx = NULL;
|
||||||
|
}
|
||||||
|
|
||||||
struct token *new_token(char *begin, ssize_t size)
|
struct token *new_token(char *begin, ssize_t size)
|
||||||
{
|
{
|
||||||
struct token *tok = calloc(1, sizeof(struct token));
|
struct token *tok = calloc(1, sizeof(struct token));
|
||||||
|
|
@ -209,7 +225,7 @@ char *stream_init(struct lexer_context *ctx)
|
||||||
}
|
}
|
||||||
else
|
else
|
||||||
{
|
{
|
||||||
stream = ctx->end_ctx->previous_token;
|
stream = ctx->end_previous_token;
|
||||||
}
|
}
|
||||||
|
|
||||||
char *trimed_stream = trim_blank_left(stream);
|
char *trimed_stream = trim_blank_left(stream);
|
||||||
|
|
@ -297,7 +313,7 @@ struct token *peek_token(struct lexer_context *ctx)
|
||||||
}
|
}
|
||||||
|
|
||||||
struct token *tok = new_token(stream, i);
|
struct token *tok = new_token(stream, i);
|
||||||
update_ctx->current_token(tok);
|
update_current_token(tok);
|
||||||
return tok;
|
return tok;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
||||||
|
|
@ -5,13 +5,17 @@
|
||||||
|
|
||||||
/* Lexer state carried across peek/pop calls.
 * (Post-commit declaration: fields renamed from *last_token to
 * *previous_token in this change.) */
struct lexer_context
{
    char *end_previous_token;   /* one past the end of the last consumed
                                   token inside the input stream; set to
                                   `stream + i` by save_state()           */
    ssize_t remaining_chars;    /* characters left to lex in the stream   */

    struct token *previous_token; /* token consumed before current_token  */
    struct token *current_token;  /* token produced by the latest peek    */
};

/* @brief: frees all fields of *ctx and sets *ctx to NULL.
 */
void destroy_lexer_context(struct lexer_context **ctx);
||||||
|
|
||||||
enum lexing_mode
|
enum lexing_mode
|
||||||
{
|
{
|
||||||
LEXER_NORMAL,
|
LEXER_NORMAL,
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue