42sh/src/lexer/lexer.h
2026-01-14 19:58:59 +01:00

74 lines
1.5 KiB
C

#ifndef LEXER_H
#define LEXER_H
#include <sys/types.h>
enum token_type
{
TOKEN_NULL = 0,
TOKEN_EOF,
TOKEN_WORD,
TOKEN_NEWLINE,
TOKEN_QUOTE,
TOKEN_SEMICOLON,
TOKEN_IF,
TOKEN_THEN,
TOKEN_ELSE,
TOKEN_FI,
TOKEN_ELIF
};
/*
 * A single lexical token: its category plus the text it was built from.
 * Tokens are heap-allocated (see new_token()) and released with
 * free_token().
 */
struct token
{
enum token_type type; /* category, see enum token_type */
char *data; /* token text, owned by the token (freed by free_token());
             * NOTE(review): NUL-termination not guaranteed by this
             * header alone -- confirm against new_token() in the .c */
};
/*
 * @brief: returns the next (newly allocated) token without consuming it.
 * If end of input is reached, returns a token of type TOKEN_EOF.
 *
 * @note: the caller owns the returned token and should release it with
 * free_token().
 */
struct token *peek_token(void);
/*
 * @brief: returns the next (newly allocated) token and consumes it.
 * If end of input is reached, returns a token of type TOKEN_EOF.
 *
 * @note: the caller owns the returned token and should release it with
 * free_token().
 */
struct token *pop_token(void);
/*
 * @warning: NOT IMPLEMENTED.
 *
 * @note: maybe useful for subshells.
 */
struct token *get_token_str(void);
/*
 * @brief: return a newly allocated token, with the corresponding type.
 * The data contains [size] chars, starting from [begin].
 *
 * @param begin: start of the character span to copy into the token.
 * @param size: number of characters to take from [begin].
 *
 * @return: NULL on error, a token otherwise.
 */
struct token *new_token(char *begin, ssize_t size);
/*
 * @brief: frees the token given in argument (both the token and its
 * data). Safe ownership counterpart of new_token().
 */
void free_token(struct token *tok);
/*
 * @brief: checks if the stream used for the last token creation is empty.
 * If it is, it calls stream_read() from IO_backend,
 * and sets [remaining_chars].
 * If not, it starts from the end of the last token.
 * Also trims left blanks before returning.
 *
 * @return: char* stream from which we tokenise.
 */
char *stream_init(void);
#endif /* ! LEXER_H */