Merge branch 'lexer' into dev

This commit is contained in:
Matteo Flebus 2026-01-13 17:22:07 +01:00
commit 180dbdaf3a
2 changed files with 57 additions and 19 deletions

View file

@ -1,15 +1,53 @@
#include "lexer.h"
#include <ctype.h>
#include <stdbool.h>
#include <stddef.h>
#include <stdlib.h>
#include <string.h>
#include "io_backend/io_backend.h"
#include "utils/string_utils/string_utils.h"
static char *end_last_token;
static ssize_t remaining_chars;
/* @brief: saves the scan position so the next call to the lexer
 * resumes right after the token that was just produced.
 *
 * @param stream: start of the span that was just scanned.
 * @param i: number of characters consumed from [stream].
 */
static void save_state(char *stream, ssize_t i)
{
    end_last_token = stream + i;
    remaining_chars -= i;
}
/* @return: true if a special character from the grammar was found,
 * false otherwise.
 */
static bool is_special_char(char c)
{
    switch (c)
    {
    case '\'':
    case '\n':
    case ';':
        return true;
    default:
        return false;
    }
}
/* @return: true if the first [i] characters of [stream] form a keyword
 * from the grammar, false otherwise.
 *
 * Note: [stream] is the raw input and is generally NOT terminated at
 * offset [i], so the comparison must be bounded (strncmp), not strcmp.
 */
static bool is_keyword(char *stream, ssize_t i)
{
    if (i == 2)
    {
        return strncmp(stream, "if", 2) == 0 || strncmp(stream, "fi", 2) == 0;
    }
    if (i == 4)
    {
        /* strncmp returns 0 on a match, so each test needs `== 0`;
         * the original returned the raw strcmp value, which is true
         * precisely when the word does NOT match. */
        return strncmp(stream, "then", 4) == 0 || strncmp(stream, "else", 4) == 0
            || strncmp(stream, "elif", 4) == 0;
    }
    return false;
}
char *new_token(char *begin, ssize_t size)
{
char *res = calloc(size + 1, sizeof(char));
@ -32,6 +70,10 @@ char *stream_init(void)
stream = end_last_token;
}
char *trimed_stream = trim_blank_left(stream);
remaining_chars -= trimed_stream - stream;
stream = trimed_stream;
return stream;
}
@ -39,34 +81,29 @@ char *get_token(void)
{
// NOTE(review): this span looks like an UNMARKED merge diff — the old
// switch-based scanner and the new if-based scanner are interleaved
// without +/- markers, so as written it is not valid C. The comments
// below tag which lines appear to belong to which version; reconcile
// against the repository before trusting either path.
char *stream = stream_init();
bool inquotes = false; // only used by the old switch-based version below
ssize_t i = 0;
// Scan forward until a token boundary is found; i counts consumed chars.
while (i < remaining_chars)
{
// --- old version (removed by the merge, presumably) ---
switch (stream[i])
// --- new version ---
if (is_special_char(stream[i]))
{
case '\'':
inquotes = !inquotes;
if (i == 0) // where we create spe_char token
i++;
break;
// NOTE(review): `' ' | '\n' | '\t'` is a bitwise OR of char codes, a
// single bogus case label — not three cases; likely why it was replaced.
case ' ' | '\n' | '\t':
if (inquotes)
break;
else
{
// token creation
// skip blank char
// exit from loop
char *token = new_token(stream, i);
return token;
}
default:
}
// --- new version: blanks end the current token ---
if (isblank(stream[i]))
{
break;
}
// --- new version: a completed keyword also ends the token ---
else if (is_keyword(stream, i))
{
i++;
break;
}
i++;
}
// --- old version epilogue (dead code once save_state path is kept) ---
remaining_chars -= i;
return NULL;
// --- new version epilogue: persist position, hand back the token ---
save_state(stream, i);
return new_token(stream, i);
}

View file

@ -30,6 +30,7 @@ char *new_token(char *begin, ssize_t size);
* If it is, it calls stream_read() from IO_backend,
* and sets [remaining_chars].
* If not, it starts from the end of the last token.
* Also trims left blanks before returning.
*
* @return: char* stream from which we tokenise.
*/