Merge branch 'lexer' into dev
This commit is contained in:
commit
180dbdaf3a
2 changed files with 57 additions and 19 deletions
|
|
@ -1,15 +1,53 @@
|
||||||
#include "lexer.h"
|
#include "lexer.h"
|
||||||
|
|
||||||
|
#include <ctype.h>
|
||||||
#include <stdbool.h>
|
#include <stdbool.h>
|
||||||
#include <stddef.h>
|
#include <stddef.h>
|
||||||
#include <stdlib.h>
|
#include <stdlib.h>
|
||||||
#include <string.h>
|
#include <string.h>
|
||||||
|
|
||||||
#include "io_backend/io_backend.h"
|
#include "io_backend/io_backend.h"
|
||||||
|
#include "utils/string_utils/string_utils.h"
|
||||||
|
|
||||||
/* Lexer state carried across calls to get_token():
 * [end_last_token] points just past the last token handed out, and
 * [remaining_chars] counts how many characters of the stream are left. */
static char *end_last_token;
static ssize_t remaining_chars;

/* @brief: saves state for the next call to the lexer.
 *
 * Consumes [i] characters: shrinks [remaining_chars] and advances
 * [end_last_token] to the first unread character of [stream].
 */
static void save_state(char *stream, ssize_t i)
{
    end_last_token = stream + i;
    remaining_chars -= i;
}
|
||||||
|
|
||||||
|
/* @return: true if [c] is a special character from the grammar
 * (single quote, newline or semicolon), false otherwise.
 */
static bool is_special_char(char c)
{
    switch (c)
    {
    case '\'':
    case '\n':
    case ';':
        return true;
    default:
        return false;
    }
}
|
||||||
|
|
||||||
|
/* @return: true if a keyword from the grammar was found, false otherwise.
 *
 * NOTE(review): this compares the WHOLE stream, so it only matches when
 * [stream] is exactly the keyword (NUL-terminated after [i] chars) —
 * confirm callers guarantee that, otherwise strncmp(stream, kw, i) with a
 * length check would be needed.
 */
static bool is_keyword(char *stream, ssize_t i)
{
    if (i == 2)
    {
        return strcmp(stream, "if") == 0 || strcmp(stream, "fi") == 0;
    }
    if (i == 4)
    {
        /* Bug fix: strcmp() returns 0 on a MATCH, so each result must be
         * compared against 0. The previous code used the raw return value,
         * which is truthy exactly when the strings DIFFER, so every
         * non-keyword 4-char string was reported as a keyword. */
        return strcmp(stream, "then") == 0 || strcmp(stream, "else") == 0
            || strcmp(stream, "elif") == 0;
    }
    return false;
}
|
||||||
|
|
||||||
char *new_token(char *begin, ssize_t size)
|
char *new_token(char *begin, ssize_t size)
|
||||||
{
|
{
|
||||||
char *res = calloc(size + 1, sizeof(char));
|
char *res = calloc(size + 1, sizeof(char));
|
||||||
|
|
@ -32,6 +70,10 @@ char *stream_init(void)
|
||||||
stream = end_last_token;
|
stream = end_last_token;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
char *trimed_stream = trim_blank_left(stream);
|
||||||
|
remaining_chars -= trimed_stream - stream;
|
||||||
|
stream = trimed_stream;
|
||||||
|
|
||||||
return stream;
|
return stream;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -39,34 +81,29 @@ char *get_token(void)
|
||||||
{
|
{
|
||||||
char *stream = stream_init();
|
char *stream = stream_init();
|
||||||
|
|
||||||
bool inquotes = false;
|
|
||||||
ssize_t i = 0;
|
ssize_t i = 0;
|
||||||
|
|
||||||
while (i < remaining_chars)
|
while (i < remaining_chars)
|
||||||
{
|
{
|
||||||
switch (stream[i])
|
if (is_special_char(stream[i]))
|
||||||
{
|
{
|
||||||
case '\'':
|
if (i == 0) // where we create spe_char token
|
||||||
inquotes = !inquotes;
|
i++;
|
||||||
break;
|
break;
|
||||||
|
}
|
||||||
case ' ' | '\n' | '\t':
|
if (isblank(stream[i]))
|
||||||
if (inquotes)
|
{
|
||||||
break;
|
break;
|
||||||
else
|
}
|
||||||
{
|
else if (is_keyword(stream, i))
|
||||||
// token creation
|
{
|
||||||
// skip blank char
|
i++;
|
||||||
// exit from loop
|
|
||||||
char *token = new_token(stream, i);
|
|
||||||
return token;
|
|
||||||
}
|
|
||||||
default:
|
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
i++;
|
i++;
|
||||||
}
|
}
|
||||||
|
|
||||||
remaining_chars -= i;
|
save_state(stream, i);
|
||||||
return NULL;
|
|
||||||
|
return new_token(stream, i);
|
||||||
}
|
}
|
||||||
|
|
|
||||||
|
|
@ -30,6 +30,7 @@ char *new_token(char *begin, ssize_t size);
|
||||||
* If it is, it calls stream_read() from IO_backend,
|
* If it is, it calls stream_read() from IO_backend,
|
||||||
* and sets [remaining_chars].
|
* and sets [remaining_chars].
|
||||||
* If not, it starts from the end of the last token.
|
* If not, it starts from the end of the last token.
|
||||||
|
* Also trims left blanks before returning.
|
||||||
*
|
*
|
||||||
* @return: char* stream from which we tokenise.
|
* @return: char* stream from which we tokenise.
|
||||||
*/
|
*/
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue