#include "lexer.h"

#include <ctype.h>
#include <stdbool.h>
#include <stdlib.h>
#include <string.h>
#include <sys/types.h>

#include "io_backend/io_backend.h"
#include "utils/string_utils/string_utils.h"

/* Lexer state carried across calls: position just past the last token
 * consumed by pop_token(), and how many input bytes remain from there. */
static char *end_last_token;
static ssize_t remaining_chars;

/* @brief: saves state for the next call to the lexer.
 * */
static void save_state(char *stream, ssize_t i)
{
    remaining_chars -= i;
    end_last_token = stream + i;
}

/* @return: true if a special character from the grammar was found,
 * false otherwise.
 * */
static bool is_special_char(char c)
{
    return c == '\'' || c == '\n' || c == ';';
}

/* @return: true if the first `i` characters of `stream` form a keyword
 * from the grammar, false otherwise.
 *
 * `stream` is a raw buffer that is NOT NUL-terminated at `i`, so the
 * comparison must be a bounded prefix compare (strncmp), not strcmp.
 * */
static bool is_keyword(char *stream, ssize_t i)
{
    if (i == 2)
    {
        return strncmp(stream, "if", 2) == 0 || strncmp(stream, "fi", 2) == 0;
    }
    if (i == 4)
    {
        /* BUGFIX: the original returned `strcmp(...) || strcmp(...)` with no
         * `== 0`, which is true for every NON-matching prefix (strcmp yields
         * nonzero on inequality), and compared the whole unterminated stream
         * instead of its first 4 bytes. */
        return strncmp(stream, "then", 4) == 0
            || strncmp(stream, "else", 4) == 0
            || strncmp(stream, "elif", 4) == 0;
    }
    return false;
}

/* @brief: allocates a NUL-terminated copy of the first `size` bytes of
 * `begin`. Caller owns (and must free) the returned buffer.
 * @return: the new token string, or NULL on allocation failure.
 * */
char *new_token(char *begin, ssize_t size)
{
    char *res = calloc(size + 1, sizeof(char));
    if (res == NULL)
        return NULL;
    /* calloc already zeroed the buffer, so the terminator is in place;
     * memcpy is the clear bounded copy here (strncpy adds nothing). */
    memcpy(res, begin, size);
    return res;
}

/* @brief: returns the current read position, refilling from the io backend
 * when the previous buffer is exhausted, and skipping leading blanks.
 * */
char *stream_init(void)
{
    char *stream;
    if (remaining_chars == 0)
    {
        remaining_chars = stream_read(&stream);
    }
    else
    {
        stream = end_last_token;
    }
    char *trimmed_stream = trim_blank_left(stream);
    remaining_chars -= trimmed_stream - stream;
    /* BUGFIX: record the trimmed position. The original left end_last_token
     * pointing before the blanks, so a peek_token() followed by any other
     * call re-trimmed the same blanks and decremented remaining_chars a
     * second time, desynchronizing the remaining-byte count. */
    end_last_token = trimmed_stream;
    return trimmed_stream;
}

/* @brief: scans the next token starting at `stream` and returns its length
 * in bytes. Shared by peek_token() and pop_token(), which previously
 * duplicated this loop.
 * */
static ssize_t scan_token(char *stream)
{
    ssize_t i = 0;
    while (i < remaining_chars)
    {
        if (is_special_char(stream[i]))
        {
            if (i == 0) // a lone special character forms its own token
                i++;
            break;
        }
        /* cast: passing a plain (possibly negative) char to isblank is UB */
        if (isblank((unsigned char)stream[i]))
        {
            break;
        }
        else if (is_keyword(stream, i))
        {
            /* NOTE(review): this keeps one character past the keyword in the
             * token (e.g. "thenabc" yields "thena"), as the original did.
             * Possibly the i++ should be removed here — confirm intent. */
            i++;
            break;
        }
        i++;
    }
    return i;
}

/* @brief: returns the next token without consuming it. Caller frees.
 * */
char *peek_token(void)
{
    char *stream = stream_init();
    ssize_t i = scan_token(stream);
    return new_token(stream, i);
}

/* @brief: returns the next token and advances the lexer past it.
 * Caller frees.
 * */
char *pop_token(void)
{
    char *stream = stream_init();
    ssize_t i = scan_token(stream);
    save_state(stream, i);
    return new_token(stream, i);
}