orga-comp/lexer.cpp

#include "orga-comp.h"
std::vector<struct token*>
lexer::lex_file(std::string filename) {
//leer un char -> ver que es -> pasar a un estado
// [a-z] -> id
// [0-9] -> num
// +- -> op
// = -> assign
// ; -> semi
std::ifstream file("tst.cfran", std::ifstream::in);
std::string *value = new std::string();
char c;
while(file.get(c)) {
switch (c) {
case 'a' ... 'z':
case 'A' ... 'Z':
if(!num) {
id = true;
*value += c;
} else {
print_tokens(_tokens);
std::cout << "Invalid identifier " << value << c << '\n';
exit(-1);
}
//ID o condicional
break;
case '0' ... '9':
if(!id) num = true;
*value += c;
//NUM o ID
break;
case '+':
case '-':
if(id) add_token(token::TOK_ID, *value);
if(num) add_token(token::TOK_NUM, *value);
delete value;
value = new std::string();
*value += c;
add_token(token::TOK_OP, *value);
id = false;
num = false;
delete value;
value = new std::string();
//OP
break;
case '=':
if(id) add_token(token::TOK_ID, *value);
if(num) add_token(token::TOK_NUM, *value);
if(cond) {
add_token(token::TOK_EQ, "=");
} else {
add_token(token::TOK_ASSIGN, "");
}
id = false;
num = false;
delete value;
value = new std::string();
break;
case ';':
if(id) add_token(token::TOK_ID, *value);
if(num) add_token(token::TOK_NUM, *value);
add_token(token::TOK_SEMI, "");
id = false;
num = false;
delete value;
value = new std::string();
//STM END
break;
case '\n':
case ' ':
if(id) add_token(token::TOK_ID, *value);
if(num) add_token(token::TOK_NUM, *value);
id = false;
num = false;
delete value;
value = new std::string();
default:
break;
}
}
print_tokens(_tokens);
return _tokens;
}
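
// add_token() heap-allocates a token, records its type and text, and appends it
// to _tokens. Identifier tokens are additionally matched against the reserved
// words "if", "do" and "end"; "if" and "do" also toggle the `cond` flag that
// lex_file() consults to decide whether '=' is an equality test or an assignment.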
void
lexer::add_token(token::type type, std::string value) {
    struct token *tok = new struct token;
    tok->tok_type = type;
    tok->value = value;
    if(type == token::TOK_ID) {
        if(value == "if") {
            tok->tok_type = token::TOK_IF;
            cond = true;
        }
        if(value == "do") {
            tok->tok_type = token::TOK_DO;
            cond = false;
        }
        if(value == "end") tok->tok_type = token::TOK_END;
    }
    _tokens.push_back(tok);
}

lexer::~lexer() {
    for(struct token *tok : _tokens) {
        delete tok;
    }
}
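
A minimal driver sketch for reference: it assumes orga-comp.h declares the lexer class with a default constructor, and the main function, argument handling, and default file name below are hypothetical, only illustrating how lex_file might be called from a separate translation unit.

// example_main.cpp (hypothetical driver, not part of orga-comp itself)
#include "orga-comp.h"

#include <string>
#include <vector>

int main(int argc, char **argv) {
    // Pick the input file from the command line, falling back to a sample name.
    std::string filename = (argc > 1) ? argv[1] : "tst.cfran";
    lexer lex;                                    // assumes a default constructor
    // lex_file() prints the token stream itself and returns it for later phases.
    std::vector<struct token*> tokens = lex.lex_file(filename);
    return tokens.empty() ? 1 : 0;                // non-zero exit if nothing was lexed
}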