main.cpp
/*
* main.cpp
* Benjamin Ferid Issa
* February 8th 2017
*
* Based on files provided by Dr. Frank Jones, Computer Science Department, Brigham Young University
*/
#include <iostream>
#include <vector>
#include <string>
#include "lexer.h"
//g++ -std=c++14 main.cpp facts.cpp lexer.cpp singles.cpp state.cpp strings.cpp token.cpp rules.cpp schemes.cpp queries.cpp id.cpp comment.cpp -o main
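//Run example (the input file name here is only a placeholder): ./main someInput.txt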
std::vector<token*>* lex(std::string fileName);
int main(int argC, char* argV[])
{
    //process input arguments
    if(argC != 2)
    {
        std::cout << "USAGE: " << argV[0] << " <inputFileName>" << std::endl;
        return 0;
    }
    //otherwise let's proceed with the lexical analysis of the file...
    std::vector<token*>* tokens = lex(std::string(argV[1]));
    if(tokens == nullptr)
    {
        //Maybe the file was empty?
        //std::cout << "There was an error. Was the file empty?";
        return 0;
    }
    lexer::printAnalysis(tokens);
    return 0;
}
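//lex: reads the named file into a character buffer via the lexer's file helper,
//then runs the lexer over it; returns nullptr if the file could not be read.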
std::vector<token*>* lex(std::string fileName)
{
    std::vector<char>* characters = lexer::fileToVectorOfChars(fileName);
    if(characters == nullptr) //check for failure
    {
        return nullptr;
    }
    lexer ALexer;
    std::vector<token*>* tokens = ALexer.analyze(characters);
    return tokens;
}
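/*
 * For reference: a minimal sketch of the declarations this file assumes lexer.h
 * provides, reconstructed from the calls above. The real header may differ in
 * names and signatures; treat this as an assumption, not the actual interface.
 *
 *   class token;                                                             // one lexed token
 *   class lexer
 *   {
 *   public:
 *       static std::vector<char>* fileToVectorOfChars(std::string fileName); // nullptr on failure
 *       static void printAnalysis(std::vector<token*>* tokens);              // print the token list
 *       std::vector<token*>* analyze(std::vector<char>* characters);         // characters -> tokens
 *   };
 */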