1namespace libfcf;
2
public static class Tokenizer {
    /// <summary>
    /// Reads the file at <paramref name="path"/> and tokenizes its contents.
    /// </summary>
    /// <param name="path">Path of the fcf document to read.</param>
    /// <returns>The token stream produced by <see cref="TokenizeFromMemory"/>.</returns>
    public static Token[] TokenizeFromFile(string path) {
        return TokenizeFromMemory(File.ReadAllLines(path));
    }

    /// <summary>
    /// Tokenizes the given lines of an fcf document into a flat token stream.
    /// Unless the document's first character is '[' (a top-level array), the
    /// whole document is implicitly wrapped in '{' ... '}' so downstream
    /// parsing always sees a dictionary.
    /// </summary>
    /// <param name="fileLines">Lines of the document. The array is not modified.</param>
    /// <returns>Tokens in document order; empty for an empty document.</returns>
    public static Token[] TokenizeFromMemory(string[] fileLines) {
        // FIX: the original indexed fileLines[0][0] unconditionally and threw
        // IndexOutOfRangeException on an empty array or an empty first line.
        if (fileLines.Length == 0) {
            return Array.Empty<Token>();
        }

        // FIX: operate on a copy so the caller's array is not mutated as a
        // side effect of tokenizing.
        string[] lines = (string[])fileLines.Clone();

        // An empty first line also means "not an array", same as any first
        // character other than '['.
        if (lines[0].Length == 0 || lines[0][0] != '[') {
            lines[0] = lines[0].Insert(0, "{");
            lines[lines.Length - 1] = lines[lines.Length - 1].Insert(lines[lines.Length - 1].Length, "}");
        }

        // Strip "//" line comments.
        // NOTE(review): this also truncates a "//" occurring inside a string
        // literal (e.g. "http://...") — confirm that is acceptable for fcf.
        for (var i = 0; i < lines.Length; i++) {
            lines[i] = lines[i].Split("//")[0];
        }

        List<Token> tokens = new List<Token>();

        string currentWord = "";  // characters accumulated since the last delimiter

        int lineIdx;
        int charIdx;

        bool isString = false;    // true while scanning between double quotes
        string[] boolValues = new string[] { "false", "true" };

        // Emits the accumulated word (if any) as a string, number, boolean or
        // identifier token, then clears the accumulator. charIdx is treated as
        // the exclusive end column of the word.
        void HandleCurrentWord() {
            if (currentWord != "") {
                if (isString) {
                    tokens.Add(new TokenString(currentWord, lineIdx, lineIdx, charIdx - currentWord.Length, charIdx));
                } else {
                    float value;
                    // FIX: parse with the invariant culture so numbers tokenize
                    // identically regardless of the machine's regional settings
                    // (current-culture TryParse rejects "1.5" on e.g. de-DE).
                    if (float.TryParse(currentWord,
                                       System.Globalization.NumberStyles.Float,
                                       System.Globalization.CultureInfo.InvariantCulture,
                                       out value)) {
                        // FIX: the token span is the raw word's length; the original
                        // used value.ToString().Length, which is wrong whenever the
                        // round-trip differs from the input (e.g. "1.50" -> "1.5").
                        tokens.Add(new TokenNumber(value, lineIdx, lineIdx, charIdx - currentWord.Length, charIdx));
                    } else if (boolValues.Contains(currentWord.ToLowerInvariant())) {
                        // FIX: invariant lower-casing — the boolean keywords are
                        // syntax, not locale-sensitive text.
                        tokens.Add(new TokenBoolean(currentWord.ToLowerInvariant() == "true", lineIdx, lineIdx, charIdx - currentWord.Length, charIdx));
                    } else {
                        tokens.Add(new TokenIdentifier(currentWord, lineIdx, lineIdx, charIdx - currentWord.Length, charIdx));
                    }
                }
            }
            currentWord = "";
        }

        lineIdx = -1;
        foreach (string line in lines) {
            lineIdx++;
            charIdx = -1;
            foreach (char c in line) {
                charIdx++;

                // Inside a string literal everything except the closing quote
                // is accumulated verbatim.
                if (isString) {
                    if (c == '"') {
                        HandleCurrentWord();
                        isString = false;
                        continue;
                    }
                    currentWord += c;
                    continue;
                }

                switch (c) {
                    case '=': {
                        HandleCurrentWord();
                        tokens.Add(new TokenAssign(lineIdx, lineIdx, charIdx - 1, charIdx));
                    } break;
                    case '"': {
                        isString = true;
                    } break;
                    case ',': {
                        HandleCurrentWord();
                        tokens.Add(new TokenComma(lineIdx, lineIdx, charIdx - 1, charIdx));
                    } break;

                    case '[': {
                        HandleCurrentWord();
                        tokens.Add(new TokenArrayStart(lineIdx, lineIdx, charIdx - 1, charIdx));
                    } break;
                    case ']': {
                        HandleCurrentWord();
                        tokens.Add(new TokenArrayEnd(lineIdx, lineIdx, charIdx - 1, charIdx));
                    } break;

                    case '{': {
                        HandleCurrentWord();
                        tokens.Add(new TokenDictStart(lineIdx, lineIdx, charIdx - 1, charIdx));
                    } break;
                    case '}': {
                        HandleCurrentWord();
                        tokens.Add(new TokenDictEnd(lineIdx, lineIdx, charIdx - 1, charIdx));
                    } break;
                    default: {
                        if (c != ' ') {
                            currentWord += c;
                        } else {
                            HandleCurrentWord();
                        }
                    } break;
                }
            }

            // FIX: flush the pending word at end of line so the last token on a
            // line no longer merges with the first word of the following line
            // (the original tokenized "x = 5\ny = 2" with identifier "5y").
            // Inside a string literal the word is deliberately left open.
            if (!isString) {
                charIdx = line.Length;  // exclusive end column for the flushed word
                HandleCurrentWord();
            }
        }

        return tokens.ToArray();
    }

    // Formats a token for diagnostics: string values are shown in double
    // quotes, everything else in single quotes.
    // NOTE(review): relies on every token passed here exposing a `value`
    // member via `dynamic`; punctuation tokens (TokenAssign, TokenComma, ...)
    // would throw RuntimeBinderException — confirm callers only pass
    // value-carrying tokens.
    internal static string GetTokenAsHuman(Token t)
    {
        if (t.GetType() == typeof(TokenString))
        {
            return $"[{t}: \"{((dynamic)t).value}\"]";
        }
        else
        {
            return $"[{t}: '{((dynamic)t).value}']";
        }
    }
}