1 module buffer.compiler;
2 
3 import std.string;
4 import std.conv;
5 import std.array;
6 import std.typecons;
7 import std.algorithm.searching;
8 import std.uni;
9 import std.exception;
10 
/// Compile-time entry point: reads a schema file (resolved through the
/// compiler's -J string-import paths) and yields the generated D source
/// as a string suitable for `mixin(...)`.
template LoadBufferFile(string fileName)
{
    // Emitted at compile time so the build log shows which schema is processed.
    pragma(msg, "Compiling file: ", fileName, "...");
    const char[] LoadBufferFile = compiler!(import(fileName));
}
16 
/// Compile-time entry point: compiles an inline schema string and yields
/// the generated D source as a string suitable for `mixin(...)`.
template LoadBufferScript(string src)
{
    // Only a whitespace-collapsed, length-capped fragment is logged.
    pragma(msg, "Compiling script: ", extractScriptfragment(src), "...");
    const char[] LoadBufferScript = compiler!src;
}
22 
/// Translates a schema source string into D class definitions.
/// Each `message` sentence becomes a `final static class` deriving from
/// `Message`, with one field declaration per schema field and a
/// `serialize` method that delegates to the base class.
private string compiler(string source)()
{
    Sentence[] sentences = parser(lexer(source));

    Appender!string generated;
    generated.put("import buffer;\r\n\r\n");

    foreach (msg; sentences)
    {
        // Class header.
        generated.put("final static class " ~ msg.name ~ " : Message\r\n");
        generated.put("{\r\n");

        // One declaration per schema field.
        foreach (member; msg.fields)
        {
            generated.put("\t" ~ member.type ~ " " ~ member.name ~ ";\r\n");
        }

        // Serialization entry point delegating to Message.
        generated.put("\r\n");
        generated.put("\tubyte[] serialize(string method = string.init)\r\n");
        generated.put("\t{\r\n");
        generated.put("\t\treturn super.serialize!(typeof(this))(this, method);\r\n");
        generated.put("\t}\r\n");
        generated.put("}\r\n");
        generated.put("\r\n");
    }

    return generated.data;
}
52 
53 /// lexer
54 
/// Categories of tokens produced by `lexer`.
enum TokenType
{
    Define            = 1,           // the "message" keyword that starts a definition
    Keyword           = 2,           // built-in field type: int8, uint8, ... string
    Identifier        = 3,           // message name or field name
    SentenceEnd       = 110,         // ;
    DelimiterOpen     = 120,         // {
    DelimiterClose    = 121          // }
}
64 
/// Built-in field type names; any other word token is an identifier.
private const string[] keywords = [
    "int8", "uint8", "int16", "uint16", "int32", "uint32", "int64", "uint64",
    "float32", "float64", "float128", "bool", "char", "string"
];
69 
/// A single lexical token: its category plus the original text.
struct Token
{
    TokenType type;
    string name;

    /// Classifies a word token: the "message" keyword, a built-in type
    /// keyword, or a plain identifier.
    this(string name)
    {
        this.name = name;

        if (name == "message")
            this.type = TokenType.Define;
        else
            this.type = keywords.canFind(name) ? TokenType.Keyword : TokenType.Identifier;
    }

    /// Constructs a token with an explicitly chosen category
    /// (used for punctuation: `{`, `}`, `;`).
    this(TokenType type, string name)
    {
        this.type = type;
        this.name = name;
    }
}
99 
/// Splits schema source text into tokens.
///
/// Recognizes words (keywords/identifiers), the punctuation `{` `;` `}`,
/// and skips `//` line comments and `/* */` block comments.
/// Throws on any other character.
Token[] lexer(string source)
{
    /* State transition diagram:
    0:	none      1: word      2: {      3: ;      4: }
    -1: /        -2: //       -3: /*

    0   -> \s[ \f\n\r\t\v]      0
        -> A..Za..z_            1
        -> {                    2 -> add token -> 0
        -> ;                    3 -> add token -> 0
        -> }                    4 -> add token -> 0
        -> /                    hang state, -1
        -> other                Exception
    1   -> \s[ \f\n\r\t\v]      1 -> add token -> 0
        -> A..Za..z0..9_        1
        -> {                    1 -> add token -> 2 -> add token -> 0
        -> ;                    1 -> add token -> 3 -> add token -> 0
        -> }                    1 -> add token -> 4 -> add token -> 0
        -> /                    hang state, -1
        -> other                Exception
    2   ->                      0
    3   ->                      0
    4   ->                      0
   -1   -> /                    -2
        -> *                    -3
        -> other                Exception
   -2   -> \n                   restore state, hang = 0
        -> other                skip
   -3   -> /                    if previous char is * then restore state & hang = 0, else skip
        -> other                skip
    */

    Token[] tokens;
    int state = 0;
    int stateHang;          // state to restore when a comment ends
    char last;              // previous character; consulted in state -3 only
    string token = string.init;

    // Guarantee trailing whitespace so a word at end-of-input is flushed.
    source ~= "\r\n";
    foreach (ch; source)
    {
        switch (state)
        {
        case 0:
            if (isWhite(ch))
                continue;
            else if (isIdentifierFirstChar(ch))
            {
                token = ch.to!string;
                state = 1;
            }
            else if (ch == '{')
            {
                tokens ~= Token(TokenType.DelimiterOpen, "{");
                state = 0;
            }
            else if (ch == ';')
            {
                tokens ~= Token(TokenType.SentenceEnd, ";");
                state = 0;
            }
            else if (ch == '}')
            {
                tokens ~= Token(TokenType.DelimiterClose, "}");
                state = 0;
            }
            else if (ch == '/')
            {
                stateHang = state;
                state = -1;
            }
            else
            {
                enforce(0, "Invalid character: " ~ ch.to!string);
            }
            break;
        case 1:
            if (isWhite(ch))
            {
                tokens ~= Token(token);
                token = string.init;
                state = 0;
            }
            else if (isIdentifierChar(ch))
            {
                token ~= ch.to!string;
            }
            else if (ch == '{')
            {
                tokens ~= Token(token);
                tokens ~= Token(TokenType.DelimiterOpen, "{");
                token = string.init;
                state = 0;
            }
            else if (ch == ';')
            {
                tokens ~= Token(token);
                tokens ~= Token(TokenType.SentenceEnd, ";");
                token = string.init;
                state = 0;
            }
            else if (ch == '}')
            {
                tokens ~= Token(token);
                tokens ~= Token(TokenType.DelimiterClose, "}");
                token = string.init;
                state = 0;
            }
            else if (ch == '/')
            {
                stateHang = state;
                state = -1;
            }
            else
            {
                enforce(0, "Invalid character: " ~ ch.to!string);
            }
            break;
        case -1:
            if (ch == '/')
            {
                state = -2;
            }
            else if (ch == '*')
            {
                state = -3;
            }
            else
            {
                enforce(0, "Invalid character: " ~ ch.to!string);
            }
            break;
        case -2:
            if (ch == '\n')
            {
                state = stateHang;
                stateHang = 0;
            }
            else
            {
                continue;
            }
            break;
        case -3:
            if ((ch == '/') && (last == '*'))
            {
                state = stateHang;
                stateHang = 0;
            }
            // BUGFIX: fall through to `break` instead of `continue` here.
            // `continue` skipped the `last = ch;` at the bottom of the loop,
            // so `last` stayed '*' (from the opening "/*") and ANY '/' inside
            // a block comment terminated it prematurely (e.g. "/* a/b */").
            break;
        default:
            break;
        }

        last = ch;
    }

    return tokens;
}
263 
/// True if `ch` may start an identifier: an underscore or a letter.
private bool isIdentifierFirstChar(char ch)
{
    return ch == '_' || isAlpha(ch);
}
268 
/// True if `ch` may continue an identifier: an underscore, letter, or digit.
private bool isIdentifierChar(char ch)
{
    return ch == '_' || isAlphaNum(ch);
}
273 
/// Produces a short log-friendly preview of a schema script: runs of
/// whitespace collapse to a single space and the result is capped at
/// 50 characters.
private string extractScriptfragment(string source)
{
    string snippet;

    foreach (ch; source)
    {
        if (snippet.length >= 50)
            break;

        if (isWhite(ch))
        {
            // Collapse consecutive whitespace into one separating space.
            if (snippet.length > 0 && snippet[$ - 1] != ' ')
                snippet ~= ' ';
        }
        else
        {
            snippet ~= ch;
        }
    }

    return snippet;
}
290 
291 /// parser
292 
/// One parsed field declaration: `type name;`.
struct Field
{
    string type;    // built-in type keyword, e.g. "int32"
    string name;    // field identifier
}
298 
/// One parsed `message` definition: its name and its fields, in order.
struct Sentence
{
    string name;
    Field[] fields;
}
304 
/// Parses the token stream into message definitions.
/// Throws on any token sequence that is not a well-formed
/// `message Name { type field; ... }` definition.
Sentence[] parser(Token[] tokens)
{
    Sentence[] sentences;
    int pos;

    // BUGFIX: loop while any token remains. The previous condition
    // (`pos < tokens.length - 1`) silently ignored a single trailing
    // token instead of reporting it; now `parser_define`'s length check
    // raises the syntax error.
    while (pos < cast(int)tokens.length)
    {
        if (tokens[pos].type != TokenType.Define)
        {
            enforce(0, "Syntax error at " ~ tokens[pos].name);
        }

        sentences ~= parser_define(tokens, pos);
    }

    return sentences;
}
321 
/// Parses one `message Name { ... }` definition starting at `pos`;
/// advances `pos` past the closing brace. Throws on a malformed header.
private Sentence parser_define(Token[] tokens, ref int pos)
{
    // A minimal definition needs four tokens: message Name { }
    immutable headerOk = (cast(int)tokens.length - pos >= 4)
        && (tokens[pos].type == TokenType.Define)
        && (tokens[pos + 1].type == TokenType.Identifier)
        && (tokens[pos + 2].type == TokenType.DelimiterOpen);

    if (!headerOk)
    {
        enforce(0, "Syntax error at " ~ tokens[pos].name);
    }

    Sentence sentence;
    sentence.name = tokens[pos + 1].name;
    pos += 3;

    // Collect fields until the closing brace (signalled by a null field).
    while (pos < tokens.length)
    {
        auto field = parser_field(tokens, pos);

        if (field.isNull)
            break;

        sentence.fields ~= field.get;
    }

    return sentence;
}
345 
/// Parses one `type name;` field starting at `pos` and advances `pos`
/// past it. Returns a null Nullable when the next token is the closing
/// brace (consuming it); throws on any other malformed sequence.
private Nullable!Field parser_field(Token[] tokens, ref int pos)
{
    immutable remaining = cast(int)tokens.length - pos;

    // A closing brace terminates the field list.
    if ((remaining >= 1) && (tokens[pos].type == TokenType.DelimiterClose))
    {
        pos++;
        return Nullable!Field();
    }

    immutable wellFormed = (remaining >= 3)
        && (tokens[pos].type == TokenType.Keyword)
        && (tokens[pos + 1].type == TokenType.Identifier)
        && (tokens[pos + 2].type == TokenType.SentenceEnd);

    if (!wellFormed)
    {
        enforce(0, "Syntax error at " ~ tokens[pos].name);
    }

    auto field = Field(tokens[pos].name, tokens[pos + 1].name);
    pos += 3;

    return Nullable!Field(field);
}
368 
369 /*
370 import buffer;
371 
372 final static class Sample : Message
373 {
374     string name;
375     int32 age;
376     int16 sex;
377 
378     ubyte[] serialize(string method = string.init)
379     {
380         return super.serialize!(typeof(this))(this, method);
381     }
382 }
383 */