1 module buffer.compiler;
2 
3 import std.string;
4 import std.conv;
5 import std.array;
6 import std.typecons;
7 import std.algorithm.searching;
8 import std.uni;
9 
/// Compile-time entry point: reads `fileName` via `import(fileName)` (the file
/// must be on the compiler's string-import path) and compiles its message
/// definitions into D source text for the caller to mix in.
template LoadBufferFile(string fileName)
{
    // Progress note emitted during compilation.
    pragma(msg, "Compiling file: ", fileName, "...");
    const char[] LoadBufferFile = compiler!(import(fileName));
}
15 
/// Compile-time entry point: compiles an inline message-definition script
/// (passed as a string literal) into D source text for the caller to mix in.
template LoadBufferScript(string src)
{
    // Progress note with a whitespace-collapsed, length-capped preview of the script.
    pragma(msg, "Compiling script: ", extractScriptfragment(src), "...");
    const char[] LoadBufferScript = compiler!src;
}
21 
/// Translates message-definition source into D source code: one
/// `final static class <name> : buffer.message.Message` per definition, with
/// its fields, a constructor setting `_messageId`, a `static this` that
/// registers the class (and rejects duplicate ids), and a `serialize` method.
private string compiler(string source)()
{
    Sentence[] sentences = parser(lexer(source));

    Appender!string code;
    code.put("import buffer.message;\r\n\r\n");

    foreach (sentence; sentences)
    {
        immutable id = sentence.id;
        immutable name = sentence.name;

        // Class header and field declarations.
        code.put("final static class " ~ name ~ " : buffer.message.Message\r\n{\r\n");
        foreach (field; sentence.fields)
        {
            code.put("\t" ~ field.type ~ " " ~ field.name ~ ";\r\n");
        }

        // Constructor: records this message's id on the instance.
        code.put("\r\n\tthis()\r\n\t{\r\n\t\t_messageId = " ~ id ~ ";\r\n\t}\r\n\r\n");

        // Module constructor: registers the class by id, failing fast on conflicts.
        code.put("\tstatic this()\r\n\t{\r\n"
            ~ "\t\tif (" ~ id ~ " in _messages)\r\n\t\t{\r\n"
            ~ "\t\t\tassert(0, \"message id conflict: " ~ id ~ "\");\r\n\t\t}\r\n"
            ~ "\t\t_messages[" ~ id ~ "] = " ~ name ~ ".classinfo;\r\n\t}\r\n\r\n");

        // Serialization forwarder to the base-class implementation.
        code.put("\tubyte[] serialize(string method = string.init)\r\n\t{\r\n"
            ~ "\t\treturn super.serialize!(typeof(this))(this, method);\r\n\t}\r\n}\r\n\r\n");
    }

    return code.data;
}
64 
65 /// lexer
66 
/// Token categories produced by the lexer.  Values >= 100 are punctuation
/// tokens; the numbering gaps mirror the original grouping.
private enum TokenType
{
    Define            = 1,           // the "message" keyword
    Keyword           = 2,           // field type keyword: int8, uint8, ... string
    Identifier        = 3,           // message name or field name
    Id                = 4,           // message id: digits, positive integer
    IdOpen            = 100,         // (
    IdClose           = 101,         // )
    SentenceEnd       = 110,         // ;
    DelimiterOpen     = 120,         // {
    DelimiterClose    = 121          // }
}
79 
/// Scalar type names recognized as field-type keywords in the message DSL.
/// FIX: the original list contained "bool," (a trailing comma INSIDE the string
/// literal), so `bool` fields could never be declared — the lexer classified
/// "bool" as an Identifier and the parser then rejected the field.
private const string[] keywords = [
    "int8", "uint8", "int16", "uint16", "int32", "uint32", "int64", "uint64",
    "float32", "float64", "bool", "char", "string"
];
84 
/// A single lexical token: its category plus the exact source text.
private struct Token
{
    TokenType type;
    string name;

    /// Classifies a bare word: the "message" keyword, a type keyword, or an
    /// identifier (message/field name).
    this(string name)
    {
        this.name = name;

        if (name == "message")
            this.type = TokenType.Define;
        else if (keywords.canFind(name))
            this.type = TokenType.Keyword;
        else
            this.type = TokenType.Identifier;
    }

    /// Builds a token whose category is already known (punctuation, ids).
    this(TokenType type, string name)
    {
        this.type = type;
        this.name = name;
    }
}
114 
// Tokenizer for the message-definition DSL.
//
// State machine:
//    0: outside any token          1: reading a word (keyword/identifier)
//    5: expecting id digits        6: reading id digits
//   -1: saw '/'                   -2: inside a line comment
//   -3: inside a block comment
// stateHang remembers the state to restore when a comment ends.
//
// Fixes relative to the original implementation:
//  - state 0 now emits an IdOpen token for '(', so "message (3)" (with
//    whitespace before the parenthesis) parses the same as "message(3)";
//    previously only state 1 emitted IdOpen and the parser rejected the
//    spaced form;
//  - `last` is updated on every character; the original skipped the update
//    via `continue`, which left `last` stuck at the comment-opening '*' and
//    made any '/' inside a block comment close the comment early;
//  - when a comment ends while a word was being read (stateHang == 1), the
//    word is flushed, so a comment acts as a token separator instead of
//    splicing the characters on both sides into one identifier.
private Token[] lexer(string source)
{
    Token[] tokens;
    int state = 0;              // current state (see diagram above)
    int stateHang;              // state to restore after a comment ends
    char last;                  // previous character, used to detect the end of a block comment
    string token = string.init; // word or id accumulated so far

    // A trailing newline guarantees the final in-progress token is flushed.
    source ~= "\r\n";

    foreach (ch; source)
    {
        switch (state)
        {
        case 0:
            if (isWhite(ch))
            {
                // Whitespace between tokens: nothing to do.
            }
            else if (isIdentifierFirstChar(ch))
            {
                token = ch.to!string;
                state = 1;
            }
            else if (ch == '{')
            {
                tokens ~= Token(TokenType.DelimiterOpen, "{");
            }
            else if (ch == ';')
            {
                tokens ~= Token(TokenType.SentenceEnd, ";");
            }
            else if (ch == '}')
            {
                tokens ~= Token(TokenType.DelimiterClose, "}");
            }
            else if (ch == '(')
            {
                // Emit IdOpen here as well (fix: previously only state 1 did).
                tokens ~= Token(TokenType.IdOpen, "(");
                state = 5;
            }
            else if (ch == '/')
            {
                stateHang = state;
                state = -1;
            }
            else
            {
                assert(0, "Invalid character: " ~ ch.to!string);
            }
            break;
        case 1:
            if (isWhite(ch))
            {
                tokens ~= Token(token);
                token = string.init;
                state = 0;
            }
            else if (isIdentifierChar(ch))
            {
                token ~= ch.to!string;
            }
            else if (ch == '{')
            {
                tokens ~= Token(token);
                tokens ~= Token(TokenType.DelimiterOpen, "{");
                token = string.init;
                state = 0;
            }
            else if (ch == ';')
            {
                tokens ~= Token(token);
                tokens ~= Token(TokenType.SentenceEnd, ";");
                token = string.init;
                state = 0;
            }
            else if (ch == '}')
            {
                tokens ~= Token(token);
                tokens ~= Token(TokenType.DelimiterClose, "}");
                token = string.init;
                state = 0;
            }
            else if (ch == '(')
            {
                tokens ~= Token(token);
                tokens ~= Token(TokenType.IdOpen, "(");
                token = string.init;
                state = 5;
            }
            else if (ch == '/')
            {
                stateHang = state;
                state = -1;
            }
            else
            {
                assert(0, "Invalid character: " ~ ch.to!string);
            }
            break;
        case 5:
            // The id list must start with a digit.
            if (isNumber(ch))
            {
                token ~= ch.to!string;
                state = 6;
            }
            else
            {
                assert(0, "Invalid character: " ~ ch.to!string);
            }
            break;
        case 6:
            if (isNumber(ch))
            {
                token ~= ch.to!string;
            }
            else if (ch == ')')
            {
                tokens ~= Token(TokenType.Id, token);
                tokens ~= Token(TokenType.IdClose, ")");
                token = string.init;
                state = 0;
            }
            else
            {
                assert(0, "Invalid character: " ~ ch.to!string);
            }
            break;
        case -1:
            // Second character of a comment opener.
            if (ch == '/')
            {
                state = -2;
            }
            else if (ch == '*')
            {
                state = -3;
            }
            else
            {
                assert(0, "Invalid character: " ~ ch.to!string);
            }
            break;
        case -2:
            if (ch == '\n')
            {
                // Line comment ends.  Flush an interrupted word (fix: the
                // comment now separates tokens instead of resuming the word).
                if (stateHang == 1)
                {
                    tokens ~= Token(token);
                    token = string.init;
                    state = 0;
                }
                else
                {
                    state = stateHang;
                }
                stateHang = 0;
            }
            break;
        case -3:
            if ((ch == '/') && (last == '*'))
            {
                // Block comment ends; same flush rule as line comments.
                if (stateHang == 1)
                {
                    tokens ~= Token(token);
                    token = string.init;
                    state = 0;
                }
                else
                {
                    state = stateHang;
                }
                stateHang = 0;
            }
            break;
        default:
            break;
        }

        // Must run for EVERY character; the original reached this through
        // `continue` in some branches and never updated `last`, breaking
        // block-comment termination.
        last = ch;
    }

    return tokens;
}
326 
/// True if `ch` may start an identifier: a letter or an underscore.
private bool isIdentifierFirstChar(char ch)
{
    return ch == '_' || isAlpha(ch);
}
331 
/// True if `ch` may appear after the first character of an identifier:
/// letters, digits, or an underscore.
private bool isIdentifierChar(char ch)
{
    return ch == '_' || isAlphaNum(ch);
}
336 
/// Builds a short preview of a script for the compile-time progress message:
/// runs of whitespace collapse to a single space, and the result is capped at
/// 50 characters.
private string extractScriptfragment(string source)
{
    enum maxLength = 50;
    string fragment;

    foreach (ch; source)
    {
        if (fragment.length >= maxLength)
            break;

        if (isWhite(ch))
        {
            // Collapse whitespace: emit one space unless we just did.
            if ((fragment.length > 0) && (fragment[$ - 1] != ' '))
                fragment ~= ' ';
        }
        else
        {
            fragment ~= ch;
        }
    }

    return fragment;
}
353 
354 /// parser
355 
/// One field of a message definition: `<type> <name>;`.
private struct Field
{
    string type;    // a type keyword from `keywords` (e.g. "int32")
    string name;    // field identifier as written in the script
}
361 
/// One parsed `message(<id>) <name> { ... }` definition.
private struct Sentence
{
    string id;      // message id, kept in its decimal string form
    string name;    // class name to generate
    Field[] fields; // declared fields, in source order
}
368 
/// Parses the token stream into a list of message definitions.
/// Every top-level token must start a `message` definition.
private Sentence[] parser(Token[] tokens)
{
    Sentence[] sentences;
    int pos;

    // FIX: the condition was `pos < tokens.length - 1`.  `tokens.length` is
    // size_t, so an empty stream underflowed to size_t.max and the loop read
    // tokens[0] out of bounds; it also silently dropped a single trailing
    // token instead of reporting it as a syntax error.
    while (pos < tokens.length)
    {
        if (tokens[pos].type != TokenType.Define)
        {
            assert(0, "Syntax error at " ~ tokens[pos].name);
        }

        sentences ~= parser_define(tokens, pos);
    }

    return sentences;
}
385 
/// Parses one definition starting at `pos`, which must have the shape
/// `message ( <id> ) <Name> {` followed by fields and a closing `}`.
/// Advances `pos` past the whole definition.
private Sentence parser_define(Token[] tokens, ref int pos)
{
    immutable bool headerOk = (tokens.length - pos >= 7)
        && (tokens[pos].type == TokenType.Define)
        && (tokens[pos + 1].type == TokenType.IdOpen)
        && (tokens[pos + 2].type == TokenType.Id)
        && (tokens[pos + 3].type == TokenType.IdClose)
        && (tokens[pos + 4].type == TokenType.Identifier)
        && (tokens[pos + 5].type == TokenType.DelimiterOpen);

    if (!headerOk)
    {
        assert(0, "Syntax error at " ~ tokens[pos].name);
    }

    Sentence sentence;
    sentence.id = tokens[pos + 2].name;
    sentence.name = tokens[pos + 4].name;
    pos += 6;   // consume: message ( id ) Name {

    // Collect fields until parser_field consumes the closing '}'.
    while (pos < tokens.length)
    {
        auto field = parser_field(tokens, pos);

        if (field.isNull)
            break;

        sentence.fields ~= field.get;
    }

    return sentence;
}
413 
/// Parses one field (`<keyword> <identifier> ;`) at `pos` and advances past it.
/// Returns a null Nullable after consuming the '}' that closes the field list.
private Nullable!Field parser_field(Token[] tokens, ref int pos)
{
    // '}' terminates the field list: consume it and signal "no more fields".
    if ((tokens.length - pos >= 1) && (tokens[pos].type == TokenType.DelimiterClose))
    {
        pos++;
        return Nullable!Field.init;
    }

    immutable bool shapeOk = (tokens.length - pos >= 3)
        && (tokens[pos].type == TokenType.Keyword)
        && (tokens[pos + 1].type == TokenType.Identifier)
        && (tokens[pos + 2].type == TokenType.SentenceEnd);

    if (!shapeOk)
    {
        assert(0, "Syntax error at " ~ tokens[pos].name);
    }

    auto field = Field(tokens[pos].name, tokens[pos + 1].name);
    pos += 3;   // consume: type name ;

    return Nullable!Field(field);
}
436 
437 /*
438 import buffer.message;
439 
440 final static class Sample : buffer.message.Message
441 {
442     string name;
443     int32 age;
444     int16 sex;
445 
446     this()
447     {
448         _messageId = 3;
449     }
450 
451     static this()
452     {
453         if (3 in _messages)
454         {
            assert(0, "message id conflict: 3");
456         }
457         _messages[3] = Sample.classinfo;
458     }
459 
460     ubyte[] serialize(string method = string.init)
461     {
462         return super.serialize!(typeof(this))(this, method);
463     }
464 }
465 */