HyperDbg Debugger
ll1_parser.LL1Parser Class Reference

Public Member Functions

 __init__ (self, SourceFile, HeaderFile, CommonHeaderFile, CommonHeaderFileScala)
 
 Run (self)
 
 SetLalr (self, Lalr, LalrParseTable)
 
 Parse (self, Tokens)
 
 ReadGrammar (self)
 
 WriteSemanticMaps (self)
 
 WriteRegisterMaps (self)
 
 WritePseudoRegMaps (self)
 
 WriteKeywordList (self)
 
 WriteOperatorsList (self)
 
 WriteMaps (self)
 
 WriteLhsList (self)
 
 GetType (self, Var)
 
 WriteRhsList (self)
 
 WriteRhsSize (self)
 
 WriteTerminalList (self)
 
 WriteNoneTermianlList (self)
 
 WriteParseTable (self)
 
 FindAllFirsts (self)
 
 PrintFirsts (self)
 
 GetNoneTerminalId (self, nonterminal)
 
 GetTerminalId (self, Terminal)
 
 FillParseTable (self)
 
 PrintParseTable (self)
 
 FindAllPredicts (self)
 
 print_predicts (self)
 
 FindAllFollows (self)
 
 GetNextVar (self, Rhs, p)
 
 PrintFollows (self)
 
 IsNoneTerminal (self, X)
 
 IsSemanticRule (self, X)
 
 IsNullable (self, s)
 

Public Attributes

 GrammarFile
 
 SourceFile
 
 HeaderFile
 
 CommonHeaderFile
 
 CommonHeaderFileScala
 
 RhsList
 
 LhsList
 
 TerminalSet
 
 NonTerminalSet
 
 Start
 
 MAXIMUM_RHS_LEN
 
 SPECIAL_TOKENS
 
 INVALID
 
 FunctionsDict
 
 OperatorsTwoOperand
 
 OperatorsOneOperand
 
 RegistersList
 
 PseudoRegistersList
 
 keywordList
 
 SemantiRulesList
 
 FirstDict
 
 FollowDict
 
 NonTerminalList
 
 Lalr
 
 LalrParseTable
 
 TerminalList
 
 ParseTable
 
 PredictDict
 

Constructor & Destructor Documentation

◆ __init__()

ll1_parser.LL1Parser.__init__ ( self,
SourceFile,
HeaderFile,
CommonHeaderFile,
CommonHeaderFileScala )
21 def __init__(self, SourceFile, HeaderFile, CommonHeaderFile, CommonHeaderFileScala):
22 # The file which contains the grammar of the language
23 self.GrammarFile = open("Grammar.txt", "r")
24
25 # The output files which are used by the parser for parsing the input
26 self.SourceFile = SourceFile
27 self.HeaderFile = HeaderFile
28 self.CommonHeaderFile = CommonHeaderFile
29 self.CommonHeaderFileScala = CommonHeaderFileScala
30
31
32 # Lists which are used for storing the rules:
33 # Right Hand Side(Rhs)
34 self.RhsList = []
35
36 # Left Hand Side(Lhs)
37 self.LhsList = []
38
39 # Sets of all terminals and nonterminals
40 self.TerminalSet = set()
41 self.NonTerminalSet = set()
42
43 # Start variable
44 self.Start = ""
45
46
47 # maximum of "Right Hand Side(Rhs)" length
48 self.MAXIMUM_RHS_LEN = 0
49
50
51 self.SPECIAL_TOKENS = ['%', '+', '~', '++', '-', '--', "*", "/", "=", "==", "!=", ",", ";", "(", ")", "{", "}", "|", "||", ">>", ">=", "<<", "<=", "&", "&&", "^"]
52
53 # INVALID rule indicator
54 self.INVALID = 0x80000000
55
56 self.FunctionsDict = dict()
57 self.OperatorsTwoOperand = []
58 self.OperatorsOneOperand = []
59 self.RegistersList = []
60 self.PseudoRegistersList = []
61 self.keywordList = []
62 self.SemantiRulesList = []
63
64
65 # Dictionaries used for storing first and follow sets
66 self.FirstDict = dict()
67 self.FollowDict = dict()
68
69
70
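Note that the constructor opens "Grammar.txt" from the current working directory and only stores the four output handles; nothing is generated until Run() is called. A minimal sketch of constructing the generator, assuming ll1_parser.py and Grammar.txt sit in the working directory (the output file names below are placeholders, not the names used by the HyperDbg build):

# Minimal sketch; the output file names are placeholders.
from ll1_parser import LL1Parser

with open("parse_table.c", "w") as Source, \
     open("parse_table.h", "w") as Header, \
     open("common_defs.h", "w") as Common, \
     open("common_defs.scala", "w") as CommonScala:
    Generator = LL1Parser(Source, Header, Common, CommonScala)
    # Generator.Run() would now read Grammar.txt and emit the generated tables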

Member Function Documentation

◆ FillParseTable()

ll1_parser.LL1Parser.FillParseTable ( self)
814 def FillParseTable(self):
815 self.ParseTable = [[self.INVALID for y in range(len(self.TerminalList))] for X in range(len(self.NonTerminalList))]
816
817 RuleId = 0
818 for Lhs in self.LhsList:
819 i = self.GetNoneTerminalId(Lhs)
820
821 j = 0
822 for Terminal in self.TerminalList:
823 if Terminal in self.PredictDict[RuleId]:
826
827 if self.ParseTable[i][j] == self.INVALID:
828 self.ParseTable[i][j] = RuleId
829
830 else:
831
832 print("Error! Input grammar is not LL1.")
833 exit()
834
835 j += 1
836
837 RuleId += 1
838
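The fill rule is the standard LL(1) construction: cell [A][t] of the parse table receives the id of the rule A -> alpha whose predict set contains the terminal t, and a cell that would be claimed twice means the grammar is not LL(1), so generation aborts. A self-contained sketch of the same logic on a hypothetical toy grammar (the rules and predict sets below are illustrative, not taken from Grammar.txt):

# Toy grammar: rule 0 is S -> ( S ) S, rule 1 is S -> eps.
INVALID = 0x80000000
NonTerminals = ["S"]
Terminals = ["(", ")", "$"]
LhsList = ["S", "S"]
Predict = {0: {"("}, 1: {")", "$"}}

Table = [[INVALID] * len(Terminals) for _ in NonTerminals]
for RuleId, Lhs in enumerate(LhsList):
    i = NonTerminals.index(Lhs)
    for j, Terminal in enumerate(Terminals):
        if Terminal in Predict[RuleId]:
            if Table[i][j] != INVALID:
                raise SystemExit("Error! Input grammar is not LL1.")
            Table[i][j] = RuleId

print(Table)   # [[0, 1, 1]]: column "(" picks rule 0, columns ")" and "$" pick rule 1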

◆ FindAllFirsts()

ll1_parser.LL1Parser.FindAllFirsts ( self)
754 def FindAllFirsts(self):
755
756 self.FirstDict = {}
757 for Symbol in self.NonTerminalList:
758 self.FirstDict[Symbol] = set()
759
760 t = 0
761 while True:
762 Updated = False
763 i = 0
764 for Lhs in self.LhsList:
765 Rhs = self.RhsList[i]
766 Temp = set(self.FirstDict[Lhs])
767
768
769 if Rhs[0] == "eps":
770 pass
771 elif self.IsNoneTerminal(Rhs[0]):
772 self.FirstDict[Lhs] = self.FirstDict[Lhs].union(self.FirstDict[Rhs[0]])
773 p = 0
774 while self.IsNullable(Rhs[p]):
775 self.FirstDict[Lhs] = self.FirstDict[Lhs].union(self.FirstDict[Rhs[p+1]])
776 p += 1
777 elif self.IsSemanticRule(Rhs[0]):
778 if self.IsNoneTerminal(Rhs[0]):
779 self.FirstDict[Lhs] = self.FirstDict[Lhs].union(self.FirstDict[Rhs[1]])
780 else:
781 self.FirstDict[Lhs].add(Rhs[1])
782 else:
783 self.FirstDict[Lhs].add(Rhs[0])
784 i += 1
785
786 if Temp != self.FirstDict[Lhs]:
787 Updated = True
788 p += 1
789 if not Updated:
790 break
791
792
793
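FindAllFirsts computes FIRST sets as a fixed point: for every rule A -> X1 X2 ..., FIRST(A) absorbs FIRST(X1) and keeps absorbing FIRST(X2), FIRST(X3), ... while the preceding symbols are nullable, and the outer loop repeats until no set changes. A standalone sketch of that fixed point on a hypothetical two-nonterminal grammar (illustrative only, not the script-engine grammar):

# Toy grammar: S -> A b, A -> a A, A -> eps.
Rules = [("S", ["A", "b"]), ("A", ["a", "A"]), ("A", ["eps"])]
NonTerminals = {"S", "A"}
Nullable = {"A"}                     # A derives eps directly

First = {N: set() for N in NonTerminals}
Updated = True
while Updated:
    Updated = False
    for Lhs, Rhs in Rules:
        Before = set(First[Lhs])
        for Symbol in Rhs:
            if Symbol == "eps":
                break
            if Symbol in NonTerminals:
                First[Lhs] |= First[Symbol]
                if Symbol not in Nullable:
                    break
            else:
                First[Lhs].add(Symbol)
                break
        if First[Lhs] != Before:
            Updated = True

print(First)   # e.g. {'S': {'a', 'b'}, 'A': {'a'}} (ordering may differ)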

◆ FindAllFollows()

ll1_parser.LL1Parser.FindAllFollows ( self)
894 def FindAllFollows(self):
895 self.FollowDict = {}
896 for Symbol in self.NonTerminalList:
897 self.FollowDict[Symbol] = set()
898 if Symbol == self.Start:
899 self.FollowDict[Symbol].add('$')
900
901 t = 0
902 while True:
903 Updated = False
904
905 i = 0
906 for Lhs in self.LhsList:
907 Rhs = self.RhsList[i]
908
909 p = 0
910 for Symbol in Rhs:
911
912 if self.IsNoneTerminal(Symbol):
913
914 Temp = set(self.FollowDict[Symbol])
915 if p == len(Rhs)-1:
916 self.FollowDict[Symbol] = self.FollowDict[Symbol].union(self.FollowDict[Lhs])
917
918
919
920 else:
921
922 NextVar = self.GetNextVar(Rhs, p)
923 if self.IsNoneTerminal(NextVar):
924 self.FollowDict[Symbol] = self.FollowDict[Symbol].union(self.FirstDict[NextVar])
925 if self.IsNullable(NextVar):
926 self.FollowDict[Symbol] = self.FollowDict[Symbol].union(self.FollowDict[NextVar])
927 elif self.IsSemanticRule(NextVar):
928 pass
929 else:
930 self.FollowDict[Symbol].add(NextVar)
931
932 if p == len(Rhs)-2:
933 NextVar = self.GetNextVar(Rhs, p)
934
935 if self.IsNullable(NextVar):
936 self.FollowDict[Symbol] = self.FollowDict[Symbol].union(self.FollowDict[Lhs])
937 self.FollowDict[Lhs] = self.FollowDict[Lhs].union(self.FollowDict[NextVar])
938
939
940
941 if Temp != self.FollowDict[Symbol]:
942 Updated = True
943 p += 1
944 i += 1
945 t += 1
946
947 if not Updated:
948 break
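The FOLLOW computation applies the usual rules: for every occurrence of a nonterminal B in a rule A -> alpha B beta, FOLLOW(B) absorbs FIRST(beta), and when beta is empty or nullable it also absorbs FOLLOW(A); the start symbol's FOLLOW set additionally contains "$". A simplified standalone sketch for a hypothetical grammar in which the symbol after a nonterminal is always a terminal, so the nullable-beta case handled by the real method does not arise (illustrative only):

# Toy grammar: S -> ( S ) S, S -> eps; S is the start symbol.
Rules = [("S", ["(", "S", ")", "S"]), ("S", ["eps"])]
NonTerminals = {"S"}
Follow = {"S": {"$"}}

Updated = True
while Updated:
    Updated = False
    for Lhs, Rhs in Rules:
        for p, Symbol in enumerate(Rhs):
            if Symbol not in NonTerminals:
                continue
            Before = set(Follow[Symbol])
            if p == len(Rhs) - 1:              # last symbol: inherit FOLLOW(Lhs)
                Follow[Symbol] |= Follow[Lhs]
            else:                              # a terminal follows directly
                Follow[Symbol].add(Rhs[p + 1])
            if Follow[Symbol] != Before:
                Updated = True

print(Follow)   # {'S': {')', '$'}} (ordering may differ)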

◆ FindAllPredicts()

ll1_parser.LL1Parser.FindAllPredicts ( self)
854 def FindAllPredicts(self):
855 self.PredictDict = {}
856 for i in range(len(self.LhsList)):
857 self.PredictDict[i] = set()
858
859 i = 0
860 for Lhs in self.LhsList:
861 Rhs = self.RhsList[i]
862 IsRightNullable = True
863 for Symbol in Rhs:
864 if self.IsSemanticRule(Symbol):
865 pass
866
867 elif Symbol == "eps":
868 IsRightNullable = True
869 break
870 elif self.IsNoneTerminal(Symbol):
871 self.PredictDict[i] |= self.FirstDict[Symbol]
872 if not self.IsNullable(Symbol):
873 IsRightNullable = False
874 break
875 else:
876 self.PredictDict[i].add(Symbol)
877 IsRightNullable = False
878 break
879
880 if IsRightNullable:
881 self.PredictDict[i] |= self.FollowDict[Lhs]
882 i += 1
883
884
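The predict set of a rule A -> alpha is FIRST(alpha), plus FOLLOW(A) whenever alpha can derive the empty string; semantic-rule symbols (those starting with '@') are skipped while scanning alpha. For the toy grammar used in the sketches above this yields exactly the predict table fed to the FillParseTable sketch:

# Illustrative predict sets for the toy grammar S -> ( S ) S | eps:
#   PREDICT(S -> ( S ) S) = FIRST("( S ) S") = {'('}
#   PREDICT(S -> eps)     = FOLLOW(S)        = {')', '$'}
Predict = {0: {"("}, 1: {")", "$"}}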

◆ GetNextVar()

ll1_parser.LL1Parser.GetNextVar ( self,
Rhs,
p )
949 def GetNextVar(self, Rhs, p):
950 if p == len(Rhs)-1:
951 return None
952
953 X = p +1
954 while self.IsSemanticRule(Rhs[X]) and X + 1 < len(Rhs):
955 X+=1
956 return Rhs[X]
957

◆ GetNoneTerminalId()

ll1_parser.LL1Parser.GetNoneTerminalId ( self,
nonterminal )
801 def GetNoneTerminalId(self, nonterminal):
802 for i in range(len(self.NonTerminalList)):
803 if nonterminal == self.NonTerminalList[i]:
804 return i
805 return -1
806

◆ GetTerminalId()

ll1_parser.LL1Parser.GetTerminalId ( self,
Terminal )
807 def GetTerminalId(self, Terminal):
808 for i in range(len(self.TerminalList)):
809 if Terminal == self.TerminalList[i]:
810 return i
811 return -1
812
813

◆ GetType()

ll1_parser.LL1Parser.GetType ( self,
Var )
654 def GetType(self,Var):
655 if self.IsNoneTerminal(Var):
656 return "NON_TERMINAL"
657 elif self.IsSemanticRule(Var):
658 return "SEMANTIC_RULE"
659
660 elif Var == "eps":
661 return "EPSILON"
662
663 elif Var in self.SPECIAL_TOKENS:
664 return "SPECIAL_TOKEN"
665 elif Var[0] == "_":
666 return Var[1:].upper()
667 else:
668 return "KEYWORD"
669
670
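GetType classifies a symbol purely by its spelling: a leading capital letter means NON_TERMINAL, a leading '@' means SEMANTIC_RULE, the literal "eps" means EPSILON, membership in SPECIAL_TOKENS means SPECIAL_TOKEN, a leading underscore returns the rest of the name upper-cased, and anything else is a KEYWORD. A few illustrative calls, assuming an LL1Parser instance named Parser (the sample symbols are hypothetical):

# Parser.GetType("EXPRESSION")  -> "NON_TERMINAL"   (starts with a capital letter)
# Parser.GetType("@MOV")        -> "SEMANTIC_RULE"  (starts with '@')
# Parser.GetType("eps")         -> "EPSILON"
# Parser.GetType("+")           -> "SPECIAL_TOKEN"  (listed in SPECIAL_TOKENS)
# Parser.GetType("_hex")        -> "HEX"            (leading '_' stripped, rest upper-cased)
# Parser.GetType("print")       -> "KEYWORD"        (none of the above)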

◆ IsNoneTerminal()

ll1_parser.LL1Parser.IsNoneTerminal ( self,
X )
965 def IsNoneTerminal(self,X):
966 if X[0].isupper():
967 return True
968 else:
969 return False
970

◆ IsNullable()

ll1_parser.LL1Parser.IsNullable ( self,
s )
977 def IsNullable(self, s):
978
979 if not s[0].isupper():
980 return False
981 i = 0
982 for Lhs in self.LhsList:
983 Rhs = self.RhsList[i]
984
985 if Lhs == s:
986
987 if Rhs[0] == "eps":
988 return True
989
990 p = 0
991 while True:
992 if self.IsNullable(Rhs[p]):
993 return True
994 else:
995 break
996 p += 1
997 i +=1
998
999 return False

◆ IsSemanticRule()

ll1_parser.LL1Parser.IsSemanticRule ( self,
X )
971 def IsSemanticRule(self, X):
972 if X[0] == '@':
973 return True
974 else:
975 return False
976

◆ Parse()

ll1_parser.LL1Parser.Parse ( self,
Tokens )
167 def Parse(self, Tokens):
168 # Initialize Parse Stack
169 Stack = []
170
171 # Initialize Match Stack
172 MatchedStack = []
173
174 # Push the end of stack indicator into stack
175 Stack.append("$")
176
177 # Push start variable into stack
178 Stack.append(self.Start)
179
180 # Assign the top variable an invalid value
181 Top = ""
182
183 # Read input
184 Tokens, CurrentIn = Read(Tokens)
185
186 # Temporary values counter, initialized to 0
187 TempCounter = 0
188
189 # While Stack is not empty repeat
190 while Top != "$":
191
192 # Read top of stack
193 Top = GetTop(Stack)
194 # print(Stack)
195 # print("Top:", Top)
196 # print("CurrentIn:", CurrentIn, "\n\n")
197 # x = input()
198
199
200
201 if self.IsNoneTerminal(Top):
202 if Top == "BOOLEAN_EXPRESSION":
203 print("Top == BOOLEAN_EXPRESSION:")
204 print(Stack)
205 print(Tokens)
206 print(CurrentIn)
207 print("=====================================\n\n")
208
209 Stack.pop()
210
211 #------------------------------------------------------
212 # Get BE Tokens
213 BETokensSize = 0
214 OpenParanthesesCount = 1
215
216 i = 0
217 while True:
218 TempToken = Tokens[i]
219
220 i +=1
221 if TempToken == '(' :
222 OpenParanthesesCount += 1
223 BETokensSize += 1
224 elif TempToken == ')':
225 OpenParanthesesCount -= 1
226 if OpenParanthesesCount == 0:
227 break
228 else:
229 BETokensSize +=1
230 else:
231 BETokensSize +=1
232 print("TempToken: ", end = "")
233 print(TempToken)
234 print("BETokensSize: ", end = "")
235 print(BETokensSize)
236
237
238 # BETokens.append("$end")
239 print("BETokensSize: ", end = "")
240 print(BETokensSize)
241 y = input()
242 NewTokens = [CurrentIn]
243 for x in Tokens:
244 NewTokens.append(x)
245 #------------------------------------------------------
246 Tokens = self.Lalr.Parse(NewTokens, BETokensSize + 1)
247 # x = input()
248 print("after lalr parsing:")
249 print(Stack)
250 print(Tokens)
251 print("=====================================\n\n")
252
253
254 CurrentIn = Tokens[0]
255 if (len(Tokens) > 1):
256 Tokens = Tokens[1:]
257 # x = input()
258 else:
259 Id = self.ParseTable[self.GetNoneTerminalId(Top)][self.GetTerminalId(CurrentIn)]
260
261 # Error Handling
262 if Id == -1:
263
264 print("1)Error in input!")
265 print(Tokens)
266 print(Stack)
267 print("Top: ", Top)
268 print("CurrentIn: ", CurrentIn)
269 print("\n\n")
270 exit()
271
272 Stack.pop()
273
274
275 Rhs = self.RhsList[Id]
276 if Rhs != ["eps"]:
277 for Symbol in reversed(Rhs):
278 Stack.append(Symbol)
279
280
281 elif self.IsSemanticRule(Top):
282 if Top == "@PUSH":
283 Stack.pop()
284 Top = GetTop(Stack)
285 if Top == CurrentIn:
286 MatchedStack.append(Top)
287 Tokens, CurrentIn = Read(Tokens)
288 else:
289 print("2)Error in input!")
290 exit()
291 elif Top == "@JZ":
292 print("JZ:")
293 print(Stack)
294 print(Tokens)
295 print("\n\n")
296 y = input()
297
298 elif Top == "@JZCMPL":
299 print("JZCOMPELETED:")
300 print(Stack)
301 print(Tokens)
302 print("\n\n")
303 y = input()
304
305
306
307
308 else:
309 Op0 = MatchedStack.pop()
310 if Top == "@PRINT":
311 print(Top,"\t", Op0 )
312 elif Top == "@MOV":
313 Op1 = MatchedStack.pop()
314 print(Top,"\t", Op1, ", ", Op0 )
315 else:
316 pass
317 Op1 = MatchedStack.pop()
318 MatchedStack.append("t" + str(TempCounter))
319 print(Top, "\t", "t"+ str(TempCounter), ", ", Op1, ",", Op0 )
320 TempCounter += 1
321
322 Stack.pop()
323
324 else: # Terminal
325
326 CurrentIn = Tokens[0]
327 if (len(Tokens) > 1):
328 Tokens = Tokens[1:]
329 Stack.pop()
330
331 return MatchedStack
332
333
334
335
336
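Parse is a table-driven LL(1) driver: the stack starts with "$" and the start symbol; a nonterminal on top is replaced by the right-hand side selected by ParseTable[top][current input] (pushed in reverse), a semantic-rule symbol triggers an action on the matched stack, a terminal must match the current input, and the BOOLEAN_EXPRESSION nonterminal is handed off to the LALR parser registered via SetLalr. A self-contained sketch of just the predictive core on the toy grammar, without the semantic-rule and LALR branches (illustrative only):

# Table-driven LL(1) parse of "(())" with the toy grammar S -> ( S ) S | eps.
Rules = {0: ["(", "S", ")", "S"], 1: ["eps"]}
NonTerminals = {"S"}
Table = {("S", "("): 0, ("S", ")"): 1, ("S", "$"): 1}

Tokens = list("(())") + ["$"]
Stack = ["$", "S"]
Pos = 0

while Stack[-1] != "$":
    Top = Stack[-1]
    CurrentIn = Tokens[Pos]
    if Top in NonTerminals:
        RuleId = Table.get((Top, CurrentIn))
        if RuleId is None:
            raise SystemExit("Error in input!")
        Stack.pop()
        Rhs = Rules[RuleId]
        if Rhs != ["eps"]:
            Stack.extend(reversed(Rhs))        # leftmost RHS symbol ends up on top
    else:                                      # terminal on top: must match the input
        if Top != CurrentIn:
            raise SystemExit("Error in input!")
        Stack.pop()
        Pos += 1

print("accepted" if Tokens[Pos] == "$" else "leftover input")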

◆ print_predicts()

ll1_parser.LL1Parser.print_predicts ( self)
885 def print_predicts(self):
886 for Key in self.PredictDict:
887 print(Key, end=": ")
888 for X in self.PredictDict[Key]:
889 print(X,end = " ")
890 print()
891
892
893

◆ PrintFirsts()

ll1_parser.LL1Parser.PrintFirsts ( self)
794 def PrintFirsts(self):
795 for Id in self.FirstDict:
796 print(Id, end=": ")
797 for X in self.FirstDict[Id]:
798 print(X,end = " ")
799 print()
800

◆ PrintFollows()

ll1_parser.LL1Parser.PrintFollows ( self)
958 def PrintFollows(self):
959 for Key in self.FollowDict:
960 print(Key, end=": ")
961 for X in self.FollowDict[Key]:
962 print(X,end = " ")
963 print()
964

◆ PrintParseTable()

ll1_parser.LL1Parser.PrintParseTable ( self)
839 def PrintParseTable(self):
840 print("\t", end = "")
841 for j in range(len(self.TerminalList)):
842 print(self.TerminalList[j], end= "\t")
843 print()
844 for i in range(len(self.NonTerminalList)):
845 print(self.NonTerminalList[i], end= "\t")
846 for j in range(len(self.TerminalList)):
847 if self.ParseTable[i][j] == self.INVALID:
848 print(".", end= "\t")
849 else:
850 print(self.ParseTable[i][j], end= "\t")
851 print()
852
853

◆ ReadGrammar()

ll1_parser.LL1Parser.ReadGrammar ( self)
337 def ReadGrammar(self):
338 Flag = 1
339 Counter = -1
340 for Line in self.GrammarFile:
341 Counter += 1
342 Line = Line.strip()
343 if Line == "" or Line[0] == "#":
344 continue
345 elif Line[0] == ".":
346 L = Line.split("->")
347 Elements = L[1].split(" ")
348 if L[0][1:] == "OperatorsTwoOperand":
349 self.OperatorsTwoOperand += Elements
350 continue
351 if L[0][1:] == "OperatorsOneOperand":
352 self.OperatorsOneOperand += Elements
353 continue
354 elif L[0][1:] == "SemantiRules":
355 self.SemantiRulesList += Elements
356 continue
357 elif L[0][1:] == "Registers":
358 self.RegistersList += Elements
359 continue
360 elif L[0][1:] == "PseudoRegisters":
361 self.PseudoRegistersList += Elements
362 continue
363
364 self.FunctionsDict[L[0]] = Elements
365 continue
366
367 L = Line.split("->")
368 Lhs = L[0]
369 Rhs = L[1].split(" ")
370
371 HasMapKeyword = False
372 MapKeywordIdx1 = 0
373 MapKeywordIdx2 = 0
374 Idx = 0
375 for X in Rhs:
376 if X[0] == ".":
377 HasMapKeyword = True
378 MapKeywordIdx1 = Idx
379 elif X[0] == "@":
380 if X[1] == ".":
381 MapKeywordIdx2 = Idx
382
383
384 Idx += 1
385
386 if not HasMapKeyword:
387 self.NonTerminalSet.add(Lhs)
388 self.LhsList.append(Lhs)
389 self.RhsList.append(Rhs)
390 for X in Rhs:
391 if not self.IsNoneTerminal(X) and not self.IsSemanticRule(X) and not X=="eps":
392 self.TerminalSet.add(X)
393 if self.IsSemanticRule(X):
394 pass
395
396 else:
397
398 for value in self.FunctionsDict[Rhs[MapKeywordIdx1]]:
399 RhsTemp =list(Rhs)
400 RhsTemp[MapKeywordIdx1] = value
401 RhsTemp[MapKeywordIdx2] = "@" + value.upper()
402
403 self.keywordList.append(value)
404
405 self.NonTerminalSet.add(Lhs)
406 self.LhsList.append(Lhs)
407 self.RhsList.append(RhsTemp)
408
409 for X in RhsTemp:
410 if not self.IsNoneTerminal(X) and not self.IsSemanticRule(X) and not X=="eps":
411 self.TerminalSet.add(X)
412
413 if Flag:
414 Flag = 0
415 self.Start = Lhs
416 self.MAXIMUM_RHS_LEN = max(self.MAXIMUM_RHS_LEN, len(Rhs))
417
418
419
420 self.TerminalSet.add("$")
421
422 self.NonTerminalList = list(self.NonTerminalSet)
423
424 self.TerminalList = list(self.TerminalSet)
425
426
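ReadGrammar reads Grammar.txt line by line: blank lines and lines starting with '#' are ignored; a line starting with '.' is a directive of the form .Name->item item ... that fills the operator, semantic-rule, register, pseudo-register, or function-map lists; any other line is a production Lhs->sym sym ..., where nonterminals start with a capital letter, semantic rules start with '@', "eps" denotes the empty string, and a right-hand-side symbol that starts with '.' is expanded once per entry of the corresponding function map (its paired "@." placeholder becoming '@' plus the entry upper-cased). The left-hand side of the first production becomes the start symbol. The snippet below shows the general shape of such a file with hypothetical symbols (not copied from HyperDbg's Grammar.txt) and mirrors the splitting logic of this method:

# Hypothetical illustration of the input format; the real Grammar.txt differs.
ExampleGrammar = """\
# comments and blank lines are ignored
.Registers->rax rbx
.PseudoRegisters->pid tid
.OperatorsOneOperand->inc dec
.OperatorsTwoOperand->or xor

SIMPLE_RULE->_hex @PUSH @PRINT
SIMPLE_RULE->eps
"""

for Line in ExampleGrammar.splitlines():
    Line = Line.strip()
    if Line == "" or Line[0] == "#":
        continue
    Lhs, Rhs = Line.split("->")
    print(Lhs, "=>", Rhs.split(" "))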

◆ Run()

ll1_parser.LL1Parser.Run ( self)
71 def Run(self):
72 # Read grammar from input file and initialize grammar related variables
73 self.ReadGrammar()
74
75 # Calculate "First Set" for all nonterminals and print it
76 self.FindAllFirsts()
77 # print("Firsts:")
78 # self.PrintFirsts()
79 # print("________________________________________________________________________________")
80
81 # Calculate "Follow Set" for all nonterminals and print it
82 self.FindAllFollows()
83 # print("Follows:")
84 # self.PrintFollows()
85 # print("________________________________________________________________________________")
86
87 # Calculate "Predict Set" for each rule and print it
88 self.FindAllPredicts()
89 # print("Predicts:")
90 # self.print_predicts()
91 # print("________________________________________________________________________________")
92
93 # Fill the "Parse Table" according to the calculated "Predict Set" and print it
94 self.FillParseTable()
95 # print("Parse Table:")
96 # self.PrintParseTable()
97 # print()
98
99 # Print the definitions that the generated parser needs into the output files
100 self.HeaderFile.write("#pragma once\n")
101
102 self.HeaderFile.write("#ifndef PARSE_TABLE_H\n")
103 self.HeaderFile.write("#define PARSE_TABLE_H\n")
104
105 self.HeaderFile.write("#define RULES_COUNT " + str(len(self.LhsList)) + "\n")
106 self.HeaderFile.write("#define TERMINAL_COUNT " + str(len(list(self.TerminalSet))) + "\n")
107 self.HeaderFile.write("#define NONETERMINAL_COUNT " + str(len(list(self.NonTerminalList))) + "\n")
108 self.HeaderFile.write("#define START_VARIABLE " + "\"" + self.Start +"\"\n")
109 self.HeaderFile.write("#define MAX_RHS_LEN " + str(self.MAXIMUM_RHS_LEN) +"\n")
110 self.HeaderFile.write("#define KEYWORD_LIST_LENGTH " + str(len(self.keywordList)) +"\n")
111 self.HeaderFile.write("#define OPERATORS_ONE_OPERAND_LIST_LENGTH " + str(len(self.OperatorsOneOperand)) + "\n")
112 self.HeaderFile.write("#define OPERATORS_TWO_OPERAND_LIST_LENGTH " + str(len(self.OperatorsTwoOperand)) + "\n")
113 self.HeaderFile.write("#define REGISTER_MAP_LIST_LENGTH " + str(len(self.RegistersList))+ "\n")
114 self.HeaderFile.write("#define PSEUDO_REGISTER_MAP_LIST_LENGTH " + str(len(self.PseudoRegistersList))+ "\n")
115 self.HeaderFile.write("#define SEMANTIC_RULES_MAP_LIST_LENGTH " + str(len(self.keywordList) + len(self.OperatorsOneOperand) + len(self.OperatorsTwoOperand) + len(self.SemantiRulesList))+ "\n")
116 for Key in self.FunctionsDict:
117 self.HeaderFile.write("#define "+ Key[1:].upper() + "_LENGTH "+ str(len(self.FunctionsDict[Key]))+"\n")
118
119
120 self.SourceFile.write("#include \"pch.h\"\n")
121
122
123
124 # Prints Rules into output files
125 self.WriteLhsList()
126 self.WriteRhsList()
127
128 # Prints size of each Rhs into output files
129 self.WriteRhsSize()
130
131 # Print the nonterminal and terminal lists into output files
132 self.WriteNoneTermianlList()
133 self.WriteTerminalList()
134
135 # Prints "Parse Table" into output files
136 self.WriteParseTable()
137
138 # Prints Keywords list into output files
139 self.WriteKeywordList()
140
141 # Prints Operators List into output files
142 self.WriteOperatorsList()
143
144 # Prints Maps into output files
145 self.WriteMaps()
146
147
148
149
150 self.WriteSemanticMaps()
151 self.WriteRegisterMaps()
152 self.WritePseudoRegMaps()
153
154
155 # Closes Grammar Input File
156 self.GrammarFile.close()
157
158
159
160
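Run drives the whole pipeline: read the grammar, compute the FIRST, FOLLOW, and PREDICT sets, fill the LL(1) parse table, then write the size macros (RULES_COUNT, TERMINAL_COUNT, ...) and the table and list definitions into the supplied source, header, and common-header files. A hedged end-to-end sketch using in-memory buffers instead of the real build outputs (a valid Grammar.txt still has to be present in the working directory):

# Minimal sketch; io.StringIO stands in for the files the real build writes.
import io
from ll1_parser import LL1Parser

Source, Header = io.StringIO(), io.StringIO()
Common, CommonScala = io.StringIO(), io.StringIO()

Generator = LL1Parser(Source, Header, Common, CommonScala)
# Generator.SetLalr(Lalr, LalrParseTable)   # only needed when Parse() is used
Generator.Run()

print(Header.getvalue().splitlines()[:3])   # ['#pragma once', '#ifndef PARSE_TABLE_H', ...]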

◆ SetLalr()

ll1_parser.LL1Parser.SetLalr ( self,
Lalr,
LalrParseTable )
161 def SetLalr(self, Lalr, LalrParseTable):
162 self.Lalr = Lalr
163 self.LalrParseTable = LalrParseTable
164

◆ WriteKeywordList()

ll1_parser.LL1Parser.WriteKeywordList ( self)
585 def WriteKeywordList(self):
586 self.SourceFile.write("const char* KeywordList[]= {\n")
587 self.HeaderFile.write("extern const char* KeywordList[];\n")
588
589 Counter = 0
590 for X in self.keywordList:
591 if Counter == len(self.keywordList)-1:
592 self.SourceFile.write("\"" + X + "\"" + "\n")
593 else:
594 self.SourceFile.write("\"" + X + "\"" + ",\n")
595 Counter +=1
596 self.SourceFile.write("};\n")
597

◆ WriteLhsList()

ll1_parser.LL1Parser.WriteLhsList ( self)
641 def WriteLhsList(self):
642
643 self.SourceFile.write("const struct _TOKEN Lhs[RULES_COUNT]= \n{\n")
644 self.HeaderFile.write("extern const struct _TOKEN Lhs[RULES_COUNT];\n")
645 Counter = 0
646 for Lhs in self.LhsList:
647 if Counter == len(self.LhsList)-1:
648 self.SourceFile.write("\t{NON_TERMINAL, " + "\"" + Lhs + "\"}" + "\n")
649 else:
650 self.SourceFile.write("\t{NON_TERMINAL, " + "\"" + Lhs + "\"}" + ",\n")
651 Counter +=1
652 self.SourceFile.write("};\n")
653

◆ WriteMaps()

ll1_parser.LL1Parser.WriteMaps ( self)
623 def WriteMaps(self):
624 for Key in self.FunctionsDict:
625 print(Key)
626
627 self.HeaderFile.write("extern const char* "+ Key[1:]+ "[];\n")
628 self.SourceFile.write("const char* "+ Key[1:]+ "[] = {\n")
629
630 Counter = 0
631 for X in self.FunctionsDict[Key]:
632 if Counter == len(self.FunctionsDict[Key])-1:
633 self.SourceFile.write("\"" + "@"+ X.upper() + "\"" + "\n")
634 else:
635 self.SourceFile.write("\"" +"@"+ X.upper() + "\"" + ",\n")
636 Counter +=1
637 self.SourceFile.write("};\n")
638
639
640

◆ WriteNoneTermianlList()

ll1_parser.LL1Parser.WriteNoneTermianlList ( self)
719 def WriteNoneTermianlList(self):
720 self.SourceFile.write("const char* NoneTerminalMap[NONETERMINAL_COUNT]= \n{\n")
721 self.HeaderFile.write("extern const char* NoneTerminalMap[NONETERMINAL_COUNT];\n")
722 Counter = 0
723 for X in self.NonTerminalList:
724 if Counter == len(self.NonTerminalList)-1:
725 self.SourceFile.write("\"" + X + "\"" + "\n")
726 else:
727 self.SourceFile.write("\"" + X + "\"" + ",\n")
728 Counter +=1
729 self.SourceFile.write("};\n")
730

◆ WriteOperatorsList()

ll1_parser.LL1Parser.WriteOperatorsList ( self)
598 def WriteOperatorsList(self):
599 self.SourceFile.write("const char* OperatorsTwoOperandList[]= {\n")
600 self.HeaderFile.write("extern const char* OperatorsTwoOperandList[];\n")
601
602 Counter = 0
603 for X in self.OperatorsTwoOperand:
604 if Counter == len(self.OperatorsTwoOperand)-1:
605 self.SourceFile.write("\"" + "@"+ X.upper() + "\"" + "\n")
606 else:
607 self.SourceFile.write("\"" + "@"+ X.upper() + "\"" + ",\n")
608 Counter +=1
609 self.SourceFile.write("};\n")
610
611 self.SourceFile.write("const char* OperatorsOneOperandList[]= {\n")
612 self.HeaderFile.write("extern const char* OperatorsOneOperandList[];\n")
613
614 Counter = 0
615 for X in self.OperatorsOneOperand:
616 if Counter == len(self.OperatorsOneOperand)-1:
617 self.SourceFile.write("\"" + "@"+ X.upper() + "\"" + "\n")
618 else:
619 self.SourceFile.write("\"" + "@"+ X.upper() + "\"" + ",\n")
620 Counter +=1
621 self.SourceFile.write("};\n")
622

◆ WriteParseTable()

ll1_parser.LL1Parser.WriteParseTable ( self)
731 def WriteParseTable(self):
732 self.SourceFile.write("const int ParseTable[NONETERMINAL_COUNT][TERMINAL_COUNT]= \n{\n")
733 self.HeaderFile.write("extern const int ParseTable[NONETERMINAL_COUNT][TERMINAL_COUNT];\n")
734 i = 0
735 for X in self.NonTerminalList:
736 j = 0
737 self.SourceFile.write("\t{")
738 for y in self.TerminalList:
739 self.SourceFile.write(str(self.ParseTable[i][j]))
740 if j != len(self.TerminalList)-1:
741 self.SourceFile.write("\t\t,")
742 j += 1
743
744 if i == len(self.NonTerminalList)-1:
745 self.SourceFile.write("\t}\n")
746
747 else:
748 self.SourceFile.write("\t},\n")
749 i +=1
750 self.SourceFile.write("};\n")
751
752
753

◆ WritePseudoRegMaps()

ll1_parser.LL1Parser.WritePseudoRegMaps ( self)
566 def WritePseudoRegMaps(self):
567 Counter = 0
568 for X in self.PseudoRegistersList:
569 self.CommonHeaderFile.write("#define " + "PSEUDO_REGISTER_" + X.upper() + " " + str(Counter) + "\n")
570 Counter += 1
571 self.CommonHeaderFile.write("\n")
572
573 self.SourceFile.write("const SYMBOL_MAP PseudoRegisterMapList[]= {\n")
574 self.HeaderFile.write("extern const SYMBOL_MAP PseudoRegisterMapList[];\n")
575
576 Counter = 0
577 for X in self.PseudoRegistersList:
578 if Counter == len(self.PseudoRegistersList)-1:
579 self.SourceFile.write("{\"" + X + "\", "+ "PSEUDO_REGISTER_" + X.upper() + "}\n")
580 else:
581 self.SourceFile.write("{\"" + X + "\", "+ "PSEUDO_REGISTER_" + X.upper() + "},\n")
582 Counter +=1
583 self.SourceFile.write("};\n")
584

◆ WriteRegisterMaps()

ll1_parser.LL1Parser.WriteRegisterMaps ( self)
525 def WriteRegisterMaps(self):
526 self.CommonHeaderFile.write("\ntypedef enum REGS_ENUM {\n")
527 Counter = 0
528 for X in self.RegistersList:
529 if Counter == len(self.RegistersList)-1:
530 self.CommonHeaderFile.write("\t" + "REGISTER_" + X.upper() + " = " + str(Counter) + "\n")
531 else:
532 self.CommonHeaderFile.write("\t" + "REGISTER_" + X.upper() + " = " + str(Counter) + ",\n")
533 Counter += 1
534 self.CommonHeaderFile.write("\n} REGS_ENUM;\n\n")
535
536 self.CommonHeaderFile.write("static const char *const RegistersNames[] = {\n")
537 Counter = 0
538 for X in self.RegistersList:
539 if Counter == len(self.RegistersList)-1:
540 self.CommonHeaderFile.write("\t" + "\"" + X + "\"")
541 else:
542 if (Counter + 1) % 8 == 0:
543 self.CommonHeaderFile.write("" + "\"" + X + "\",\n")
544 else:
545 self.CommonHeaderFile.write("" + "\"" + X + "\", ")
546
547 Counter += 1
548 self.CommonHeaderFile.write("\n};\n\n")
549
550
551
552
553 self.SourceFile.write("const SYMBOL_MAP RegisterMapList[]= {\n")
554 self.HeaderFile.write("extern const SYMBOL_MAP RegisterMapList[];\n")
555
556 Counter = 0
557 for X in self.RegistersList:
558 if Counter == len(self.RegistersList)-1:
559 self.SourceFile.write("{\"" + X + "\", "+ "REGISTER_" + X.upper() + "}\n")
560 else:
561 self.SourceFile.write("{\"" + X + "\", "+ "REGISTER_" + X.upper() + "},\n")
562 Counter +=1
563 self.SourceFile.write("};\n")
564
565
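WriteRegisterMaps emits three artifacts for the registers listed in the grammar: a REGS_ENUM enumeration and a RegistersNames string array into the common header, and a SYMBOL_MAP RegisterMapList into the generated source and header. The sketch below re-runs just the enum-emitting loop on a hypothetical two-register list to show the shape of the generated text (the register names are placeholders):

# Illustrative reproduction of the enum emission for two placeholder registers.
RegistersList = ["rax", "rbx"]
Out = ["\ntypedef enum REGS_ENUM {"]
for Counter, X in enumerate(RegistersList):
    Comma = "" if Counter == len(RegistersList) - 1 else ","
    Out.append("\tREGISTER_" + X.upper() + " = " + str(Counter) + Comma)
Out.append("\n} REGS_ENUM;\n")
print("\n".join(Out))
# Prints roughly:
#   typedef enum REGS_ENUM {
#       REGISTER_RAX = 0,
#       REGISTER_RBX = 1
#   } REGS_ENUM;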

◆ WriteRhsList()

ll1_parser.LL1Parser.WriteRhsList ( self)
671 def WriteRhsList(self):
672 self.SourceFile.write("const struct _TOKEN Rhs[RULES_COUNT][MAX_RHS_LEN]= \n{\n")
673 self.HeaderFile.write("extern const struct _TOKEN Rhs[RULES_COUNT][MAX_RHS_LEN];\n")
674 Counter =0
675 for Rhs in self.RhsList:
676 self.SourceFile.write("\t{")
677
678 C = 0
679 for Var in Rhs:
680 if C == len(Rhs) -1:
681 self.SourceFile.write("{"+self.GetType(Var) +", "+"\"" + Var + "\"}" )
682 else:
683 self.SourceFile.write("{"+self.GetType(Var) +", "+"\"" + Var + "\"}," )
684 C += 1
685
686 if Counter == len(self.RhsList)-1:
687 self.SourceFile.write("}\n")
688 else:
689 self.SourceFile.write("},\n")
690 Counter+= 1
691
692 self.SourceFile.write("};\n")
693

◆ WriteRhsSize()

ll1_parser.LL1Parser.WriteRhsSize ( self)
694 def WriteRhsSize(self):
695 self.SourceFile.write("const unsigned int RhsSize[RULES_COUNT]= \n{\n")
696 self.HeaderFile.write("extern const unsigned int RhsSize[RULES_COUNT];\n")
697 Counter =0
698 for Rhs in self.RhsList:
699 if Counter == len(self.RhsList)-1:
700 self.SourceFile.write( str(len(Rhs)) + "\n" )
701 else:
702 self.SourceFile.write( str(len(Rhs)) + ",\n" )
703 Counter+= 1
704
705 self.SourceFile.write("};\n")
706

◆ WriteSemanticMaps()

ll1_parser.LL1Parser.WriteSemanticMaps ( self)
427 def WriteSemanticMaps(self):
428
429 self.CommonHeaderFileScala.write("object ScriptEvalFunc {\n object ScriptOperators extends ChiselEnum {\n val ")
430
431 Counter = 0
432 CheckForDuplicateList = []
433
434 self.CommonHeaderFile.write("#define " + "FUNC_UNDEFINED " + str(Counter) + "\n")
435 self.CommonHeaderFileScala.write("sFunc" + "Undefined")
436 Counter += 1
437
438 for X in self.OperatorsOneOperand:
439
440 if X not in CheckForDuplicateList:
441 self.CommonHeaderFile.write("#define " + "FUNC_" + X.upper() + " " + str(Counter) + "\n")
442 self.CommonHeaderFileScala.write(", sFunc" + X.capitalize())
443
444 CheckForDuplicateList.append(X)
445 Counter += 1
446
447 for X in self.OperatorsTwoOperand:
448
449 if X not in CheckForDuplicateList:
450 self.CommonHeaderFile.write("#define " + "FUNC_" + X.upper() + " " + str(Counter) + "\n")
451 self.CommonHeaderFileScala.write(", sFunc" + X.capitalize())
452 CheckForDuplicateList.append(X)
453 Counter += 1
454
455 for X in self.SemantiRulesList:
456
457 if X not in CheckForDuplicateList:
458 self.CommonHeaderFile.write("#define " + "FUNC_" + X.upper() + " " + str(Counter) + "\n")
459 self.CommonHeaderFileScala.write(", sFunc" + X.capitalize())
460 CheckForDuplicateList.append(X)
461 Counter += 1
462
463 for X in self.keywordList:
464
465 if X not in CheckForDuplicateList:
466 self.CommonHeaderFile.write("#define " + "FUNC_" + X.upper() + " " + str(Counter) + "\n")
467
468 #
469 # Check if it's the last item
470 #
471 self.CommonHeaderFileScala.write(", sFunc" + X.capitalize() + "")
472
473 CheckForDuplicateList.append(X)
474 Counter += 1
475
476
477 self.CommonHeaderFileScala.write(" = Value\n }\n} ")
478
479
480 self.SourceFile.write("const SYMBOL_MAP SemanticRulesMapList[]= {\n")
481 self.HeaderFile.write("extern const SYMBOL_MAP SemanticRulesMapList[];\n")
482
483 for X in self.OperatorsOneOperand:
484 self.SourceFile.write("{\"@" + X.upper() + "\", "+ "FUNC_" + X.upper() + "},\n")
485
486 for X in self.OperatorsTwoOperand:
487 self.SourceFile.write("{\"@" + X.upper() + "\", "+ "FUNC_" + X.upper() + "},\n")
488
489 for X in self.SemantiRulesList:
490 self.SourceFile.write("{\"@" + X.upper() + "\", "+ "FUNC_" + X.upper() + "},\n")
491
492 for X in self.keywordList:
493 self.SourceFile.write("{\"@" + X.upper() + "\", "+ "FUNC_" + X.upper() + "},\n")
494
495
496
497 self.SourceFile.write("};\n")
498
499 CheckForDuplicateList = []
500 self.CommonHeaderFile.write("\nstatic const char *const FunctionNames[] = {")
501 self.CommonHeaderFile.write("\n\"FUNC_UNDEFINED\""+ ",\n")
502 for X in self.OperatorsOneOperand:
503 if X not in CheckForDuplicateList:
504 self.CommonHeaderFile.write("\"" + "FUNC_" + X.upper() + "\"" + ",\n")
505 CheckForDuplicateList.append(X)
506
507 for X in self.OperatorsTwoOperand:
508 if X not in CheckForDuplicateList:
509 self.CommonHeaderFile.write("\"" + "FUNC_" + X.upper() + "\"" + ",\n")
510 CheckForDuplicateList.append(X)
511
512 for X in self.SemantiRulesList:
513 if X not in CheckForDuplicateList:
514 self.CommonHeaderFile.write("\"" + "FUNC_" + X.upper() + "\"" + ",\n")
515 CheckForDuplicateList.append(X)
516
517 for X in self.keywordList:
518 if X not in CheckForDuplicateList:
519 self.CommonHeaderFile.write("\"" + "FUNC_" + X.upper() + "\"" + ",\n")
520 CheckForDuplicateList.append(X)
521
522 self.CommonHeaderFile.write("};\n")
523
524
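WriteSemanticMaps assigns every one-operand operator, two-operand operator, semantic rule, and keyword a unique FUNC_ index (index 0 is reserved for FUNC_UNDEFINED), and mirrors the same list both as a Chisel enum in the Scala common header and as the SemanticRulesMapList and FunctionNames arrays on the C side. A sketch of the index assignment for hypothetical operator lists (placeholders, not the real grammar contents):

# Reproduce the FUNC_ index assignment for placeholder operator lists.
OperatorsOneOperand, OperatorsTwoOperand = ["inc"], ["or"]
Counter = 0
Defines = ["#define FUNC_UNDEFINED " + str(Counter)]
Counter += 1
for X in OperatorsOneOperand + OperatorsTwoOperand:
    Defines.append("#define FUNC_" + X.upper() + " " + str(Counter))
    Counter += 1
print("\n".join(Defines))
# ->  #define FUNC_UNDEFINED 0
#     #define FUNC_INC 1
#     #define FUNC_OR 2
# The Scala common header would receive the matching enum members:
#     val sFuncUndefined, sFuncInc, sFuncOr = Value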

◆ WriteTerminalList()

ll1_parser.LL1Parser.WriteTerminalList ( self)
707 def WriteTerminalList(self):
708 self.SourceFile.write("const char* TerminalMap[TERMINAL_COUNT]= \n{\n")
709 self.HeaderFile.write("extern const char* TerminalMap[TERMINAL_COUNT];\n")
710 Counter = 0
711 for X in self.TerminalList:
712 if Counter == len(self.TerminalList)-1:
713 self.SourceFile.write("\"" + X + "\"" + "\n")
714 else:
715 self.SourceFile.write("\"" + X + "\"" + ",\n")
716 Counter +=1
717 self.SourceFile.write("};\n")
718

Member Data Documentation

◆ CommonHeaderFile

ll1_parser.LL1Parser.CommonHeaderFile

◆ CommonHeaderFileScala

ll1_parser.LL1Parser.CommonHeaderFileScala

◆ FirstDict

ll1_parser.LL1Parser.FirstDict

◆ FollowDict

ll1_parser.LL1Parser.FollowDict

◆ FunctionsDict

ll1_parser.LL1Parser.FunctionsDict

◆ GrammarFile

ll1_parser.LL1Parser.GrammarFile

◆ HeaderFile

ll1_parser.LL1Parser.HeaderFile

◆ INVALID

ll1_parser.LL1Parser.INVALID

◆ keywordList

ll1_parser.LL1Parser.keywordList

◆ Lalr

ll1_parser.LL1Parser.Lalr

◆ LalrParseTable

ll1_parser.LL1Parser.LalrParseTable

◆ LhsList

ll1_parser.LL1Parser.LhsList

◆ MAXIMUM_RHS_LEN

ll1_parser.LL1Parser.MAXIMUM_RHS_LEN

◆ NonTerminalList

ll1_parser.LL1Parser.NonTerminalList

◆ NonTerminalSet

ll1_parser.LL1Parser.NonTerminalSet

◆ OperatorsOneOperand

ll1_parser.LL1Parser.OperatorsOneOperand

◆ OperatorsTwoOperand

ll1_parser.LL1Parser.OperatorsTwoOperand

◆ ParseTable

ll1_parser.LL1Parser.ParseTable

◆ PredictDict

ll1_parser.LL1Parser.PredictDict

◆ PseudoRegistersList

ll1_parser.LL1Parser.PseudoRegistersList

◆ RegistersList

ll1_parser.LL1Parser.RegistersList

◆ RhsList

ll1_parser.LL1Parser.RhsList

◆ SemantiRulesList

ll1_parser.LL1Parser.SemantiRulesList

◆ SourceFile

ll1_parser.LL1Parser.SourceFile

◆ SPECIAL_TOKENS

ll1_parser.LL1Parser.SPECIAL_TOKENS

◆ Start

ll1_parser.LL1Parser.Start

◆ TerminalList

ll1_parser.LL1Parser.TerminalList

◆ TerminalSet

ll1_parser.LL1Parser.TerminalSet

The documentation for this class was generated from the following file: