// Package token defines constants representing the lexical tokens of the
// Go programming language and basic operations on tokens (printing,
// predicates).
package token

import "strconv"

// Token is the set of lexical tokens of the Go programming language.
type Token int

// The list of tokens. The declaration order is significant: the unexported
// *_beg/*_end markers delimit contiguous iota ranges that IsLiteral,
// IsOperator, IsKeyword, and the keyword-map initialization rely on.
const (
	// Special tokens
	ILLEGAL Token = iota
	EOF
	COMMENT

	literal_beg
	// Identifiers and basic type literals
	// (these tokens stand for classes of literals)
	IDENT  // e.g. an identifier such as main
	INT    // e.g. 12345
	FLOAT  // e.g. 123.45
	IMAG   // e.g. 123.45i
	CHAR   // e.g. 'a'
	STRING // e.g. "abc"
	literal_end

	operator_beg
	// Operators and delimiters (spellings given in the tokens table below)
	ADD // +
	SUB // -
	MUL // *
	QUO // /
	REM // %

	AND     // &
	OR      // |
	XOR     // ^
	SHL     // <<
	SHR     // >>
	AND_NOT // &^

	ADD_ASSIGN // +=
	SUB_ASSIGN // -=
	MUL_ASSIGN // *=
	QUO_ASSIGN // /=
	REM_ASSIGN // %=

	AND_ASSIGN     // &=
	OR_ASSIGN      // |=
	XOR_ASSIGN     // ^=
	SHL_ASSIGN     // <<=
	SHR_ASSIGN     // >>=
	AND_NOT_ASSIGN // &^=

	LAND  // &&
	LOR   // ||
	ARROW // <-
	INC   // ++
	DEC   // --

	EQL    // ==
	LSS    // <
	GTR    // >
	ASSIGN // =
	NOT    // !

	NEQ      // !=
	LEQ      // <=
	GEQ      // >=
	DEFINE   // :=
	ELLIPSIS // ...

	LPAREN // (
	LBRACK // [
	LBRACE // {
	COMMA  // ,
	PERIOD // .

	RPAREN    // )
	RBRACK    // ]
	RBRACE    // }
	SEMICOLON // ;
	COLON     // :
	operator_end

	keyword_beg
	// Keywords
	BREAK
	CASE
	CHAN
	CONST
	CONTINUE

	DEFAULT
	DEFER
	ELSE
	FALLTHROUGH
	FOR

	FUNC
	GO
	GOTO
	IF
	IMPORT

	INTERFACE
	MAP
	PACKAGE
	RANGE
	RETURN

	SELECT
	STRUCT
	SWITCH
	TYPE
	VAR
	keyword_end
)
125
// tokens maps each Token to its canonical textual representation: the
// upper-case token name for special tokens and literal classes, the exact
// spelling for operators and delimiters, and the lower-case spelling for
// keywords. Marker tokens (literal_beg etc.) deliberately have no entry
// and map to "", which String reports as "token(N)".
var tokens = [...]string{
	ILLEGAL: "ILLEGAL",

	EOF:     "EOF",
	COMMENT: "COMMENT",

	IDENT:  "IDENT",
	INT:    "INT",
	FLOAT:  "FLOAT",
	IMAG:   "IMAG",
	CHAR:   "CHAR",
	STRING: "STRING",

	ADD: "+",
	SUB: "-",
	MUL: "*",
	QUO: "/",
	REM: "%",

	AND:     "&",
	OR:      "|",
	XOR:     "^",
	SHL:     "<<",
	SHR:     ">>",
	AND_NOT: "&^",

	ADD_ASSIGN: "+=",
	SUB_ASSIGN: "-=",
	MUL_ASSIGN: "*=",
	QUO_ASSIGN: "/=",
	REM_ASSIGN: "%=",

	AND_ASSIGN:     "&=",
	OR_ASSIGN:      "|=",
	XOR_ASSIGN:     "^=",
	SHL_ASSIGN:     "<<=",
	SHR_ASSIGN:     ">>=",
	AND_NOT_ASSIGN: "&^=",

	LAND:  "&&",
	LOR:   "||",
	ARROW: "<-",
	INC:   "++",
	DEC:   "--",

	EQL:    "==",
	LSS:    "<",
	GTR:    ">",
	ASSIGN: "=",
	NOT:    "!",

	NEQ:      "!=",
	LEQ:      "<=",
	GEQ:      ">=",
	DEFINE:   ":=",
	ELLIPSIS: "...",

	LPAREN: "(",
	LBRACK: "[",
	LBRACE: "{",
	COMMA:  ",",
	PERIOD: ".",

	RPAREN:    ")",
	RBRACK:    "]",
	RBRACE:    "}",
	SEMICOLON: ";",
	COLON:     ":",

	BREAK:    "break",
	CASE:     "case",
	CHAN:     "chan",
	CONST:    "const",
	CONTINUE: "continue",

	DEFAULT:     "default",
	DEFER:       "defer",
	ELSE:        "else",
	FALLTHROUGH: "fallthrough",
	FOR:         "for",

	FUNC:   "func",
	GO:     "go",
	GOTO:   "goto",
	IF:     "if",
	IMPORT: "import",

	INTERFACE: "interface",
	MAP:       "map",
	PACKAGE:   "package",
	RANGE:     "range",
	RETURN:    "return",

	SELECT: "select",
	STRUCT: "struct",
	SWITCH: "switch",
	TYPE:   "type",
	VAR:    "var",
}
232 func (tok Token) String() string {
233 s := ""
234 if 0 <= tok && tok < Token(len(tokens)) {
235 s = tokens[tok]
236 }
237 if s == "" {
238 s = "token(" + strconv.Itoa(int(tok)) + ")"
239 }
240 return s
241 }
// A set of constants for precedence-based expression parsing.
// Non-operators have lowest precedence; binary operators (see Precedence)
// use values 1 through 5, unary operators use UnaryPrec, and HighestPrec
// serves as a "catch-all" ceiling precedence.
const (
	LowestPrec  = 0 // non-operators
	UnaryPrec   = 6
	HighestPrec = 7
)
259 func (op Token) Precedence() int {
260 switch op {
261 case LOR:
262 return 1
263 case LAND:
264 return 2
265 case EQL, NEQ, LSS, LEQ, GTR, GEQ:
266 return 3
267 case ADD, SUB, OR, XOR:
268 return 4
269 case MUL, QUO, REM, SHL, SHR, AND, AND_NOT:
270 return 5
271 }
272 return LowestPrec
273 }
275 var keywords map[string]Token
276
277 func init() {
278 keywords = make(map[string]Token)
279 for i := keyword_beg + 1; i < keyword_end; i++ {
280 keywords[tokens[i]] = i
281 }
282 }
286 func Lookup(ident string) Token {
287 if tok, is_keyword := keywords[ident]; is_keyword {
288 return tok
289 }
290 return IDENT
291 }
298 func (tok Token) IsLiteral() bool { return literal_beg < tok && tok < literal_end }
303 func (tok Token) IsOperator() bool { return operator_beg < tok && tok < operator_end }
308 func (tok Token) IsKeyword() bool { return keyword_beg < tok && tok < keyword_end }