-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathparser.go
More file actions
143 lines (130 loc) · 2.88 KB
/
parser.go
File metadata and controls
143 lines (130 loc) · 2.88 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
package ebnf
import (
"fmt"
"io"
)
// parser builds an AST from the token stream produced by the embedded
// lexer. Errors are accumulated in errs rather than aborting, so a
// single pass can report every problem found in the input.
type parser struct {
*lexer
// errs collects every parse/lex error encountered; parsing continues
// after an error so later problems are still reported.
errs []error
}
// newParser returns a parser reading runes from r, primed so that the
// first token is already available before any parse method is called.
func newParser(r io.RuneReader) *parser {
	p := &parser{lexer: newLexer(r)}
	// Prime the lexer: load the first character, then the first token.
	p.nextChar()
	p.nextToken()
	return p
}
// expectedTokenError records a mismatch between the token the grammar
// requires at a position and the token actually found there. The
// embedded lexerError supplies the line/column fields used in Error().
type expectedTokenError struct {
lexerError
expected, actual token
// text is the literal text of the offending token, passed to
// tokenString when formatting the message.
text string
}
// Error formats the mismatch as "line:col: expected X but found Y".
func (e expectedTokenError) Error() string {
	want := tokenString(e.expected, e.text)
	got := tokenString(e.actual, e.text)
	return fmt.Sprintf("%v:%v: expected %v but found %v", e.line, e.col, want, got)
}
// expect consumes the current token, recording an expectedTokenError if
// it is not t. A tokenInvalid current token is skipped silently, since
// the lexer has already reported that failure.
func (p *parser) expect(t token) {
	if got := p.token; got != t && got != tokenInvalid {
		e := expectedTokenError{
			expected: t,
			actual:   got,
			text:     string(p.text),
		}
		e.line = p.tokenLine
		e.col = p.tokenCol
		p.errs = append(p.errs, e)
	}
	// Always advance, matched or not, so parsing can continue.
	p.nextToken()
}
// unexpectedTokenError records a token that cannot start any construct
// at the current position (see parseFactor's default case). The
// embedded lexerError supplies the line/column fields used in Error().
type unexpectedTokenError struct {
lexerError
token token
// text is the literal text of the offending token, passed to
// tokenString when formatting the message.
text string
}
// Error formats the stray token as "line:col: unexpected X".
func (e unexpectedTokenError) Error() string {
	tok := tokenString(e.token, e.text)
	return fmt.Sprintf("%v:%v: unexpected %v", e.line, e.col, tok)
}
// parseFactor parses a single EBNF factor: an identifier, a literal, a
// parenthesized group, a bracketed option, or a braced repetition.
// Exactly one field of the returned Factor is populated on success; on
// an unexpected token an unexpectedTokenError is recorded and an empty
// Factor is returned.
func (p *parser) parseFactor() *Factor {
var f Factor
switch p.token {
case tokenIdentifier:
f.Identifier = &Identifier{Text: p.text}
p.nextToken()
case tokenLiteral:
f.Literal = &Literal{Text: p.text}
p.nextToken()
case tokenLeftParen:
// ( expression )
p.nextToken()
f.Group = p.parseExpression()
p.expect(tokenRightParen)
case tokenLeftBracket:
// [ expression ] — optional occurrence.
p.nextToken()
f.Option = p.parseExpression()
p.expect(tokenRightBracket)
case tokenLeftBrace:
// { expression } — zero or more occurrences.
p.nextToken()
f.Repetition = p.parseExpression()
p.expect(tokenRightBrace)
case tokenInvalid:
// Lexer already recorded an error for this token; don't pile on.
default:
p.errs = append(p.errs, unexpectedTokenError{
lexerError: lexerError{
col: p.tokenCol,
line: p.tokenLine,
},
text: p.text,
token: p.token,
})
}
return &f
}
// parseTerm parses a term: one or more consecutive factors.
func (p *parser) parseTerm() *Term {
fs := []*Factor{p.parseFactor()}
// NOTE(review): this range check assumes the token constants that can
// begin a factor are declared strictly between tokenEnd and tokenEqual
// — confirm against the token declaration order in the lexer file.
for tokenEnd < p.token && p.token < tokenEqual {
fs = append(fs, p.parseFactor())
}
return &Term{Factors: fs}
}
// parseExpression parses an expression: one or more terms separated by
// the alternation operator "|".
func (p *parser) parseExpression() *Expression {
	expr := &Expression{Terms: []*Term{p.parseTerm()}}
	for p.token == tokenPipe {
		p.nextToken() // consume "|"
		expr.Terms = append(expr.Terms, p.parseTerm())
	}
	return expr
}
// parseProduction parses one production of the form
// "identifier = expression ." — the caller guarantees the current
// token is the production's identifier.
func (p *parser) parseProduction() *Production {
	id := &Identifier{Text: p.text}
	p.nextToken() // consume the identifier
	p.expect(tokenEqual)
	expr := p.parseExpression()
	p.expect(tokenPeriod)
	return &Production{Identifier: id, Expression: expr}
}
// parseGrammar parses an entire grammar: one or more productions
// followed by end of input. All problems are accumulated in p.errs;
// the (possibly partial) Grammar is always returned.
func (p *parser) parseGrammar() *Grammar {
	var ps []*Production
	if p.token != tokenIdentifier {
		// A grammar must open with a production identifier. Report the
		// error at 1:1, the logical start of the grammar. Include the
		// token text so Error() can render the offending token, matching
		// how expect() builds this error.
		p.errs = append(p.errs, expectedTokenError{
			actual:   p.token,
			expected: tokenIdentifier,
			lexerError: lexerError{
				col:  1,
				line: 1,
			},
			text: string(p.text),
		})
		return &Grammar{Productions: ps}
	}
	for p.token == tokenIdentifier {
		ps = append(ps, p.parseProduction())
	}
	if p.token != tokenEnd {
		// Trailing tokens after the last production; include the token
		// text for the same reason as above.
		p.errs = append(p.errs, expectedTokenError{
			actual:   p.token,
			expected: tokenEnd,
			lexerError: lexerError{
				col:  p.tokenCol,
				line: p.tokenLine,
			},
			text: string(p.text),
		})
	}
	return &Grammar{Productions: ps}
}