-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathparse.go
More file actions
244 lines (217 loc) · 5.76 KB
/
parse.go
File metadata and controls
244 lines (217 loc) · 5.76 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
// Package parser provides the public API for parsing PostgreSQL SQL.
package parser
//go:generate goyacc -o parser.go -p pg gram.y
import (
"fmt"
"github.com/pgplex/pgparser/nodes"
)
// ParseResult contains the result of parsing.
//
// NOTE(review): this type is not referenced elsewhere in this chunk;
// presumably it is part of the public API for callers — confirm usage.
type ParseResult struct {
	Stmts []*nodes.RawStmt // parsed statements, one per top-level SQL statement
	Err   error            // parse failure, if any; nil on success
}
// parserLexer adapts our Lexer to the goyacc-generated pgLexer interface.
type parserLexer struct {
	lexer *Lexer // underlying token source
	// result holds the statement list for a successful parse; presumably
	// populated by the generated grammar's actions — confirm in gram.y.
	result *nodes.List
	err    error // last error reported via Error()
	// One-token lookahead for NOT_LA, NULLS_LA, WITH_LA, FORMAT_LA, WITHOUT_LA.
	// PostgreSQL's parser.c uses this to disambiguate tokens based on context.
	haveLookahead    bool  // true when the two fields below hold a buffered token
	lookaheadToken   Token // the peeked-at token, returned by the next Lex() call
	lookaheadTokType int   // its already-mapped parser token code
}
// newParserLexer builds a lexer adapter over input, ready to be handed
// to the generated pgParse entry point.
func newParserLexer(input string) *parserLexer {
	pl := new(parserLexer)
	pl.lexer = NewLexer(input)
	return pl
}
// Lex implements pgLexer.Lex.
// This includes one-token lookahead logic matching PostgreSQL's parser.c
// to replace NOT→NOT_LA, NULLS_P→NULLS_LA, WITH→WITH_LA, FORMAT→FORMAT_LA,
// and WITHOUT→WITHOUT_LA when followed by specific keywords.
func (l *parserLexer) Lex(lval *pgSymType) int {
	var tok Token
	var tokType int
	// Return buffered lookahead token if we have one. A buffered token is
	// itself re-checked below, so e.g. a buffered NOT followed by LIKE is
	// still promoted to NOT_LA on this pass.
	if l.haveLookahead {
		tok = l.lookaheadToken
		tokType = l.lookaheadTokType
		l.haveLookahead = false
	} else {
		tok = l.lexer.NextToken()
		tokType = l.mapTokenType(tok)
	}
	// Check if this token needs lookahead-based replacement
	if l.needsLookahead(tokType) {
		// Peek at the next token
		nextTok := l.lexer.NextToken()
		nextTokType := l.mapTokenType(nextTok)
		// Save it for the next Lex() call
		l.haveLookahead = true
		l.lookaheadToken = nextTok
		l.lookaheadTokType = nextTokType
		// Replace current token based on lookahead
		tokType = l.applyLookahead(tokType, nextTokType)
	}
	// Set semantic values based on token type. Only ICONST carries an
	// integer value; all other value-bearing tokens carry a string.
	switch tokType {
	case IDENT:
		lval.str = tok.Str
	case ICONST:
		lval.ival = tok.Ival
	case FCONST, SCONST, BCONST, XCONST:
		lval.str = tok.Str
	case Op:
		lval.str = tok.Str
	default:
		// Keywords - check if it has a string value
		if tok.Str != "" {
			lval.str = tok.Str
		}
	}
	return tokType
}
// needsLookahead reports whether tokType is one of the tokens whose final
// identity depends on the token that follows it (matching PostgreSQL's
// parser.c lookahead set).
func (l *parserLexer) needsLookahead(tokType int) bool {
	switch tokType {
	case NOT, WITH, NULLS_P, WITHOUT, FORMAT:
		return true
	default:
		return false
	}
}
// applyLookahead returns the _LA variant of curToken when nextToken is one
// of the keywords that forces the substitution, mirroring PostgreSQL's
// parser.c lookahead logic. Otherwise curToken is returned unchanged.
func (l *parserLexer) applyLookahead(curToken, nextToken int) int {
	switch {
	// NOT becomes NOT_LA before BETWEEN, IN, LIKE, ILIKE, SIMILAR.
	case curToken == NOT &&
		(nextToken == BETWEEN || nextToken == IN_P || nextToken == LIKE ||
			nextToken == ILIKE || nextToken == SIMILAR):
		return NOT_LA
	// WITH becomes WITH_LA before TIME or ORDINALITY.
	case curToken == WITH && (nextToken == TIME || nextToken == ORDINALITY):
		return WITH_LA
	// NULLS_P becomes NULLS_LA before FIRST, LAST, DISTINCT, NOT.
	case curToken == NULLS_P &&
		(nextToken == FIRST_P || nextToken == LAST_P ||
			nextToken == DISTINCT || nextToken == NOT):
		return NULLS_LA
	// FORMAT becomes FORMAT_LA before JSON.
	case curToken == FORMAT && nextToken == JSON:
		return FORMAT_LA
	// WITHOUT becomes WITHOUT_LA before TIME.
	case curToken == WITHOUT && nextToken == TIME:
		return WITHOUT_LA
	}
	return curToken
}
// Error implements pgLexer.Error. It records the parser's error message,
// tagged with the lexer's current position, so Parse can surface it.
func (l *parserLexer) Error(msg string) {
	pe := &ParseError{Message: msg, Position: l.lexer.pos}
	l.err = pe
}
// mapTokenType maps lexer token types to parser token types.
// Keywords already use parser constants (from parser.go), but
// non-keyword tokens (identifiers, literals, operators) use internal
// lex_* constants that need mapping.
func (l *parserLexer) mapTokenType(tok Token) int {
	// EOF is reported as token 0.
	if tok.Type == 0 {
		return 0
	}
	// Single-character tokens (ASCII punctuation) map to themselves.
	if tok.Type > 0 && tok.Type < 256 {
		return tok.Type
	}
	// Lexer-internal tokens occupy [nonKeywordTokenBase, nonKeywordTokenBase+100);
	// translate each offset to its goyacc parser constant via a table whose
	// index order matches the lex_* constant order.
	if tok.Type >= nonKeywordTokenBase && tok.Type < nonKeywordTokenBase+100 {
		lexToParser := [...]int{
			ICONST,         // lex_ICONST
			FCONST,         // lex_FCONST
			SCONST,         // lex_SCONST
			BCONST,         // lex_BCONST
			XCONST,         // lex_XCONST
			SCONST,         // lex_USCONST - map to SCONST for now
			IDENT,          // lex_IDENT
			IDENT,          // lex_UIDENT - map to IDENT for now
			TYPECAST,       // lex_TYPECAST
			DOT_DOT,        // lex_DOT_DOT
			COLON_EQUALS,   // lex_COLON_EQUALS
			EQUALS_GREATER, // lex_EQUALS_GREATER
			LESS_EQUALS,    // lex_LESS_EQUALS
			GREATER_EQUALS, // lex_GREATER_EQUALS
			NOT_EQUALS,     // lex_NOT_EQUALS
			PARAM,          // lex_PARAM
			Op,             // lex_Op
		}
		if off := tok.Type - nonKeywordTokenBase; off < len(lexToParser) {
			return lexToParser[off]
		}
		// Unknown lex_* token.
		return 0
	}
	// All other tokens (keywords, etc.) are already parser constants;
	// pass them through directly.
	return tok.Type
}
// ParseError represents a parse error with position information.
type ParseError struct {
	Message  string // human-readable description of the failure
	Position int    // position in the input where the error was detected
}

// Error implements the error interface, returning the message text only;
// Position is available to callers that type-assert or errors.As the error.
func (e *ParseError) Error() string {
	return e.Message
}
// Parse parses the given SQL input and returns a list of statements.
// A lexer-reported error takes precedence over the parser's generic
// non-zero return code.
func Parse(input string) (*nodes.List, error) {
	lx := newParserLexer(input)
	rc := pgParse(lx)
	switch {
	case lx.err != nil:
		return nil, lx.err
	case rc != 0:
		return nil, &ParseError{
			Message:  fmt.Sprintf("parse error (ret=%d)", rc),
			Position: lx.lexer.pos,
		}
	}
	return lx.result, nil
}