@@ -26,6 +26,12 @@ func (self *reader) peekByte() (byte, bool) {
 	return self.data[self.pos], true
 }
 
+func (self *reader) takeChar() string {
+	self.checkPos()
+	self.pos += 1
+	return self.data[self.pos-1 : self.pos]
+}
+
 func (self *reader) takeUntil(needle string) (string, bool) {
 	self.checkPos()
 	idx := strings.Index(self.data[self.pos:], needle)
@@ -126,6 +132,8 @@ func (self tokenizeResultCode) String() string {
 		return "tokenizeResultCodeMissingEndSingleQuote"
 	case tokenizeResultCodeMissingEndDoubleQuote:
 		return "tokenizeResultCodeMissingEndDoubleQuote"
+	case tokenizeResultCodeMissingEscapedCharacter:
+		return "tokenizeResultCodeMissingEscapedCharacter"
 	default:
 		return fmt.Sprintf("unknown!tokenizeResultCode(%d)", self)
 	}
@@ -135,6 +143,7 @@ const (
 	tokenizeResultCodeOk tokenizeResultCode = iota
 	tokenizeResultCodeMissingEndSingleQuote
 	tokenizeResultCodeMissingEndDoubleQuote
+	tokenizeResultCodeMissingEscapedCharacter
 )
 
 func tokenize(str string) tokenizeResult {
@@ -171,6 +180,13 @@ func tokenize(str string) tokenizeResult {
 			}
 			b.bufAppend(new_chars)
 			rdr.tossChar() // the second `"`
+		case '\\':
+			rdr.tossChar()
+			if rdr.done() {
+				return tokenizeResult{code: tokenizeResultCodeMissingEscapedCharacter, err_loc: rdr.pos - 1}
+			}
+			new_chars := rdr.takeChar()
+			b.bufAppend(new_chars)
 		default:
 			b.bufAppendChar(this_byte)
 			rdr.tossChar()
@@ -189,6 +205,8 @@ func Tokenize(str string) ([]string, error) {
 		return nil, TokenizeError{code: TokenizeErrorCodeMissingEndSingleQuote, loc: res.err_loc}
 	case tokenizeResultCodeMissingEndDoubleQuote:
 		return nil, TokenizeError{code: TokenizeErrorCodeMissingEndDoubleQuote, loc: res.err_loc}
+	case tokenizeResultCodeMissingEscapedCharacter:
+		return nil, TokenizeError{code: TokenizeErrorCodeMissingEscapedCharacter, loc: res.err_loc}
 	default:
 		return nil, TokenizeError{code: TokenizeErrorCodeGeneral, loc: res.err_loc}
 	}
@@ -204,6 +222,7 @@ const (
 	TokenizeErrorCodeGeneral TokenizeErrorCode = iota
 	TokenizeErrorCodeMissingEndSingleQuote
 	TokenizeErrorCodeMissingEndDoubleQuote
+	TokenizeErrorCodeMissingEscapedCharacter
 )
 
 func (e TokenizeError) Error() string {
@@ -212,6 +231,8 @@ func (e TokenizeError) Error() string {
 		return fmt.Sprintf("unterminated single-quote: at %d", e.loc)
 	case TokenizeErrorCodeMissingEndDoubleQuote:
 		return fmt.Sprintf("unterminated double-quote: at %d", e.loc)
+	case TokenizeErrorCodeMissingEscapedCharacter:
+		return fmt.Sprintf("missing escaped character: at %d", e.loc)
 	default:
 		return fmt.Sprintf("unknown TokenizeError code: .code=%d .loc=%d", e.code, e.loc)
 	}
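
A minimal usage sketch of the new escape handling. The function and type names (Tokenize, TokenizeError, the new error code) come from this diff; the package name `tokenizer`, the helper name `demoEscapes`, and the assumption that the tokenizer splits tokens on unquoted whitespace are illustrative only and not part of the change.

	package tokenizer // hypothetical package name; same package as Tokenize

	import "fmt"

	func demoEscapes() {
		// A backslash now takes the next byte literally, so an escaped
		// space is expected to stay inside one token (assuming unquoted
		// whitespace is what normally splits tokens, which is outside
		// this diff).
		toks, err := Tokenize(`foo\ bar`)
		fmt.Println(toks, err)

		// A trailing backslash has nothing left to escape and should hit
		// the new error path: err_loc points at the backslash itself.
		_, err = Tokenize(`foo\`)
		if terr, ok := err.(TokenizeError); ok {
			fmt.Println(terr) // "missing escaped character: at 3"
		}
	}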