svn-gvsig-desktop / trunk / org.gvsig.desktop / org.gvsig.desktop.library / org.gvsig.expressionevaluator / org.gvsig.expressionevaluator.lib / org.gvsig.expressionevaluator.lib.impl / src / main / java / org / gvsig / expressionevaluator / impl / DefaultCompiler.java @ 45934
History | View | Annotate | Download (26.7 KB)
1 |
package org.gvsig.expressionevaluator.impl; |
---|---|
2 |
|
3 |
import java.util.HashMap; |
4 |
import java.util.Map; |
5 |
import java.util.Stack; |
6 |
import org.apache.commons.lang3.StringUtils; |
7 |
import org.gvsig.expressionevaluator.Compiler; |
8 |
import org.gvsig.expressionevaluator.LexicalAnalyzer; |
9 |
import org.gvsig.expressionevaluator.LexicalAnalyzer.Token; |
10 |
import org.gvsig.expressionevaluator.Code; |
11 |
import org.gvsig.expressionevaluator.Code.Callable; |
12 |
import org.gvsig.expressionevaluator.CodeBuilder; |
13 |
import org.gvsig.expressionevaluator.Codes; |
14 |
import static org.gvsig.expressionevaluator.ExpressionBuilder.FUNCTION_DICT; |
15 |
import org.gvsig.expressionevaluator.ExpressionEvaluatorManager; |
16 |
import org.gvsig.expressionevaluator.ExpressionSyntaxException; |
17 |
import org.gvsig.expressionevaluator.GrammarSet; |
18 |
import org.gvsig.expressionevaluator.Statement; |
19 |
import org.gvsig.expressionevaluator.Statement.StatementContext; |
20 |
import org.gvsig.expressionevaluator.impl.DefaultCodeBuilder.BaseCodes; |
21 |
import org.gvsig.expressionevaluator.impl.DefaultCodeBuilder.BaseConstant; |
22 |
import org.gvsig.expressionevaluator.impl.function.operator.NegOperator; |
23 |
import org.gvsig.expressionevaluator.impl.function.programming.CodeBlockFunction; |
24 |
import org.gvsig.expressionevaluator.spi.AbstractLexicalAnalyzer; |
25 |
import org.slf4j.Logger; |
26 |
import org.slf4j.LoggerFactory; |
27 |
|
28 |
/**
 * Recursive-descent compiler that turns an expression source string into a
 * {@link Code} tree, using a pluggable {@link LexicalAnalyzer} for tokens, a
 * {@link CodeBuilder} to construct nodes, and a {@link GrammarSet} for
 * statement-level extensions.
 *
 * Operator precedence is encoded in the call chain (lowest to highest):
 * parse_expression (AS) -> parse_relational (OR/AND) -> parse_not (NOT)
 * -> parse_conditional (comparisons, IS, LIKE, ILIKE, REGEXP)
 * -> parse_sum (||, +, -) -> parse_factor (*, /, %, [index])
 * -> parse_getattr (attribute/method access) -> parse_termino (terminals).
 */
public class DefaultCompiler implements Compiler {

    protected static final Logger LOGGER = LoggerFactory.getLogger(DefaultCompiler.class);

    /**
     * Parsing context handed to grammar {@link Statement}s. It exposes the
     * compiler, lexer and code builder, and keeps a save/restore stack of
     * per-statement state (registered codes and a code classifier) kept in
     * sync with the lexer's own save/restore stack.
     */
    class DefaultStatementContext implements StatementContext {

        /** Mutable per-statement state: an optional classifier and named codes. */
        private class State {
            public String codeClassifier;
            public Map<String,Code> codes;
        }

        // Current state; replaced (not merged) on restore_state().
        private State state;
        private final Stack<State> states;

        public DefaultStatementContext() {
            this.state = new State();
            this.states = new Stack<>();
        }

        @Override
        public void save_state() {
            this.trace("save_state");
            // Keep the lexer's checkpoint stack in lock-step with ours.
            ((AbstractLexicalAnalyzer)lexer).save_state();
            // NOTE(review): pushes the current State by reference, not a copy,
            // so mutations via setCode()/setCodeClassifier() after save_state()
            // also affect the "saved" snapshot — confirm this is intended.
            this.states.push(state);
        }

        @Override
        public void restore_state() {
            ((AbstractLexicalAnalyzer)lexer).restore_state();
            state = this.states.pop();
            this.trace("restore_state");
        }

        @Override
        public void drop_state() {
            // Discard the newest checkpoint without restoring it.
            ((AbstractLexicalAnalyzer)lexer).drop_state();
            this.states.pop();
            this.trace("drop_state");
        }

        @Override
        public Compiler getCompiler() {
            return DefaultCompiler.this;
        }

        @Override
        public LexicalAnalyzer getLexicalAnalyzer() {
            return lexer;
        }

        /**
         * Registers a code under an id (case-insensitive). If a classifier is
         * set, the first '#' in the id is replaced by the classifier before
         * storing, allowing classifier-scoped keys.
         */
        @Override
        public void setCode(String id, Code code) {
            if( this.state.codes == null ) {
                this.state.codes = new HashMap<>();
            }
            if( !StringUtils.isBlank(this.state.codeClassifier) ) {
                if( id.contains("#") ) {
                    // Replace only the first '#' occurrence with the classifier.
                    id = StringUtils.replace(id,"#",this.state.codeClassifier,1);
                }
            }
            this.state.codes.put(id.toUpperCase(), code);
        }

        /**
         * Looks up a previously registered code by id (case-insensitive);
         * returns null for blank ids or when nothing has been registered.
         */
        @Override
        public Code getCode(String id) {
            if( this.state==null || this.state.codes==null ) {
                return null;
            }
            if( StringUtils.isBlank(id) ) {
                return null;
            }
            return this.state.codes.get(id.toUpperCase());
        }

        @Override
        public void setCodeClassifier(String classifier) {
            this.state.codeClassifier = classifier;
        }

        @Override
        public String getCodeClassifier() {
            return this.state.codeClassifier;
        }

        @Override
        public CodeBuilder getCodeBuilder() {
            return codeBuilder;
        }

        /** Peeks at the next token without consuming it. */
        @Override
        public Token look_token() {
            return lexer.look();
        }

        /** Consumes and returns the next token. */
        @Override
        public Token next_token() {
            return lexer.next();
        }

        @Override
        public Code parse_expression(boolean allow_assignement) {
            return DefaultCompiler.this.parse_expression(allow_assignement);
        }

        @Override
        public Codes parse_expressions(String separator) {
            return DefaultCompiler.this.parse_expressions(separator);
        }

        @Override
        public boolean isReservedWord(String s) {
            return grammars.isReservedWord(s);
        }

        /** Debug hook; the tracing implementation is intentionally disabled. */
        @Override
        public void trace(String msg) {
//            LexicalAnalyzer lex = this.getLexicalAnalyzer();
//            String s = StringUtils.left(lex.getSource(), lex.getPosition()) + "[*]" + StringUtils.mid(lex.getSource(), lex.getPosition(), 200);
//            if( s.length()>200 ) {
//                s = "..."+StringUtils.mid(s, lex.getPosition()-100, 200)+"...";
//            }
//            System.out.println(msg+". "+s);
        }
    }

    // Whether '.' attribute/method access is parsed (see parse_getattr()).
    private boolean objectAccessSupported;
    private LexicalAnalyzer lexer;
    private CodeBuilder codeBuilder;
    private final GrammarSet grammars;
    protected ExpressionEvaluatorManager manager;
    //
    // https://www.postgresql.org/docs/9.1/static/functions.html
    //

    /**
     * Creates a compiler with the default grammar set, a SQL lexical analyzer
     * and the default code builder. Object access ('.') is enabled by default.
     */
    public DefaultCompiler(ExpressionEvaluatorManager manager) {
        this.manager = manager;
        this.grammars = new DefaultGrammarSet();
        this.lexer = new SQLLexicalAnalyzer();
        this.codeBuilder = new DefaultCodeBuilder(manager);
        this.objectAccessSupported = true;
    }

    /**
     * Clones this compiler along with its lexer and code builder so the copy
     * can compile independently. The grammar set and manager are shared.
     */
    @Override
    public Compiler clone() throws CloneNotSupportedException {
        DefaultCompiler other = (DefaultCompiler) super.clone();
        other.lexer = lexer.clone();
        other.codeBuilder = codeBuilder.clone();

        return other;
    }

    @Override
    public void setLexicalAnalyzer(LexicalAnalyzer lexer) {
        this.lexer = lexer;
    }

    @Override
    public LexicalAnalyzer getLexicalAnalyzer() {
        return this.lexer;
    }

    @Override
    public void setCodeBuilder(CodeBuilder codeBuilder) {
        this.codeBuilder = codeBuilder;
    }

    @Override
    public CodeBuilder getCodeBuilder() {
        return this.codeBuilder;
    }

    @Override
    public boolean isObjectAccessSupported() {
        return this.objectAccessSupported;
    }

    @Override
    public void setObjectAccessSupported(boolean objectAccessSupported) {
        this.objectAccessSupported = objectAccessSupported;
    }

    @Override
    public GrammarSet getGrammars() {
        return this.grammars;
    }

    /**
     * Compiles a whole expression string into a {@link Code} tree.
     *
     * @throws ExpressionSyntaxException if the parse stops before consuming
     *         the entire (trimmed) source.
     */
    @Override
    public Code compileExpression(String expression) {
        this.lexer.setSource(expression.trim());
        Code code = parse_expression();
        if( !this.lexer.isEOF() ) {
            throw new ExpressionSyntaxException(lexer);
        }
        return code;
    }

    public Code parse_expression() {
        return this.parse_expression(true);
    }

    /**
     * Parses an expression, optionally followed by "AS identifier" which is
     * compiled into a let (assignment) node when allow_assignement is true.
     */
    public Code parse_expression(boolean allow_assignement) {
        Code code = parse_relational();
        if( code != null && allow_assignement) {
            Token token = lexer.look();
            if( token.is("AS") ) {
                lexer.next();
                token = lexer.look();
                if( token.getType() != Token.IDENTIFIER ) {
                    throw new ExpressionSyntaxException(
                            I18N.An_identifier_was_expected_and_XliteralX_was_found(token.getLiteral()),
                            lexer
                    );
                }
                token = lexer.next();
                code = codeBuilder.let(token.getLiteral(),code);
            }
        }
        return code;
    }

    /**
     * Parses left-associative OR/AND chains over parse_not() operands.
     * Note: OR and AND are handled at the same precedence level here.
     */
    public Code parse_relational() {
        Code op1 = parse_not();
        Code op2;
        while( true ) {
            Token token = lexer.look();
            switch( token.getType() ) {
            case Token.OP_OR:
                lexer.next();
                op2 = parse_not();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_OR_operator(),lexer);
                }
                op1 = codeBuilder.or(op1, op2);
                break;
            case Token.OP_AND:
                lexer.next();
                op2 = parse_not();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_AND_operator(),lexer);
                }
                op1 = codeBuilder.and(op1, op2);
                break;
            default:
                return op1;
            }
        }
    }

    /** Parses an optional leading NOT in front of a conditional expression. */
    public Code parse_not() {
        Code op1;
        Token token = lexer.look();
        if( token.getType() == Token.OP_NOT ) {
            lexer.next();
            op1 = parse_conditional();
            op1 = codeBuilder.not(op1);
        } else {
            op1 = parse_conditional();
        }
        return op1;
    }

    /**
     * Parses comparison and predicate operators (&lt;, &gt;, &lt;=, &gt;=, =, &lt;&gt;,
     * IS [NOT] [NULL], ISNULL, NOTNULL via IS, REGEXP, LIKE, ILIKE) as
     * left-associative chains over parse_sum() operands.
     */
    public Code parse_conditional() {
        Code op1 = parse_sum();
        Code op2;
        while( true ) {
            Token token = lexer.look();
            switch( token.getType() ) {
            case Token.OP_LT:
                lexer.next();
                op2 = parse_sum();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_LT_operator(),lexer);
                }
                op1 = codeBuilder.lt(op1, op2);
                break;
            case Token.OP_GT:
                lexer.next();
                op2 = parse_sum();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_GT_operator(),lexer);
                }
                op1 = codeBuilder.gt(op1, op2);
                break;
            case Token.OP_LE:
                lexer.next();
                op2 = parse_sum();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_LE_operator(),lexer);
                }
                op1 = codeBuilder.le(op1, op2);
                break;
            case Token.OP_GE:
                lexer.next();
                op2 = parse_sum();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_GE_operator(),lexer);
                }
                op1 = codeBuilder.ge(op1, op2);
                break;
            case Token.OP_EQ:
                lexer.next();
                op2 = parse_sum();
                if( op2==null ) {
                    // "= >" / "= <" is likely a mistyped ">=" / "<=": add a tip
                    // to the syntax error pointing at the correct operator.
                    token = lexer.look();
                    String tip = null;
                    switch(token.getType()) {
                    case Token.OP_GT:
                        tip = I18N.The_operator_greater_than_or_equal_is_ge();
                        break;
                    case Token.OP_LT:
                        tip = I18N.The_operator_less_than_or_equal_is_ge();
                        break;
                    }
                    throw new ExpressionSyntaxException(
                            I18N.Cant_recognize_the_second_operand_of_EQ_operator(),
                            lexer,
                            tip
                    );
                }
                op1 = codeBuilder.eq(op1, op2);
                break;
            case Token.OP_NE:
                lexer.next();
                op2 = parse_sum();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_NEQ_operator(),lexer);
                }
                op1 = codeBuilder.ne(op1, op2);
                break;
            case Token.PRED_IS: {
                lexer.next();
                Token next = lexer.look();
                switch(next.getType()) {
                case Token.NOTNULL:
                    // "IS NOTNULL" -> NOT (op1 IS NULL)
                    lexer.next();
                    op1 = codeBuilder.is(op1, codeBuilder.constant(null));
                    op1 = codeBuilder.not(op1);
                    break;
                case Token.OP_NOT:
                    // "IS NOT NULL" or "IS NOT <expr>" -> NOT (op1 IS x)
                    lexer.next();
                    next = lexer.look();
                    if( next.getType() == Token.NULL ) {
                        lexer.next();
                        op1 = codeBuilder.is(op1, codeBuilder.constant(null));
                    } else {
                        op2 = parse_sum();
                        if( op2==null ) {
                            throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_IS_operator(),lexer);
                        }
                        op1 = codeBuilder.is(op1, op2);
                    }
                    op1 = codeBuilder.not(op1);
                    break;
                case Token.NULL:
                    lexer.next();
                    op1 = codeBuilder.is(op1, codeBuilder.constant(null));
                    break;
                default:
                    op2 = parse_sum();
                    if( op2==null ) {
                        throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_IS_operator(),lexer);
                    }
                    op1 = codeBuilder.is(op1, op2);
                }
            }
            break;
            case Token.ISNULL:
                // Single-token "ISNULL" predicate -> op1 IS NULL.
                lexer.next();
                op1 = codeBuilder.is(op1, codeBuilder.constant(null));
                break;
            case Token.OP_REGEXP:
                lexer.next();
                op2 = parse_sum();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_REGEXP_operator(),lexer);
                }
                op1 = codeBuilder.regexp(op1, op2);
                break;
            case Token.PRED_LIKE:
                lexer.next();
                op2 = parse_sum();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_LIKE_operator(),lexer);
                }
                op1 = codeBuilder.like(op1, op2);
                break;
            case Token.PRED_ILIKE:
                lexer.next();
                op2 = parse_sum();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_ILIKE_operator(),lexer);
                }
                op1 = codeBuilder.ilike(op1, op2);
                break;
            default:
                return op1;
            }
        }
    }

    /**
     * Parses additive-level operators (string concatenation '||', '+', '-')
     * as left-associative chains over parse_factor() operands.
     */
    public Code parse_sum() {
        Code op1 = parse_factor();
        Code op2;
        while( true ) {
            Token token = lexer.look();
            switch( token.getType() ) {
            case Token.OP_CONCAT:
                lexer.next();
                // NOTE(review): unlike OP_ADD/OP_SUBST's siblings elsewhere,
                // op2 is not checked for null here — confirm a missing
                // operand after '||' is handled downstream.
                op2 = parse_factor();
                op1 = codeBuilder.concat(op1, op2);
                break;
            case Token.OP_ADD:
                lexer.next();
                op2 = parse_factor();
                op1 = codeBuilder.add(op1, op2);
                break;
            case Token.OP_SUBST:
                lexer.next();
                op2 = parse_factor();
                op1 = codeBuilder.subst(op1, op2);
                break;
            default:
                return op1;
            }
        }
    }

    /**
     * Parses multiplicative operators ('*', '/', '%') and bracketed item
     * access (op1[index]) over parse_getattr() operands.
     */
    public Code parse_factor() {
        Code op1 = parse_getattr();
        Code op2;
        while( true ) {
            Token token = lexer.look();
            switch( token.getType() ) {
            case Token.OP_MULT:
                lexer.next();
                op2 = parse_getattr();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_MULT_operator(),lexer);
                }
                op1 = codeBuilder.mult(op1, op2);
                break;
            case Token.OP_DIV:
                lexer.next();
                op2 = parse_getattr();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_DIV_operator(),lexer);
                }
                op1 = codeBuilder.div(op1, op2);
                break;
            case Token.OP_MOD:
                lexer.next();
                op2 = parse_getattr();
                if( op2==null ) {
                    throw new ExpressionSyntaxException(I18N.Cant_recognize_the_second_operand_of_MOD_operator(),lexer);
                }
                op1 = codeBuilder.mod(op1, op2);
                break;
            case Token.OPEN_BRACKET:
                lexer.next();
                Code codeIndex = parse_expression();
                if( codeIndex == null ) {
                    throw new ExpressionSyntaxException(I18N.unexpected_end_of_source(),lexer);
                }
                token = lexer.look();
                if( token.getType()!=Token.CLOSED_BRACKET) {
                    throw new ExpressionSyntaxException(I18N.A_XTokenX_was_expected_and_XliteralX_was_found("]", token.getLiteral()),lexer);
                }
                lexer.next();
                Code code = codeBuilder.getitem(op1, codeIndex);
                // NOTE(review): this returns instead of looping like the other
                // cases, so chained forms such as a[i][j] or a[i]*2 are not
                // continued at this level — confirm intended.
                return code;
            default:
                return op1;
            }
        }
    }

    /**
     * Parses object access after a term: ".attr" becomes a getattr node and
     * ".name(args)" a method-call node. Skipped entirely when object access
     * is disabled (see setObjectAccessSupported()).
     */
    public Code parse_getattr() {
        Code op1 = parse_termino();
        if( !isObjectAccessSupported() ) {
            return op1;
        }
        while( true ) {
            Token next = lexer.look();
            switch( next.getType() ) {
            case Token.OP_GETATTR:
                lexer.next();
                next = lexer.look();
                if( next.getType()!=Token.IDENTIFIER ) {
                    throw new ExpressionSyntaxException(
                            I18N.An_attribute_identifier_was_expected_and_XliteralX_was_found(next.getLiteral()),
                            lexer
                    );
                }
                String id = (String) next.getLiteral();
                lexer.next();
                next = lexer.look();
                if( next.getType() == Token.PARENTHESIS_OPEN ) {
                    lexer.next();
                    Codes args = parse_expressions(",");
                    next = lexer.next();
                    if( next.getType() != Token.PARENTHESIS_CLOSE ) {
                        throw new ExpressionSyntaxException(
                                I18N.Closing_parenthesis_was_expected_and_XliteralX_was_found(next.getLiteral()),
                                lexer
                        );
                    }
                    op1 = codeBuilder.method(op1, id, args);
                } else {
                    op1 = codeBuilder.getattr(op1, id);
                }
                break;
            default:
                return op1;
            }
        }
    }

    /**
     * Parses a terminal: a parenthesized expression, an identifier (grammar
     * statement, function call, TRUE/FALSE keyword or plain identifier), a
     * literal (string/integer/float/NULL/TRUE/FALSE), or a unary minus.
     * Falls back to parse_grammars() for any other token.
     */
    @SuppressWarnings("UnusedAssignment")
    public Code parse_termino() {

        Token token = lexer.look();
        switch( token.getType() ) {
        case Token.PARENTHESIS_OPEN: {
            lexer.next();
            Code value = parse_expression();
            Token next = lexer.next();
            switch(next.getType()) {
            case Token.PARENTHESIS_CLOSE:
                break;
            case Token.EOF:
                throw new ExpressionSyntaxException(
                        I18N.Closing_parenthesis_was_expected_and_end_of_source_was_found(),
                        lexer
                );
            default:
                throw new ExpressionSyntaxException(
                        I18N.Closing_parenthesis_was_expected_and_XliteralX_was_found(next.getLiteral()),
                        lexer
                );
            }
            return value;
        }
        case Token.IDENTIFIER: {
            // Grammar statements (e.g. custom syntax) take priority over
            // plain identifiers and function calls.
            Code code = parse_grammars();
            if( code!=null ) {
                return code;
            }
            // A reserved word that no grammar consumed is not a term.
            if( this.grammars.isReservedWord(token.getLiteral()) ) {
                return null;
            }
            lexer.next();
            String id = (String) token.getLiteral();
            Token next = lexer.look();
            if( next.getType() == Token.PARENTHESIS_OPEN ) {
                next = lexer.next();
                Codes args = parse_arguments();
                next = lexer.next();
                switch(next.getType()) {
                case Token.PARENTHESIS_CLOSE:
                    break;
                case Token.EOF:
                    throw new ExpressionSyntaxException(
                            I18N.Closing_parenthesis_was_expected_and_end_of_source_was_found(),
                            lexer
                    );
                default:
                    throw new ExpressionSyntaxException(
                            I18N.Closing_parenthesis_was_expected_and_XliteralX_was_found(next.getLiteral()),
                            lexer
                    );
                }
                // Optimization for calls to the dict function: DICT(DICT(...))
                // collapses to the inner DICT call.
                if( StringUtils.equalsIgnoreCase(id, FUNCTION_DICT) && args!=null && args.size()==1 ) {
                    code = args.get(0);
                    if( code.code()==Code.CALLABLE &&
                        StringUtils.equalsIgnoreCase(((Callable)code).name(),FUNCTION_DICT) ) {
                        return code;
                    }
                }
                return codeBuilder.function(id, args);
            } else {
                // Bare TRUE/FALSE identifiers become boolean constants.
                if( StringUtils.equalsIgnoreCase(id, "TRUE") ) {
                    return codeBuilder.constant(true);
                }
                if( StringUtils.equalsIgnoreCase(id, "FALSE") ) {
                    return codeBuilder.constant(false);
                }
                return codeBuilder.identifier(id);
            }
        }
        case Token.STRING_LITERAL:
            lexer.next();
            return codeBuilder.constant(token.getValue());
        case Token.INTEGER_LITERAL:
            lexer.next();
            return codeBuilder.constant(token.getValue());
        case Token.FLOATING_POINT_LITERAL:
            lexer.next();
            return codeBuilder.constant(token.getValue());
        case Token.NULL:
            lexer.next();
            return codeBuilder.constant(null);
        case Token.TRUE:
            lexer.next();
            return codeBuilder.constant(true);
        case Token.FALSE:
            lexer.next();
            return codeBuilder.constant(false);
        case Token.OP_SUBST:
            // Unary minus: fold into the constant when possible, otherwise
            // emit a negate node.
            lexer.next();
            Code code = parse_termino();
            if( code.code()==Code.CONSTANT ) {
                BaseConstant c = (BaseConstant)code;
                if( c.value() instanceof Number ) {
                    c.value(NegOperator.negate((Number) c.value()));
                    return code;
                }
                throw new ExpressionSyntaxException(I18N.A_numeric_constant_was_expected_after_the_unary_operator_minus(),lexer);
            }
            return codeBuilder.negate(code);
        case Token.EOF:
            throw new ExpressionSyntaxException(I18N.unexpected_end_of_source(),lexer);
        default:
            return parse_grammars();
        }
    }

    /**
     * Parses a separator-delimited list of expressions. Returns null when no
     * expression was recognized at all; stops (without error) at the first
     * token that is not the separator.
     */
    public Codes parse_expressions(String sep) {
        BaseCodes codes = null;
        while( true ) {
            Code code = parse_expression();
            if( code!=null ) {
                if( codes == null ) {
                    codes = (BaseCodes) codeBuilder.args();
                }
                codes.add(code);
            }
            Token next = lexer.look();
            String literal = next.getLiteral();
            if( literal == null ) {
                return codes;
            }
            literal = literal.trim();
            if( sep.equals(literal) ) {
                lexer.next(); // Consume the separator (",").
            } else {
                return codes;
            }
        }
    }

    /**
     * Lookahead for a keyword-argument prefix "name :", "name =" or
     * "name =>". Returns the key and leaves the lexer positioned after the
     * separator, or returns null and restores the lexer position.
     */
    private String getKeyArgument() {
        ((AbstractLexicalAnalyzer)lexer).save_state();
        Token next = lexer.look();
        if( next.getType()==Token.IDENTIFIER ) {
            String key = next.getLiteral();
            lexer.next();
            next = lexer.next();
            if( next.is(":","=","=>") ) {
                ((AbstractLexicalAnalyzer)lexer).drop_state();
                return key;
            }
        }
        ((AbstractLexicalAnalyzer)lexer).restore_state();
        return null;
    }

    /**
     * Parses a comma-separated argument list supporting positional and
     * keyword arguments. Positional arguments may not follow a keyword
     * argument; all keyword arguments are collected into a dict code that is
     * appended as the final argument.
     */
    public Codes parse_arguments() {
        String sep = ",";
        BaseCodes codes = null;
        Map<String,Code> kwargs = null;
        while( true ) {
            String key = getKeyArgument();
            if( key == null ) {
                if( kwargs != null ) {
                    throw new ExpressionSyntaxException(I18N.nonkeyword_arg_after_keyword_arg(),lexer);
                }
                Code code = parse_expression();
                if( code!=null ) {
                    if( codes == null ) {
                        codes = (BaseCodes) codeBuilder.args();
                    }
                    codes.add(code);
                }
            } else {
                if( kwargs == null ) {
                    kwargs = new HashMap<>();
                }
                Code code = parse_expression();
                kwargs.put(key, code);
            }
            Token next = lexer.look();
            if( !next.is(sep) ) {
                break;
            }
            lexer.next(); // Consume the separator (",").
        }
        if( kwargs!=null ) {
            if( codes == null ) {
                codes = (BaseCodes) codeBuilder.args();
            }
            Code code = codeBuilder.dict(kwargs);
            codes.add(code);
        }
        return codes;
    }

    /**
     * Tries the registered grammar statements at the current position.
     * Returns null when none applies, the single parsed code when exactly one
     * statement matched, or a code-block function wrapping all parsed codes
     * when several consecutive statements matched.
     */
    private Code parse_grammars() {
        StatementContext context = new DefaultStatementContext();
        Code code;
        BaseCodes args = (BaseCodes) this.codeBuilder.args();
        context.trace("compiler.parse_gramars");
        Statement stmt = this.grammars.getApplicableStatement(context);
        while( stmt!=null ) {
            code = stmt.parse(context);
            args.add(code);
            stmt = this.grammars.getApplicableStatement(context);
        }
        switch(args.size()) {
        case 0 :
            code = null;
            break;
        case 1 :
            code = args.get(0);
            break;
        default:
            code = this.codeBuilder.function(CodeBlockFunction.NAME, args);
            break;
        }
        return code;
    }
}