You are viewing a plain text version of this content. The canonical link for it is here.
Posted to ojb-dev@db.apache.org by th...@apache.org on 2004/01/07 20:03:16 UTC
cvs commit: db-ojb/lib antlr_compiletime.jar antlr.jar antlr.debug.jar
thma 2004/01/07 11:03:16
Modified: src/java/org/apache/ojb/odmg/oql OQLLexerTokenTypes.java
OQLLexerTokenTypes.txt OQLLexer.java OQLParser.java
. .classpath
Removed: lib antlr_compiletime.jar antlr.jar antlr.debug.jar
Log:
migrate to ANTLR 2.7.2
Revision Changes Path
1.10 +1 -1 db-ojb/src/java/org/apache/ojb/odmg/oql/OQLLexerTokenTypes.java
Index: OQLLexerTokenTypes.java
===================================================================
RCS file: /home/cvs/db-ojb/src/java/org/apache/ojb/odmg/oql/OQLLexerTokenTypes.java,v
retrieving revision 1.9
retrieving revision 1.10
diff -u -r1.9 -r1.10
--- OQLLexerTokenTypes.java 7 Nov 2003 21:45:48 -0000 1.9
+++ OQLLexerTokenTypes.java 7 Jan 2004 19:03:16 -0000 1.10
@@ -1,4 +1,4 @@
-// $ANTLR 2.7.1: "oql-ojb.g" -> "OQLParser.java"$
+// $ANTLR 2.7.2: "oql-ojb.g" -> "OQLParser.java"$
/*
* This OQL grammar has been derived from a OQL sample grammar from the ODMG
1.4 +1 -1 db-ojb/src/java/org/apache/ojb/odmg/oql/OQLLexerTokenTypes.txt
Index: OQLLexerTokenTypes.txt
===================================================================
RCS file: /home/cvs/db-ojb/src/java/org/apache/ojb/odmg/oql/OQLLexerTokenTypes.txt,v
retrieving revision 1.3
retrieving revision 1.4
diff -u -r1.3 -r1.4
--- OQLLexerTokenTypes.txt 13 Oct 2003 16:46:29 -0000 1.3
+++ OQLLexerTokenTypes.txt 7 Jan 2004 19:03:16 -0000 1.4
@@ -1,4 +1,4 @@
-// $ANTLR 2.7.1: oql-ojb.g -> OQLLexerTokenTypes.txt$
+// $ANTLR 2.7.2: oql-ojb.g -> OQLLexerTokenTypes.txt$
OQLLexer // output token vocab name
TOK_RPAREN("right parenthesis")=4
TOK_LPAREN("left parenthesis")=5
1.14 +55 -26 db-ojb/src/java/org/apache/ojb/odmg/oql/OQLLexer.java
Index: OQLLexer.java
===================================================================
RCS file: /home/cvs/db-ojb/src/java/org/apache/ojb/odmg/oql/OQLLexer.java,v
retrieving revision 1.13
retrieving revision 1.14
diff -u -r1.13 -r1.14
--- OQLLexer.java 7 Nov 2003 21:45:48 -0000 1.13
+++ OQLLexer.java 7 Jan 2004 19:03:16 -0000 1.14
@@ -1,4 +1,4 @@
-// $ANTLR 2.7.1: "oql-ojb.g" -> "OQLLexer.java"$
+// $ANTLR 2.7.2: "oql-ojb.g" -> "OQLLexer.java"$
/*
* This OQL grammar has been derived from a OQL sample grammar from the ODMG
@@ -52,6 +52,8 @@
}
public OQLLexer(LexerSharedInputState state) {
super(state);
+ caseSensitiveLiterals = true;
+ setCaseSensitive(true);
literals = new Hashtable();
literals.put(new ANTLRHashString("nil", this), new Integer(52));
literals.put(new ANTLRHashString("group", this), new Integer(44));
@@ -79,8 +81,6 @@
literals.put(new ANTLRHashString("where", this), new Integer(41));
literals.put(new ANTLRHashString("exists", this), new Integer(46));
literals.put(new ANTLRHashString("is_defined", this), new Integer(57));
-caseSensitiveLiterals = true;
-setCaseSensitive(true);
}
public Token nextToken() throws TokenStreamException {
@@ -248,7 +248,7 @@
}
else {
if (LA(1)==EOF_CHAR) {uponEOF(); _returnToken = makeToken(Token.EOF_TYPE);}
- else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine());}
+ else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
}
}
if ( _returnToken==null ) continue tryAgain; // found SKIP token
@@ -615,7 +615,7 @@
}
default:
{
- throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine());
+ throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
}
}
}
@@ -660,7 +660,7 @@
}
default:
{
- throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine());
+ throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
}
}
}
@@ -736,7 +736,7 @@
}
default:
{
- throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine());
+ throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
}
}
}
@@ -748,7 +748,7 @@
matchRange('0','9');
}
else {
- if ( _cnt34>=1 ) { break _loop34; } else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine());}
+ if ( _cnt34>=1 ) { break _loop34; } else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
}
_cnt34++;
@@ -778,7 +778,7 @@
mTOK_UNSIGNED_INTEGER(false);
}
else {
- if ( _cnt37>=1 ) { break _loop37; } else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine());}
+ if ( _cnt37>=1 ) { break _loop37; } else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
}
_cnt37++;
@@ -808,7 +808,7 @@
mTOK_UNSIGNED_INTEGER(false);
}
else {
- if ( _cnt40>=1 ) { break _loop40; } else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine());}
+ if ( _cnt40>=1 ) { break _loop40; } else {throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());}
}
_cnt40++;
@@ -859,7 +859,7 @@
}
default:
{
- throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine());
+ throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
}
}
if ( _createToken && _token==null && _ttype!=Token.SKIP ) {
@@ -975,7 +975,7 @@
}
default:
{
- throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine());
+ throw new NoViableAltForCharException((char)LA(1), getFilename(), getLine(), getColumn());
}
}
}
@@ -1074,19 +1074,48 @@
}
- private static final long _tokenSet_0_data_[] = { 105621835743232L, 576460745995190270L, 0L, 0L, 0L };
- public static final BitSet _tokenSet_0 = new BitSet(_tokenSet_0_data_);
- private static final long _tokenSet_1_data_[] = { 288019269919178752L, 0L, 0L, 0L, 0L };
- public static final BitSet _tokenSet_1 = new BitSet(_tokenSet_1_data_);
- private static final long _tokenSet_2_data_[] = { 288054523010744320L, 576460745995190270L, 0L, 0L, 0L };
- public static final BitSet _tokenSet_2 = new BitSet(_tokenSet_2_data_);
- private static final long _tokenSet_3_data_[] = { -549755814920L, -1L, -1L, -1L, 0L, 0L, 0L, 0L };
- public static final BitSet _tokenSet_3 = new BitSet(_tokenSet_3_data_);
- private static final long _tokenSet_4_data_[] = { -17179870216L, -1L, -1L, -1L, 0L, 0L, 0L, 0L };
- public static final BitSet _tokenSet_4 = new BitSet(_tokenSet_4_data_);
- private static final long _tokenSet_5_data_[] = { -1032L, -1L, -1L, -1L, 0L, 0L, 0L, 0L };
- public static final BitSet _tokenSet_5 = new BitSet(_tokenSet_5_data_);
- private static final long _tokenSet_6_data_[] = { -4398046512136L, -1L, -1L, -1L, 0L, 0L, 0L, 0L };
- public static final BitSet _tokenSet_6 = new BitSet(_tokenSet_6_data_);
+ private static final long[] mk_tokenSet_0() {
+ long[] data = { 105621835743232L, 576460745995190270L, 0L, 0L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());
+ private static final long[] mk_tokenSet_1() {
+ long[] data = { 288019269919178752L, 0L, 0L, 0L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());
+ private static final long[] mk_tokenSet_2() {
+ long[] data = { 288054523010744320L, 576460745995190270L, 0L, 0L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());
+ private static final long[] mk_tokenSet_3() {
+ long[] data = new long[8];
+ data[0]=-549755814920L;
+ for (int i = 1; i<=3; i++) { data[i]=-1L; }
+ return data;
+ }
+ public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());
+ private static final long[] mk_tokenSet_4() {
+ long[] data = new long[8];
+ data[0]=-17179870216L;
+ for (int i = 1; i<=3; i++) { data[i]=-1L; }
+ return data;
+ }
+ public static final BitSet _tokenSet_4 = new BitSet(mk_tokenSet_4());
+ private static final long[] mk_tokenSet_5() {
+ long[] data = new long[8];
+ data[0]=-1032L;
+ for (int i = 1; i<=3; i++) { data[i]=-1L; }
+ return data;
+ }
+ public static final BitSet _tokenSet_5 = new BitSet(mk_tokenSet_5());
+ private static final long[] mk_tokenSet_6() {
+ long[] data = new long[8];
+ data[0]=-4398046512136L;
+ for (int i = 1; i<=3; i++) { data[i]=-1L; }
+ return data;
+ }
+ public static final BitSet _tokenSet_6 = new BitSet(mk_tokenSet_6());
}
1.22 +1275 -1427 db-ojb/src/java/org/apache/ojb/odmg/oql/OQLParser.java
Index: OQLParser.java
===================================================================
RCS file: /home/cvs/db-ojb/src/java/org/apache/ojb/odmg/oql/OQLParser.java,v
retrieving revision 1.21
retrieving revision 1.22
diff -u -r1.21 -r1.22
--- OQLParser.java 27 Nov 2003 13:08:24 -0000 1.21
+++ OQLParser.java 7 Jan 2004 19:03:16 -0000 1.22
@@ -1,4 +1,4 @@
-// $ANTLR 2.7.1: "oql-ojb.g" -> "OQLParser.java"$
+// $ANTLR 2.7.2: "oql-ojb.g" -> "OQLParser.java"$
/*
* This OQL grammar has been derived from a OQL sample grammar from the ODMG
@@ -22,7 +22,6 @@
import org.apache.ojb.broker.query.*;
import org.apache.ojb.broker.metadata.*;
-
import java.util.*;
import antlr.TokenBuffer;
@@ -34,1434 +33,1283 @@
import antlr.ParserSharedInputState;
import antlr.collections.impl.BitSet;
-public class OQLParser extends antlr.LLkParser
- implements OQLLexerTokenTypes
-{
-
- protected OQLParser(TokenBuffer tokenBuf, int k)
- {
- super(tokenBuf, k);
- tokenNames = _tokenNames;
- }
-
- public OQLParser(TokenBuffer tokenBuf)
- {
- this(tokenBuf, 3);
- }
-
- protected OQLParser(TokenStream lexer, int k)
- {
- super(lexer, k);
- tokenNames = _tokenNames;
- }
-
- public OQLParser(TokenStream lexer)
- {
- this(lexer, 3);
- }
-
- public OQLParser(ParserSharedInputState state)
- {
- super(state, 3);
- tokenNames = _tokenNames;
- }
-
- public final Query buildQuery() throws RecognitionException, TokenStreamException
- {
- Query query = null;
-
-
- try
- { // for error handling
- query = selectQuery();
- {
- if ((LA(1) == TOK_SEMIC))
- {
- match(TOK_SEMIC);
- }
- else if ((LA(1) == EOF))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_0);
- }
- return query;
- }
-
- public final QueryByCriteria selectQuery() throws RecognitionException, TokenStreamException
- {
- QueryByCriteria query = null;
-
-
- try
- { // for error handling
-
- Class clazz = null;
- Criteria criteria = new Criteria();
- String[] projectionAttrs;
- boolean distinct = false;
-
- match(LITERAL_select);
- {
- if ((LA(1) == LITERAL_distinct))
- {
- match(LITERAL_distinct);
-
- distinct = true;
-
- }
- else if ((LA(1) == TOK_STAR || LA(1) == Identifier))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- projectionAttrs = projectionAttributes();
- match(LITERAL_from);
- clazz = fromClause();
- {
- if ((LA(1) == LITERAL_where))
- {
- match(LITERAL_where);
- whereClause(criteria);
- }
- else if ((_tokenSet_1.member(LA(1))))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
-
- if (clazz != null)
- {
- if (projectionAttrs[0].indexOf('.') < 0)
- {
- query = QueryFactory.newQuery(clazz, criteria, distinct);
- }
- else
- {
- ClassDescriptor cld = MetadataManager.getInstance().getRepository().getDescriptorFor(clazz);
- for (int i = 0; i < projectionAttrs.length; i++)
- {
- projectionAttrs[i] = projectionAttrs[i].substring(projectionAttrs[i].indexOf('.') + 1);
- }
-
- ArrayList descs = cld.getAttributeDescriptorsForPath(projectionAttrs[0]);
- int pathLen = descs.size();
-
- if ((pathLen > 0) && (descs.get(pathLen - 1) instanceof ObjectReferenceDescriptor))
- {
- ObjectReferenceDescriptor ord =
- ((ObjectReferenceDescriptor) descs.get(pathLen - 1));
- query = QueryFactory.newQuery(clazz, criteria, distinct);
- query.setObjectProjectionAttribute(projectionAttrs[0],
- ord.getItemClass());
- }
- else
- {
- query = QueryFactory.newReportQuery(clazz, projectionAttrs, criteria, distinct);
- }
- }
- }
-
- {
- if ((LA(1) == LITERAL_order))
- {
- match(LITERAL_order);
- match(LITERAL_by);
- orderClause(query);
- }
- else if ((_tokenSet_2.member(LA(1))))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- {
- if ((LA(1) == LITERAL_group))
- {
- match(LITERAL_group);
- match(LITERAL_by);
- groupClause(query);
- }
- else if ((_tokenSet_3.member(LA(1))))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- {
- if ((LA(1) == LITERAL_prefetch))
- {
- match(LITERAL_prefetch);
- prefetchClause(query);
- }
- else if ((LA(1) == EOF || LA(1) == TOK_RPAREN || LA(1) == TOK_SEMIC))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_4);
- }
- return query;
- }
-
- public final String[] projectionAttributes() throws RecognitionException, TokenStreamException
- {
- String[] projectionAttrs = null;
-
- Token id = null;
- Token id1 = null;
-
- try
- { // for error handling
-
- String first = null;
- ArrayList list = null;
-
- {
- if ((LA(1) == Identifier))
- {
- id = LT(1);
- match(Identifier);
- {
-
- first = id.getText();
-
- }
- {
- _loop80:
- do
- {
- if ((LA(1) == TOK_COMMA))
- {
- match(TOK_COMMA);
- id1 = LT(1);
- match(Identifier);
- {
-
- if (list == null)
- {
- list = new ArrayList();
- list.add(first);
- }
- list.add(id1.getText());
-
- }
- }
- else
- {
- break _loop80;
- }
-
- }
- while (true);
- }
- }
- else if ((LA(1) == TOK_STAR))
- {
- match(TOK_STAR);
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
-
- if (list == null)
- {
- projectionAttrs = new String[]{first};
- }
- else
- {
- projectionAttrs = (String[]) list.toArray(new String[list.size()]);
- }
-
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_5);
- }
- return projectionAttrs;
- }
-
- public final Class fromClause() throws RecognitionException, TokenStreamException
- {
- Class clazz = null;
-
- Token id = null;
-
- try
- { // for error handling
- id = LT(1);
- match(Identifier);
-
- try
- {
- clazz = Class.forName(id.getText(), true,
- Thread.currentThread().getContextClassLoader());
- }
- catch (Exception e)
- {
- }
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_6);
- }
- return clazz;
- }
-
- public final void whereClause(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
-
- try
- { // for error handling
- orExpr(criteria);
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final void orderClause(
- QueryByCriteria query
- ) throws RecognitionException, TokenStreamException
- {
-
-
- try
- { // for error handling
- sortCriterion(query);
- {
- _loop83:
- do
- {
- if ((LA(1) == TOK_COMMA))
- {
- match(TOK_COMMA);
- sortCriterion(query);
- }
- else
- {
- break _loop83;
- }
-
- }
- while (true);
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_2);
- }
- }
-
- public final void groupClause(
- QueryByCriteria query
- ) throws RecognitionException, TokenStreamException
- {
-
-
- try
- { // for error handling
- groupCriterion(query);
- {
- _loop89:
- do
- {
- if ((LA(1) == TOK_COMMA))
- {
- match(TOK_COMMA);
- groupCriterion(query);
- }
- else
- {
- break _loop89;
- }
-
- }
- while (true);
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_3);
- }
- }
-
- public final void prefetchClause(
- QueryByCriteria query
- ) throws RecognitionException, TokenStreamException
- {
-
-
- try
- { // for error handling
- prefetchCriterion(query);
- {
- _loop93:
- do
- {
- if ((LA(1) == TOK_COMMA))
- {
- match(TOK_COMMA);
- prefetchCriterion(query);
- }
- else
- {
- break _loop93;
- }
-
- }
- while (true);
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_4);
- }
- }
-
- public final Query existsQuery() throws RecognitionException, TokenStreamException
- {
- Query query = null;
-
-
- try
- { // for error handling
-
- Class clazz = null;
- Criteria criteria = new Criteria();
-
- match(LITERAL_exists);
- projectionAttributes();
- match(LITERAL_in);
- clazz = fromClause();
- {
- if ((LA(1) == TOK_COLON))
- {
- match(TOK_COLON);
- whereClause(criteria);
- }
- else if ((_tokenSet_7.member(LA(1))))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
-
- if (clazz != null)
- {
- query = QueryFactory.newQuery(clazz, criteria);
- }
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- return query;
- }
-
- public final void orExpr(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
-
- try
- { // for error handling
- andExpr(criteria);
- {
- _loop97:
- do
- {
- if ((LA(1) == LITERAL_or) && (_tokenSet_8.member(LA(2))) && (_tokenSet_9.member(LA(3))))
- {
- match(LITERAL_or);
- Criteria orCriteria = new Criteria();
- andExpr(orCriteria);
- criteria.addOrCriteria(orCriteria);
- }
- else
- {
- break _loop97;
- }
-
- }
- while (true);
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final void sortCriterion(
- QueryByCriteria query
- ) throws RecognitionException, TokenStreamException
- {
-
- Token id = null;
-
- try
- { // for error handling
- boolean descending = false;
- id = LT(1);
- match(Identifier);
- {
- if ((LA(1) == LITERAL_asc || LA(1) == LITERAL_desc))
- {
- {
- if ((LA(1) == LITERAL_asc))
- {
- match(LITERAL_asc);
- descending = false;
- }
- else if ((LA(1) == LITERAL_desc))
- {
- match(LITERAL_desc);
- descending = true;
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- }
- else if ((_tokenSet_10.member(LA(1))))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
-
- if (descending)
- {
- query.addOrderByDescending(id.getText());
- }
- else
- {
- query.addOrderByAscending(id.getText());
- }
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_10);
- }
- }
-
- public final void groupCriterion(
- QueryByCriteria query
- ) throws RecognitionException, TokenStreamException
- {
-
- Token id = null;
+public class OQLParser extends antlr.LLkParser implements OQLLexerTokenTypes
+ {
- try
- { // for error handling
- id = LT(1);
- match(Identifier);
-
- query.addGroupBy(id.getText());
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_11);
- }
- }
-
- public final void prefetchCriterion(
- QueryByCriteria query
- ) throws RecognitionException, TokenStreamException
- {
-
- Token id = null;
-
- try
- { // for error handling
- id = LT(1);
- match(Identifier);
-
- query.addPrefetchedRelationship(id.getText());
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_12);
- }
- }
-
- public final void andExpr(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
-
- try
- { // for error handling
- quantifierExpr(criteria);
- {
- _loop100:
- do
- {
- if ((LA(1) == LITERAL_and) && (_tokenSet_8.member(LA(2))) && (_tokenSet_9.member(LA(3))))
- {
- match(LITERAL_and);
- Criteria andCriteria = new Criteria();
- quantifierExpr(andCriteria);
- criteria.addAndCriteria(andCriteria);
- }
- else
- {
- break _loop100;
- }
-
- }
- while (true);
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final void quantifierExpr(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
-
- try
- { // for error handling
- switch (LA(1))
- {
- case TOK_LPAREN:
- {
- match(TOK_LPAREN);
- orExpr(criteria);
- match(TOK_RPAREN);
- break;
- }
- case LITERAL_is_undefined:
- case LITERAL_is_defined:
- {
- undefinedExpr(criteria);
- break;
- }
- case LITERAL_exists:
- case LITERAL_not:
- {
- existsExpr(criteria);
- break;
- }
- default:
- if ((LA(1) == Identifier) && (_tokenSet_13.member(LA(2))))
- {
- equalityExpr(criteria);
- }
- else if ((LA(1) == Identifier) && (LA(2) == LITERAL_not || LA(2) == LITERAL_like) && (_tokenSet_14.member(LA(3))))
- {
- likeExpr(criteria);
- }
- else if ((LA(1) == Identifier) && (LA(2) == LITERAL_not || LA(2) == LITERAL_between) && (_tokenSet_15.member(LA(3))))
- {
- betweenExpr(criteria);
- }
- else if ((LA(1) == Identifier) && (LA(2) == LITERAL_in || LA(2) == LITERAL_not) && (LA(3) == TOK_LPAREN || LA(3) == LITERAL_in || LA(3) == LITERAL_list))
- {
- inExpr(criteria);
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final void equalityExpr(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
- Token id = null;
-
- try
- { // for error handling
-
- Object value = null;
-
- id = LT(1);
- match(Identifier);
- {
- {
- switch (LA(1))
- {
- case TOK_EQ:
- {
- match(TOK_EQ);
- {
- if ((LA(1) == LITERAL_nil))
- {
- match(LITERAL_nil);
- criteria.addIsNull(id.getText());
- }
- else if ((_tokenSet_16.member(LA(1))))
- {
- value = literal();
- criteria.addEqualTo(id.getText(), value);
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- break;
- }
- case TOK_NE:
- {
- match(TOK_NE);
- {
- if ((LA(1) == LITERAL_nil))
- {
- match(LITERAL_nil);
- criteria.addNotNull(id.getText());
- }
- else if ((_tokenSet_16.member(LA(1))))
- {
- value = literal();
- criteria.addNotEqualTo(id.getText(), value);
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- break;
- }
- case TOK_NE2:
- {
- match(TOK_NE2);
- {
- if ((LA(1) == LITERAL_nil))
- {
- match(LITERAL_nil);
- criteria.addNotNull(id.getText());
- }
- else if ((_tokenSet_16.member(LA(1))))
- {
- value = literal();
- criteria.addNotEqualTo(id.getText(), value);
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- break;
- }
- case TOK_LT:
- {
- match(TOK_LT);
- value = literal();
- criteria.addLessThan(id.getText(), value);
- break;
- }
- case TOK_GT:
- {
- match(TOK_GT);
- value = literal();
- criteria.addGreaterThan(id.getText(), value);
- break;
- }
- case TOK_LE:
- {
- match(TOK_LE);
- value = literal();
- criteria.addLessOrEqualThan(id.getText(), value);
- break;
- }
- case TOK_GE:
- {
- match(TOK_GE);
- value = literal();
- criteria.addGreaterOrEqualThan(id.getText(), value);
- break;
- }
- default:
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
- }
- }
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final void likeExpr(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
- Token id = null;
-
- try
- { // for error handling
-
- boolean negative = false;
- Object value = null;
-
- id = LT(1);
- match(Identifier);
- {
- if ((LA(1) == LITERAL_not))
- {
- match(LITERAL_not);
- negative = true;
- }
- else if ((LA(1) == LITERAL_like))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- match(LITERAL_like);
- value = literal();
-
- if (negative)
- {
- criteria.addNotLike(id.getText(), value);
- }
- else
- {
- criteria.addLike(id.getText(), value);
- }
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final void undefinedExpr(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
- Token id = null;
-
- try
- { // for error handling
- boolean negative = false;
- {
- if ((LA(1) == LITERAL_is_undefined))
- {
- match(LITERAL_is_undefined);
- negative = false;
- }
- else if ((LA(1) == LITERAL_is_defined))
- {
- match(LITERAL_is_defined);
- negative = true;
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- match(TOK_LPAREN);
- id = LT(1);
- match(Identifier);
- match(TOK_RPAREN);
-
- if (negative)
- {
- criteria.addNotNull(id.getText());
- }
- else
- {
- criteria.addIsNull(id.getText());
- }
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final void betweenExpr(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
- Token id = null;
-
- try
- { // for error handling
-
- boolean negative = false;
- Object lower = null;
- Object upper = null;
-
- id = LT(1);
- match(Identifier);
- {
- if ((LA(1) == LITERAL_not))
- {
- match(LITERAL_not);
- negative = true;
- }
- else if ((LA(1) == LITERAL_between))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- match(LITERAL_between);
- lower = literal();
- match(LITERAL_and);
- upper = literal();
-
- if (negative)
- {
- criteria.addNotBetween(id.getText(), lower, upper);
- }
- else
- {
- criteria.addBetween(id.getText(), lower, upper);
- }
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final void inExpr(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
- Token id = null;
-
- try
- { // for error handling
-
- boolean negative = false;
- Collection coll;
-
- id = LT(1);
- match(Identifier);
- {
- if ((LA(1) == LITERAL_not))
- {
- match(LITERAL_not);
- negative = true;
- }
- else if ((LA(1) == LITERAL_in))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- match(LITERAL_in);
- {
- if ((LA(1) == LITERAL_list))
- {
- match(LITERAL_list);
- }
- else if ((LA(1) == TOK_LPAREN))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- coll = argList();
-
- if (negative)
- {
- criteria.addNotIn(id.getText(), coll);
- }
- else
- {
- criteria.addIn(id.getText(), coll);
- }
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final void existsExpr(
- Criteria criteria
- ) throws RecognitionException, TokenStreamException
- {
-
-
- try
- { // for error handling
-
- Query subQuery = null;
- boolean negative = false;
-
- {
- if ((LA(1) == LITERAL_not))
- {
- match(LITERAL_not);
- negative = true;
- }
- else if ((LA(1) == LITERAL_exists))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- {
- if ((LA(1) == LITERAL_exists) && (LA(2) == TOK_LPAREN))
- {
- match(LITERAL_exists);
- match(TOK_LPAREN);
- subQuery = selectQuery();
- match(TOK_RPAREN);
- }
- else if ((LA(1) == LITERAL_exists) && (LA(2) == TOK_STAR || LA(2) == Identifier))
- {
- subQuery = existsQuery();
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
-
- if (negative)
- {
- criteria.addNotExists(subQuery);
- }
- else
- {
- criteria.addExists(subQuery);
- }
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- }
-
- public final Object literal() throws RecognitionException, TokenStreamException
- {
- Object value = null;
-
- Token tokInt = null;
- Token tokADbl = null;
- Token tokEDbl = null;
- Token tokChar = null;
- Token tokStr = null;
- Token tokDate = null;
- Token tokTime = null;
- Token tokTs = null;
-
- try
- { // for error handling
- switch (LA(1))
- {
- case TOK_DOLLAR:
- {
- match(TOK_DOLLAR);
- match(TOK_UNSIGNED_INTEGER);
- value = null;
- break;
- }
- case LITERAL_true:
- {
- match(LITERAL_true);
- value = Boolean.TRUE;
- break;
- }
- case LITERAL_false:
- {
- match(LITERAL_false);
- value = Boolean.FALSE;
- break;
- }
- case TOK_UNSIGNED_INTEGER:
- {
- tokInt = LT(1);
- match(TOK_UNSIGNED_INTEGER);
-
- try
- {
- value = Integer.valueOf(tokInt.getText());
- }
- catch (NumberFormatException ignored)
- {
- value = Long.valueOf(tokInt.getText());
- }
-
- break;
- }
- case TOK_APPROXIMATE_NUMERIC_LITERAL:
- {
- tokADbl = LT(1);
- match(TOK_APPROXIMATE_NUMERIC_LITERAL);
- value = Double.valueOf(tokADbl.getText());
- break;
- }
- case TOK_EXACT_NUMERIC_LITERAL:
- {
- tokEDbl = LT(1);
- match(TOK_EXACT_NUMERIC_LITERAL);
- value = Double.valueOf(tokEDbl.getText());
- break;
- }
- case CharLiteral:
- {
- tokChar = LT(1);
- match(CharLiteral);
- value = new Character(tokChar.getText().charAt(0));
- break;
- }
- case StringLiteral:
- {
- tokStr = LT(1);
- match(StringLiteral);
- value = tokStr.getText();
- break;
- }
- case LITERAL_date:
- {
- match(LITERAL_date);
- tokDate = LT(1);
- match(StringLiteral);
- value = java.sql.Date.valueOf(tokDate.getText());
- break;
- }
- case LITERAL_time:
- {
- match(LITERAL_time);
- tokTime = LT(1);
- match(StringLiteral);
- value = java.sql.Time.valueOf(tokTime.getText());
- break;
- }
- case LITERAL_timestamp:
- {
- match(LITERAL_timestamp);
- tokTs = LT(1);
- match(StringLiteral);
- value = java.sql.Timestamp.valueOf(tokTs.getText());
- break;
- }
- default:
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
- }
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_17);
- }
- return value;
- }
-
- public final Collection argList() throws RecognitionException, TokenStreamException
- {
- Collection coll = null;
-
-
- try
- { // for error handling
-
- Collection temp = new Vector();
- Object val;
-
- match(TOK_LPAREN);
- {
- if ((_tokenSet_16.member(LA(1))))
- {
- val = literal();
- if (val != null)
- {
- temp.add(val);
- }
- {
- _loop124:
- do
- {
- if ((LA(1) == TOK_COMMA))
- {
- match(TOK_COMMA);
- val = literal();
- if (val != null)
- {
- temp.add(val);
- }
- }
- else
- {
- break _loop124;
- }
-
- }
- while (true);
- }
- }
- else if ((LA(1) == TOK_RPAREN))
- {
- }
- else
- {
- throw new NoViableAltException(LT(1), getFilename());
- }
-
- }
- match(TOK_RPAREN);
-
- if (!temp.isEmpty())
- {
- coll = temp;
- }
-
- }
- catch (RecognitionException ex)
- {
- reportError(ex);
- consume();
- consumeUntil(_tokenSet_7);
- }
- return coll;
- }
+protected OQLParser(TokenBuffer tokenBuf, int k) {
+ super(tokenBuf,k);
+ tokenNames = _tokenNames;
+}
+public OQLParser(TokenBuffer tokenBuf) {
+ this(tokenBuf,3);
+}
- public static final String[] _tokenNames = {
- "<0>",
- "EOF",
- "<2>",
- "NULL_TREE_LOOKAHEAD",
- "right parenthesis",
- "left parenthesis",
- "comma",
- "semicolon",
- "TOK_COLON",
- "dot",
- "dot",
- "operator",
- "comparison operator",
- "operator",
- "operator",
- "operator",
- "operator",
- "comparison operator",
- "comparison operator",
- "comparison operator",
- "comparison operator",
- "comparison operator",
- "comparison operator",
- "left bracket",
- "right bracket",
- "TOK_DOLLAR",
- "NameFirstCharacter",
- "NameCharacter",
- "Identifier",
- "TOK_UNSIGNED_INTEGER",
- "TOK_APPROXIMATE_NUMERIC_LITERAL",
- "numeric value",
- "character string",
- "character string",
- "WhiteSpace",
- "NewLine",
- "CommentLine",
- "MultiLineComment",
- "\"select\"",
- "\"distinct\"",
- "\"from\"",
- "\"where\"",
- "\"order\"",
- "\"by\"",
- "\"group\"",
- "\"prefetch\"",
- "\"exists\"",
- "\"in\"",
- "\"asc\"",
- "\"desc\"",
- "\"or\"",
- "\"and\"",
- "\"nil\"",
- "\"not\"",
- "\"list\"",
- "\"between\"",
- "\"is_undefined\"",
- "\"is_defined\"",
- "\"like\"",
- "\"true\"",
- "\"false\"",
- "\"date\"",
- "\"time\"",
- "\"timestamp\""
- };
+protected OQLParser(TokenStream lexer, int k) {
+ super(lexer,k);
+ tokenNames = _tokenNames;
+}
- private static final long _tokenSet_0_data_[] = {2L, 0L};
- public static final BitSet _tokenSet_0 = new BitSet(_tokenSet_0_data_);
- private static final long _tokenSet_1_data_[] = {57174604644498L, 0L};
- public static final BitSet _tokenSet_1 = new BitSet(_tokenSet_1_data_);
- private static final long _tokenSet_2_data_[] = {52776558133394L, 0L};
- public static final BitSet _tokenSet_2 = new BitSet(_tokenSet_2_data_);
- private static final long _tokenSet_3_data_[] = {35184372088978L, 0L};
- public static final BitSet _tokenSet_3 = new BitSet(_tokenSet_3_data_);
- private static final long _tokenSet_4_data_[] = {146L, 0L};
- public static final BitSet _tokenSet_4 = new BitSet(_tokenSet_4_data_);
- private static final long _tokenSet_5_data_[] = {141836999983104L, 0L};
- public static final BitSet _tokenSet_5 = new BitSet(_tokenSet_5_data_);
- private static final long _tokenSet_6_data_[] = {3437073348428178L, 0L};
- public static final BitSet _tokenSet_6 = new BitSet(_tokenSet_6_data_);
- private static final long _tokenSet_7_data_[] = {3434874325172370L, 0L};
- public static final BitSet _tokenSet_7 = new BitSet(_tokenSet_7_data_);
- private static final long _tokenSet_8_data_[] = {225250350381137952L, 0L};
- public static final BitSet _tokenSet_8 = new BitSet(_tokenSet_8_data_);
- private static final long _tokenSet_9_data_[] = {549650261048496160L, 0L};
- public static final BitSet _tokenSet_9 = new BitSet(_tokenSet_9_data_);
- private static final long _tokenSet_10_data_[] = {52776558133458L, 0L};
- public static final BitSet _tokenSet_10 = new BitSet(_tokenSet_10_data_);
- private static final long _tokenSet_11_data_[] = {35184372089042L, 0L};
- public static final BitSet _tokenSet_11 = new BitSet(_tokenSet_11_data_);
- private static final long _tokenSet_12_data_[] = {210L, 0L};
- public static final BitSet _tokenSet_12 = new BitSet(_tokenSet_12_data_);
- private static final long _tokenSet_13_data_[] = {8261632L, 0L};
- public static final BitSet _tokenSet_13 = new BitSet(_tokenSet_13_data_);
- private static final long _tokenSet_14_data_[] = {-288230359475159040L, 0L};
- public static final BitSet _tokenSet_14 = new BitSet(_tokenSet_14_data_);
- private static final long _tokenSet_15_data_[] = {-540431938607906816L, 0L};
- public static final BitSet _tokenSet_15 = new BitSet(_tokenSet_15_data_);
- private static final long _tokenSet_16_data_[] = {-576460735626870784L, 0L};
- public static final BitSet _tokenSet_16 = new BitSet(_tokenSet_16_data_);
- private static final long _tokenSet_17_data_[] = {3434874325172434L, 0L};
- public static final BitSet _tokenSet_17 = new BitSet(_tokenSet_17_data_);
+public OQLParser(TokenStream lexer) {
+ this(lexer,3);
+}
+public OQLParser(ParserSharedInputState state) {
+ super(state,3);
+ tokenNames = _tokenNames;
}
+
+ public final Query buildQuery() throws RecognitionException, TokenStreamException {
+ Query query = null;
+
+
+ try { // for error handling
+ query=selectQuery();
+ {
+ if ((LA(1)==TOK_SEMIC)) {
+ match(TOK_SEMIC);
+ }
+ else if ((LA(1)==EOF)) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_0);
+ }
+ return query;
+ }
+
+ public final QueryByCriteria selectQuery() throws RecognitionException, TokenStreamException {
+ QueryByCriteria query = null;
+
+
+ try { // for error handling
+
+ Class clazz = null;
+ Criteria criteria = new Criteria();
+ String[] projectionAttrs;
+ boolean distinct = false;
+
+ match(LITERAL_select);
+ {
+ if ((LA(1)==LITERAL_distinct)) {
+ match(LITERAL_distinct);
+
+ distinct = true;
+
+ }
+ else if ((LA(1)==TOK_STAR||LA(1)==Identifier)) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ projectionAttrs=projectionAttributes();
+ match(LITERAL_from);
+ clazz=fromClause();
+ {
+ if ((LA(1)==LITERAL_where)) {
+ match(LITERAL_where);
+ whereClause(criteria);
+ }
+ else if ((_tokenSet_1.member(LA(1)))) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+
+ if (clazz != null)
+ {
+ if (projectionAttrs[0].indexOf('.') < 0)
+ {
+ query = QueryFactory.newQuery(clazz, criteria, distinct);
+ }
+ else
+ {
+ ClassDescriptor cld = MetadataManager.getInstance().getRepository().getDescriptorFor(clazz);
+ for (int i = 0; i < projectionAttrs.length; i++)
+ {
+ projectionAttrs[i] = projectionAttrs[i].substring(projectionAttrs[i].indexOf('.') + 1);
+ }
+
+ ArrayList descs = cld.getAttributeDescriptorsForPath(projectionAttrs[0]);
+ int pathLen = descs.size();
+
+ if ((pathLen > 0) && (descs.get(pathLen - 1) instanceof ObjectReferenceDescriptor))
+ {
+ ObjectReferenceDescriptor ord =
+ ((ObjectReferenceDescriptor) descs.get(pathLen - 1));
+ query = QueryFactory.newQuery(clazz, criteria, distinct);
+ query.setObjectProjectionAttribute(projectionAttrs[0],
+ ord.getItemClass());
+ }
+ else
+ {
+ query = QueryFactory.newReportQuery(clazz, projectionAttrs, criteria, distinct);
+ }
+ }
+ }
+
+ {
+ if ((LA(1)==LITERAL_order)) {
+ match(LITERAL_order);
+ match(LITERAL_by);
+ orderClause(query);
+ }
+ else if ((_tokenSet_2.member(LA(1)))) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ {
+ if ((LA(1)==LITERAL_group)) {
+ match(LITERAL_group);
+ match(LITERAL_by);
+ groupClause(query);
+ }
+ else if ((_tokenSet_3.member(LA(1)))) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ {
+ if ((LA(1)==LITERAL_prefetch)) {
+ match(LITERAL_prefetch);
+ prefetchClause(query);
+ }
+ else if ((LA(1)==EOF||LA(1)==TOK_RPAREN||LA(1)==TOK_SEMIC)) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_4);
+ }
+ return query;
+ }
+
+ public final String[] projectionAttributes() throws RecognitionException, TokenStreamException {
+ String[] projectionAttrs = null;
+
+ Token id = null;
+ Token id1 = null;
+
+ try { // for error handling
+
+ String first = null;
+ ArrayList list = null;
+
+ {
+ if ((LA(1)==Identifier)) {
+ id = LT(1);
+ match(Identifier);
+ {
+
+ first = id.getText();
+
+ }
+ {
+ _loop80:
+ do {
+ if ((LA(1)==TOK_COMMA)) {
+ match(TOK_COMMA);
+ id1 = LT(1);
+ match(Identifier);
+ {
+
+ if (list == null)
+ {
+ list = new ArrayList();
+ list.add(first);
+ }
+ list.add(id1.getText());
+
+ }
+ }
+ else {
+ break _loop80;
+ }
+
+ } while (true);
+ }
+ }
+ else if ((LA(1)==TOK_STAR)) {
+ match(TOK_STAR);
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+
+ if (list == null)
+ {
+ projectionAttrs = new String[] {first};
+ }
+ else
+ {
+ projectionAttrs = (String[]) list.toArray(new String[list.size()]);
+ }
+
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_5);
+ }
+ return projectionAttrs;
+ }
+
+ public final Class fromClause() throws RecognitionException, TokenStreamException {
+ Class clazz = null;
+
+ Token id = null;
+
+ try { // for error handling
+ id = LT(1);
+ match(Identifier);
+
+ try {
+ clazz = Class.forName(id.getText(), true,
+ Thread.currentThread().getContextClassLoader());
+ } catch (Exception e) {
+ }
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_6);
+ }
+ return clazz;
+ }
+
+ public final void whereClause(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+
+ try { // for error handling
+ orExpr(criteria);
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final void orderClause(
+ QueryByCriteria query
+ ) throws RecognitionException, TokenStreamException {
+
+
+ try { // for error handling
+ sortCriterion(query);
+ {
+ _loop83:
+ do {
+ if ((LA(1)==TOK_COMMA)) {
+ match(TOK_COMMA);
+ sortCriterion(query);
+ }
+ else {
+ break _loop83;
+ }
+
+ } while (true);
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_2);
+ }
+ }
+
+ public final void groupClause(
+ QueryByCriteria query
+ ) throws RecognitionException, TokenStreamException {
+
+
+ try { // for error handling
+ groupCriterion(query);
+ {
+ _loop89:
+ do {
+ if ((LA(1)==TOK_COMMA)) {
+ match(TOK_COMMA);
+ groupCriterion(query);
+ }
+ else {
+ break _loop89;
+ }
+
+ } while (true);
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_3);
+ }
+ }
+
+ public final void prefetchClause(
+ QueryByCriteria query
+ ) throws RecognitionException, TokenStreamException {
+
+
+ try { // for error handling
+ prefetchCriterion(query);
+ {
+ _loop93:
+ do {
+ if ((LA(1)==TOK_COMMA)) {
+ match(TOK_COMMA);
+ prefetchCriterion(query);
+ }
+ else {
+ break _loop93;
+ }
+
+ } while (true);
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_4);
+ }
+ }
+
+ public final Query existsQuery() throws RecognitionException, TokenStreamException {
+ Query query = null;
+
+
+ try { // for error handling
+
+ Class clazz = null;
+ Criteria criteria = new Criteria();
+
+ match(LITERAL_exists);
+ projectionAttributes();
+ match(LITERAL_in);
+ clazz=fromClause();
+ {
+ if ((LA(1)==TOK_COLON)) {
+ match(TOK_COLON);
+ whereClause(criteria);
+ }
+ else if ((_tokenSet_7.member(LA(1)))) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+
+ if (clazz != null) {
+ query = QueryFactory.newQuery(clazz, criteria);
+ }
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ return query;
+ }
+
+ public final void orExpr(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+
+ try { // for error handling
+ andExpr(criteria);
+ {
+ _loop97:
+ do {
+ if ((LA(1)==LITERAL_or) && (_tokenSet_8.member(LA(2))) && (_tokenSet_9.member(LA(3)))) {
+ match(LITERAL_or);
+ Criteria orCriteria = new Criteria();
+ andExpr(orCriteria);
+ criteria.addOrCriteria(orCriteria);
+ }
+ else {
+ break _loop97;
+ }
+
+ } while (true);
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final void sortCriterion(
+ QueryByCriteria query
+ ) throws RecognitionException, TokenStreamException {
+
+ Token id = null;
+
+ try { // for error handling
+ boolean descending = false;
+ id = LT(1);
+ match(Identifier);
+ {
+ if ((LA(1)==LITERAL_asc||LA(1)==LITERAL_desc)) {
+ {
+ if ((LA(1)==LITERAL_asc)) {
+ match(LITERAL_asc);
+ descending = false;
+ }
+ else if ((LA(1)==LITERAL_desc)) {
+ match(LITERAL_desc);
+ descending = true;
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ }
+ else if ((_tokenSet_10.member(LA(1)))) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+
+ if (descending) {
+ query.addOrderByDescending(id.getText());
+ } else {
+ query.addOrderByAscending(id.getText());
+ }
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_10);
+ }
+ }
+
+ public final void groupCriterion(
+ QueryByCriteria query
+ ) throws RecognitionException, TokenStreamException {
+
+ Token id = null;
+
+ try { // for error handling
+ id = LT(1);
+ match(Identifier);
+
+ query.addGroupBy(id.getText());
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_11);
+ }
+ }
+
+ public final void prefetchCriterion(
+ QueryByCriteria query
+ ) throws RecognitionException, TokenStreamException {
+
+ Token id = null;
+
+ try { // for error handling
+ id = LT(1);
+ match(Identifier);
+
+ query.addPrefetchedRelationship(id.getText());
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_12);
+ }
+ }
+
+ public final void andExpr(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+
+ try { // for error handling
+ quantifierExpr(criteria);
+ {
+ _loop100:
+ do {
+ if ((LA(1)==LITERAL_and) && (_tokenSet_8.member(LA(2))) && (_tokenSet_9.member(LA(3)))) {
+ match(LITERAL_and);
+ Criteria andCriteria = new Criteria();
+ quantifierExpr(andCriteria);
+ criteria.addAndCriteria(andCriteria);
+ }
+ else {
+ break _loop100;
+ }
+
+ } while (true);
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final void quantifierExpr(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+
+ try { // for error handling
+ switch ( LA(1)) {
+ case TOK_LPAREN:
+ {
+ match(TOK_LPAREN);
+ orExpr(criteria);
+ match(TOK_RPAREN);
+ break;
+ }
+ case LITERAL_is_undefined:
+ case LITERAL_is_defined:
+ {
+ undefinedExpr(criteria);
+ break;
+ }
+ case LITERAL_exists:
+ case LITERAL_not:
+ {
+ existsExpr(criteria);
+ break;
+ }
+ default:
+ if ((LA(1)==Identifier) && (_tokenSet_13.member(LA(2)))) {
+ equalityExpr(criteria);
+ }
+ else if ((LA(1)==Identifier) && (LA(2)==LITERAL_not||LA(2)==LITERAL_like) && (_tokenSet_14.member(LA(3)))) {
+ likeExpr(criteria);
+ }
+ else if ((LA(1)==Identifier) && (LA(2)==LITERAL_not||LA(2)==LITERAL_between) && (_tokenSet_15.member(LA(3)))) {
+ betweenExpr(criteria);
+ }
+ else if ((LA(1)==Identifier) && (LA(2)==LITERAL_in||LA(2)==LITERAL_not) && (LA(3)==TOK_LPAREN||LA(3)==LITERAL_in||LA(3)==LITERAL_list)) {
+ inExpr(criteria);
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final void equalityExpr(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+ Token id = null;
+
+ try { // for error handling
+
+ Object value = null;
+
+ id = LT(1);
+ match(Identifier);
+ {
+ {
+ switch ( LA(1)) {
+ case TOK_EQ:
+ {
+ match(TOK_EQ);
+ {
+ if ((LA(1)==LITERAL_nil)) {
+ match(LITERAL_nil);
+ criteria.addIsNull(id.getText());
+ }
+ else if ((_tokenSet_16.member(LA(1)))) {
+ value=literal();
+ criteria.addEqualTo(id.getText(), value);
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ break;
+ }
+ case TOK_NE:
+ {
+ match(TOK_NE);
+ {
+ if ((LA(1)==LITERAL_nil)) {
+ match(LITERAL_nil);
+ criteria.addNotNull(id.getText());
+ }
+ else if ((_tokenSet_16.member(LA(1)))) {
+ value=literal();
+ criteria.addNotEqualTo(id.getText(), value);
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ break;
+ }
+ case TOK_NE2:
+ {
+ match(TOK_NE2);
+ {
+ if ((LA(1)==LITERAL_nil)) {
+ match(LITERAL_nil);
+ criteria.addNotNull(id.getText());
+ }
+ else if ((_tokenSet_16.member(LA(1)))) {
+ value=literal();
+ criteria.addNotEqualTo(id.getText(), value);
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ break;
+ }
+ case TOK_LT:
+ {
+ match(TOK_LT);
+ value=literal();
+ criteria.addLessThan(id.getText(), value);
+ break;
+ }
+ case TOK_GT:
+ {
+ match(TOK_GT);
+ value=literal();
+ criteria.addGreaterThan(id.getText(), value);
+ break;
+ }
+ case TOK_LE:
+ {
+ match(TOK_LE);
+ value=literal();
+ criteria.addLessOrEqualThan(id.getText(), value);
+ break;
+ }
+ case TOK_GE:
+ {
+ match(TOK_GE);
+ value=literal();
+ criteria.addGreaterOrEqualThan(id.getText(), value);
+ break;
+ }
+ default:
+ {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+ }
+ }
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final void likeExpr(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+ Token id = null;
+
+ try { // for error handling
+
+ boolean negative = false;
+ Object value = null;
+
+ id = LT(1);
+ match(Identifier);
+ {
+ if ((LA(1)==LITERAL_not)) {
+ match(LITERAL_not);
+ negative = true;
+ }
+ else if ((LA(1)==LITERAL_like)) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ match(LITERAL_like);
+ value=literal();
+
+ if (negative) {
+ criteria.addNotLike(id.getText(), value);
+ } else {
+ criteria.addLike(id.getText(), value);
+ }
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final void undefinedExpr(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+ Token id = null;
+
+ try { // for error handling
+ boolean negative = false;
+ {
+ if ((LA(1)==LITERAL_is_undefined)) {
+ match(LITERAL_is_undefined);
+ negative = false;
+ }
+ else if ((LA(1)==LITERAL_is_defined)) {
+ match(LITERAL_is_defined);
+ negative = true;
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ match(TOK_LPAREN);
+ id = LT(1);
+ match(Identifier);
+ match(TOK_RPAREN);
+
+ if (negative) {
+ criteria.addNotNull(id.getText());
+ } else {
+ criteria.addIsNull(id.getText());
+ }
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final void betweenExpr(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+ Token id = null;
+
+ try { // for error handling
+
+ boolean negative = false;
+ Object lower = null;
+ Object upper = null;
+
+ id = LT(1);
+ match(Identifier);
+ {
+ if ((LA(1)==LITERAL_not)) {
+ match(LITERAL_not);
+ negative = true;
+ }
+ else if ((LA(1)==LITERAL_between)) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ match(LITERAL_between);
+ lower=literal();
+ match(LITERAL_and);
+ upper=literal();
+
+ if (negative) {
+ criteria.addNotBetween(id.getText(), lower, upper);
+ } else {
+ criteria.addBetween(id.getText(), lower, upper);
+ }
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final void inExpr(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+ Token id = null;
+
+ try { // for error handling
+
+ boolean negative = false;
+ Collection coll;
+
+ id = LT(1);
+ match(Identifier);
+ {
+ if ((LA(1)==LITERAL_not)) {
+ match(LITERAL_not);
+ negative = true;
+ }
+ else if ((LA(1)==LITERAL_in)) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ match(LITERAL_in);
+ {
+ if ((LA(1)==LITERAL_list)) {
+ match(LITERAL_list);
+ }
+ else if ((LA(1)==TOK_LPAREN)) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ coll=argList();
+
+ if (negative) {
+ criteria.addNotIn(id.getText(), coll);
+ } else {
+ criteria.addIn(id.getText(), coll);
+ }
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final void existsExpr(
+ Criteria criteria
+ ) throws RecognitionException, TokenStreamException {
+
+
+ try { // for error handling
+
+ Query subQuery = null;
+ boolean negative = false;
+
+ {
+ if ((LA(1)==LITERAL_not)) {
+ match(LITERAL_not);
+ negative = true;
+ }
+ else if ((LA(1)==LITERAL_exists)) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ {
+ if ((LA(1)==LITERAL_exists) && (LA(2)==TOK_LPAREN)) {
+ match(LITERAL_exists);
+ match(TOK_LPAREN);
+ subQuery=selectQuery();
+ match(TOK_RPAREN);
+ }
+ else if ((LA(1)==LITERAL_exists) && (LA(2)==TOK_STAR||LA(2)==Identifier)) {
+ subQuery=existsQuery();
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+
+ if (negative) {
+ criteria.addNotExists(subQuery);
+ } else {
+ criteria.addExists(subQuery);
+ }
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ }
+
+ public final Object literal() throws RecognitionException, TokenStreamException {
+ Object value = null;
+
+ Token tokInt = null;
+ Token tokADbl = null;
+ Token tokEDbl = null;
+ Token tokChar = null;
+ Token tokStr = null;
+ Token tokDate = null;
+ Token tokTime = null;
+ Token tokTs = null;
+
+ try { // for error handling
+ switch ( LA(1)) {
+ case TOK_DOLLAR:
+ {
+ match(TOK_DOLLAR);
+ match(TOK_UNSIGNED_INTEGER);
+ value = null;
+ break;
+ }
+ case LITERAL_true:
+ {
+ match(LITERAL_true);
+ value = Boolean.TRUE;
+ break;
+ }
+ case LITERAL_false:
+ {
+ match(LITERAL_false);
+ value = Boolean.FALSE;
+ break;
+ }
+ case TOK_UNSIGNED_INTEGER:
+ {
+ tokInt = LT(1);
+ match(TOK_UNSIGNED_INTEGER);
+
+ try
+ {
+ value = Integer.valueOf(tokInt.getText());
+ }
+ catch (NumberFormatException ignored)
+ {
+ value = Long.valueOf(tokInt.getText());
+ }
+
+ break;
+ }
+ case TOK_APPROXIMATE_NUMERIC_LITERAL:
+ {
+ tokADbl = LT(1);
+ match(TOK_APPROXIMATE_NUMERIC_LITERAL);
+ value = Double.valueOf(tokADbl.getText());
+ break;
+ }
+ case TOK_EXACT_NUMERIC_LITERAL:
+ {
+ tokEDbl = LT(1);
+ match(TOK_EXACT_NUMERIC_LITERAL);
+ value = Double.valueOf(tokEDbl.getText());
+ break;
+ }
+ case CharLiteral:
+ {
+ tokChar = LT(1);
+ match(CharLiteral);
+ value = new Character(tokChar.getText().charAt(0));
+ break;
+ }
+ case StringLiteral:
+ {
+ tokStr = LT(1);
+ match(StringLiteral);
+ value = tokStr.getText();
+ break;
+ }
+ case LITERAL_date:
+ {
+ match(LITERAL_date);
+ tokDate = LT(1);
+ match(StringLiteral);
+ value = java.sql.Date.valueOf(tokDate.getText());
+ break;
+ }
+ case LITERAL_time:
+ {
+ match(LITERAL_time);
+ tokTime = LT(1);
+ match(StringLiteral);
+ value = java.sql.Time.valueOf(tokTime.getText());
+ break;
+ }
+ case LITERAL_timestamp:
+ {
+ match(LITERAL_timestamp);
+ tokTs = LT(1);
+ match(StringLiteral);
+ value = java.sql.Timestamp.valueOf(tokTs.getText());
+ break;
+ }
+ default:
+ {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+ }
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_17);
+ }
+ return value;
+ }
+
+ public final Collection argList() throws RecognitionException, TokenStreamException {
+ Collection coll = null;
+
+
+ try { // for error handling
+
+ Collection temp = new Vector();
+ Object val;
+
+ match(TOK_LPAREN);
+ {
+ if ((_tokenSet_16.member(LA(1)))) {
+ val=literal();
+ if (val != null) {temp.add(val);}
+ {
+ _loop124:
+ do {
+ if ((LA(1)==TOK_COMMA)) {
+ match(TOK_COMMA);
+ val=literal();
+ if (val != null) {temp.add(val);}
+ }
+ else {
+ break _loop124;
+ }
+
+ } while (true);
+ }
+ }
+ else if ((LA(1)==TOK_RPAREN)) {
+ }
+ else {
+ throw new NoViableAltException(LT(1), getFilename());
+ }
+
+ }
+ match(TOK_RPAREN);
+
+ if (!temp.isEmpty()) {coll = temp;}
+
+ }
+ catch (RecognitionException ex) {
+ reportError(ex);
+ consume();
+ consumeUntil(_tokenSet_7);
+ }
+ return coll;
+ }
+
+
+ public static final String[] _tokenNames = {
+ "<0>",
+ "EOF",
+ "<2>",
+ "NULL_TREE_LOOKAHEAD",
+ "right parenthesis",
+ "left parenthesis",
+ "comma",
+ "semicolon",
+ "TOK_COLON",
+ "dot",
+ "dot",
+ "operator",
+ "comparison operator",
+ "operator",
+ "operator",
+ "operator",
+ "operator",
+ "comparison operator",
+ "comparison operator",
+ "comparison operator",
+ "comparison operator",
+ "comparison operator",
+ "comparison operator",
+ "left bracket",
+ "right bracket",
+ "TOK_DOLLAR",
+ "NameFirstCharacter",
+ "NameCharacter",
+ "Identifier",
+ "TOK_UNSIGNED_INTEGER",
+ "TOK_APPROXIMATE_NUMERIC_LITERAL",
+ "numeric value",
+ "character string",
+ "character string",
+ "WhiteSpace",
+ "NewLine",
+ "CommentLine",
+ "MultiLineComment",
+ "\"select\"",
+ "\"distinct\"",
+ "\"from\"",
+ "\"where\"",
+ "\"order\"",
+ "\"by\"",
+ "\"group\"",
+ "\"prefetch\"",
+ "\"exists\"",
+ "\"in\"",
+ "\"asc\"",
+ "\"desc\"",
+ "\"or\"",
+ "\"and\"",
+ "\"nil\"",
+ "\"not\"",
+ "\"list\"",
+ "\"between\"",
+ "\"is_undefined\"",
+ "\"is_defined\"",
+ "\"like\"",
+ "\"true\"",
+ "\"false\"",
+ "\"date\"",
+ "\"time\"",
+ "\"timestamp\""
+ };
+
+ private static final long[] mk_tokenSet_0() {
+ long[] data = { 2L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_0 = new BitSet(mk_tokenSet_0());
+ private static final long[] mk_tokenSet_1() {
+ long[] data = { 57174604644498L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_1 = new BitSet(mk_tokenSet_1());
+ private static final long[] mk_tokenSet_2() {
+ long[] data = { 52776558133394L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_2 = new BitSet(mk_tokenSet_2());
+ private static final long[] mk_tokenSet_3() {
+ long[] data = { 35184372088978L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_3 = new BitSet(mk_tokenSet_3());
+ private static final long[] mk_tokenSet_4() {
+ long[] data = { 146L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_4 = new BitSet(mk_tokenSet_4());
+ private static final long[] mk_tokenSet_5() {
+ long[] data = { 141836999983104L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_5 = new BitSet(mk_tokenSet_5());
+ private static final long[] mk_tokenSet_6() {
+ long[] data = { 3437073348428178L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_6 = new BitSet(mk_tokenSet_6());
+ private static final long[] mk_tokenSet_7() {
+ long[] data = { 3434874325172370L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_7 = new BitSet(mk_tokenSet_7());
+ private static final long[] mk_tokenSet_8() {
+ long[] data = { 225250350381137952L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_8 = new BitSet(mk_tokenSet_8());
+ private static final long[] mk_tokenSet_9() {
+ long[] data = { 549650261048496160L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_9 = new BitSet(mk_tokenSet_9());
+ private static final long[] mk_tokenSet_10() {
+ long[] data = { 52776558133458L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_10 = new BitSet(mk_tokenSet_10());
+ private static final long[] mk_tokenSet_11() {
+ long[] data = { 35184372089042L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_11 = new BitSet(mk_tokenSet_11());
+ private static final long[] mk_tokenSet_12() {
+ long[] data = { 210L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_12 = new BitSet(mk_tokenSet_12());
+ private static final long[] mk_tokenSet_13() {
+ long[] data = { 8261632L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_13 = new BitSet(mk_tokenSet_13());
+ private static final long[] mk_tokenSet_14() {
+ long[] data = { -288230359475159040L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_14 = new BitSet(mk_tokenSet_14());
+ private static final long[] mk_tokenSet_15() {
+ long[] data = { -540431938607906816L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_15 = new BitSet(mk_tokenSet_15());
+ private static final long[] mk_tokenSet_16() {
+ long[] data = { -576460735626870784L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_16 = new BitSet(mk_tokenSet_16());
+ private static final long[] mk_tokenSet_17() {
+ long[] data = { 3434874325172434L, 0L};
+ return data;
+ }
+ public static final BitSet _tokenSet_17 = new BitSet(mk_tokenSet_17());
+
+ }
1.27 +1 -3 db-ojb/.classpath
Index: .classpath
===================================================================
RCS file: /home/cvs/db-ojb/.classpath,v
retrieving revision 1.26
retrieving revision 1.27
diff -u -r1.26 -r1.27
--- .classpath 22 Dec 2003 10:18:23 -0000 1.26
+++ .classpath 7 Jan 2004 19:03:16 -0000 1.27
@@ -9,8 +9,6 @@
<classpathentry kind="src" path="src/xdoclet/test"/>
<classpathentry kind="lib" path="target/test/ojb"/>
<classpathentry kind="lib" path="lib/ant.jar"/>
- <classpathentry kind="lib" path="lib/antlr.jar"/>
- <classpathentry kind="lib" path="lib/antlr_compiletime.jar"/>
<classpathentry kind="lib" path="lib/commons-beanutils.jar"/>
<classpathentry kind="lib" path="lib/commons-logging.jar"/>
<classpathentry kind="lib" path="lib/commons-collections.jar"/>
@@ -29,12 +27,12 @@
<classpathentry kind="lib" path="lib/jcs.jar"/>
<classpathentry kind="lib" path="lib/jdori.jar"/>
<classpathentry kind="lib" path="lib/j2ee.jar"/>
- <classpathentry kind="lib" path="lib/antlr.debug.jar"/>
<classpathentry kind="lib" path="lib/log4j-1.2.8.jar"/>
<classpathentry kind="lib" path="lib/torque-3.0.2.jar"/>
<classpathentry kind="lib" path="lib/velocity-1.3.1.jar"/>
<classpathentry kind="lib" path="lib/prevayler.jar"/>
<classpathentry kind="lib" path="lib/xdoclet-1.2.jar"/>
<classpathentry kind="lib" path="lib/xjavadoc-1.0.2.jar"/>
+ <classpathentry kind="lib" path="lib/antlr-2.7.2.jar"/>
<classpathentry kind="output" path="target/classes"/>
</classpath>
---------------------------------------------------------------------
To unsubscribe, e-mail: ojb-dev-unsubscribe@db.apache.org
For additional commands, e-mail: ojb-dev-help@db.apache.org