Posted to commits@avro.apache.org by rs...@apache.org on 2022/02/07 18:53:28 UTC

[avro] branch branch-1.11 updated (1551d61 -> 767e7fe)

This is an automated email from the ASF dual-hosted git repository.

rskraba pushed a change to branch branch-1.11
in repository https://gitbox.apache.org/repos/asf/avro.git.


    from 1551d61  Bump protobuf-java from 3.19.1 to 3.19.4 in /lang/java (#1523)
     new 34e75a3  AVRO-3285: Upgrade JavaCC plugin and library (#1447)
     new 767e7fe  AVRO-3294: Improve IDL doc comment handling (#1453)

The 2 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 lang/java/compiler/pom.xml                         |  30 ++--
 .../apache/avro/compiler/idl/DocCommentHelper.java |   8 +
 .../javacc/org/apache/avro/compiler/idl/idl.jj     | 194 ++++++++++-----------
 .../java/compiler/src/test/idl/input/comments.avdl |  37 ++--
 .../compiler/src/test/idl/output/comments.avpr     |  19 +-
 .../java/org/apache/avro/compiler/idl/TestIdl.java |  75 ++++----
 lang/java/pom.xml                                  |  31 ++--
 7 files changed, 202 insertions(+), 192 deletions(-)

[avro] 02/02: AVRO-3294: Improve IDL doc comment handling (#1453)

Posted by rs...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

rskraba pushed a commit to branch branch-1.11
in repository https://gitbox.apache.org/repos/asf/avro.git

commit 767e7fe1e1459339942d9d8d2c4f7cb3f01b05dc
Author: Oscar Westra van Holthe - Kind <op...@users.noreply.github.com>
AuthorDate: Mon Feb 7 19:37:55 2022 +0100

    AVRO-3294: Improve IDL doc comment handling (#1453)
    
    * AVRO-3294: Improve IDL doc comment handling
    
    Use the previously introduced method `DocCommentHelper#clearDoc()` to
    generate warnings for misplaced documentation. Also add documentation
    describing the tricky bits of using this method.
    
    The brittleness of this solution shows that handling doc comments as
    special tokens is a hack. However, turning them into regular tokens
    could break existing `.avdl` files.
    
    * AVRO-3294: Trigger build
---
 .../apache/avro/compiler/idl/DocCommentHelper.java |   8 ++
 .../javacc/org/apache/avro/compiler/idl/idl.jj     | 113 ++++++++++++---------
 .../java/compiler/src/test/idl/input/comments.avdl |  37 ++++---
 .../compiler/src/test/idl/output/comments.avpr     |  19 ++--
 .../java/org/apache/avro/compiler/idl/TestIdl.java |  75 +++++++-------
 5 files changed, 140 insertions(+), 112 deletions(-)
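
To see the improved doc comment handling in action, the parser can be driven from
plain Java much like the updated TestIdl below. This is a minimal sketch, not part
of the commit: the class name and file path are placeholders, and it assumes the
File-based Idl constructor used by the test harness; CompilationUnit(),
getWarningsAfterParsing() and getDoc() all appear in the diffs that follow.

    import java.io.File;
    import java.util.List;

    import org.apache.avro.Protocol;
    import org.apache.avro.compiler.idl.Idl;

    public class DocCommentDemo {
      public static void main(String[] args) throws Exception {
        // Placeholder path; point this at any .avdl file, e.g. the comments.avdl test input.
        File avdl = new File("src/test/idl/input/comments.avdl");
        try (Idl parser = new Idl(avdl)) {
          Protocol protocol = parser.CompilationUnit();
          // A doc comment placed directly before a declaration is attached to it...
          System.out.println(protocol.getType("testing.DocumentedEnum").getDoc());
          // ...while misplaced ("dangling") doc comments are reported as warnings.
          List<String> warnings = parser.getWarningsAfterParsing();
          warnings.forEach(System.out::println);
        }
      }
    }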

diff --git a/lang/java/compiler/src/main/java/org/apache/avro/compiler/idl/DocCommentHelper.java b/lang/java/compiler/src/main/java/org/apache/avro/compiler/idl/DocCommentHelper.java
index 1ccc101..5d0ec52 100644
--- a/lang/java/compiler/src/main/java/org/apache/avro/compiler/idl/DocCommentHelper.java
+++ b/lang/java/compiler/src/main/java/org/apache/avro/compiler/idl/DocCommentHelper.java
@@ -74,6 +74,14 @@ public class DocCommentHelper {
     DOC.set(newDocComment);
   }
 
+  /**
+   * Clear any documentation, generating a warning if any was present.
+   *
+   * This method should NOT be used after an optional component in the grammar
+   * (i.e., after a {@code […]} or {@code (…)*} construct), because the optional
+   * grammar part may already have parsed a doc comment special token placed
+   * after the code block.
+   */
   static void clearDoc() {
     DocComment oldDocComment = DOC.get();
     if (oldDocComment != null) {
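
The pattern the new Javadoc above warns about can be summarized as: one pending doc
comment per parser thread, handed out once by getDoc() and otherwise dropped with a
warning by setDoc()/clearDoc(). The sketch below is a simplified illustration under
that assumption, not the real DocCommentHelper: it uses String in place of the
DocComment and Token types, and a stand-in WARNINGS list for the parser's warning
collection.

    import java.util.ArrayList;
    import java.util.List;

    // Simplified stand-in for DocCommentHelper, for illustration only.
    final class DocHolderSketch {
      // One pending doc comment per parser thread, as in the real helper.
      private static final ThreadLocal<String> DOC = new ThreadLocal<>();
      private static final ThreadLocal<List<String>> WARNINGS =
          ThreadLocal.withInitial(ArrayList::new);

      /** Called when the tokenizer sees a doc comment; warns if one is already pending. */
      static void setDoc(String newDoc) {
        String old = DOC.get();
        if (old != null) {
          WARNINGS.get().add("Ignoring previous doc comment: \"" + old + "\"");
        }
        DOC.set(newDoc);
      }

      /** Hands the pending doc comment to the declaration being parsed (at most once). */
      static String getDoc() {
        String doc = DOC.get();
        DOC.set(null);
        return doc;
      }

      /** Drops a pending doc comment that no declaration claimed, warning about it. */
      static void clearDoc() {
        String old = DOC.get();
        if (old != null) {
          WARNINGS.get().add("Ignoring out-of-place doc comment: \"" + old + "\"");
        }
        DOC.set(null);
      }

      static List<String> warnings() {
        return WARNINGS.get();
      }
    }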
diff --git a/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj b/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
index ee467ff..1f931a6 100644
--- a/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
+++ b/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
@@ -1065,7 +1065,7 @@ Protocol CompilationUnit():
 /*
  * Declaration syntax follows.
  */
-private Schema NamedSchemaDeclaration(Map<String, JsonNode> props):
+private Schema NamedSchemaDeclaration(String doc, Map<String, JsonNode> props):
 {
   Schema s;
   String savedSpace = this.namespace;
@@ -1076,9 +1076,9 @@ private Schema NamedSchemaDeclaration(Map<String, JsonNode> props):
       this.namespace = getTextProp("namespace", props, token);
   }
  (
-     s = FixedDeclaration()
-   | s = EnumDeclaration()
-   | s = RecordDeclaration()
+     s = FixedDeclaration(doc)
+   | s = EnumDeclaration(doc)
+   | s = RecordDeclaration(doc)
  )
  {
    this.namespace = savedSpace;
@@ -1125,11 +1125,12 @@ Schema UnionDefinition():
 
 Protocol ProtocolDeclaration():
 {
-  String name;
+  String doc, name;
   Protocol p;
   Map<String, JsonNode> props = new LinkedHashMap<>();
 }
 {
+  doc = Documentation()
   ( SchemaProperty(props) )*
   {
     if (props.containsKey("namespace"))
@@ -1138,7 +1139,7 @@ Protocol ProtocolDeclaration():
  "protocol"
    name = Identifier()
  {
-   p = new Protocol(name, DocCommentHelper.getDoc(), namespace);
+   p = new Protocol(name, doc, namespace);
    for (String key : props.keySet())
      if ("namespace".equals(key)) {               // already handled: ignore
      } else {                                     // add all other properties
@@ -1152,17 +1153,30 @@ Protocol ProtocolDeclaration():
 }
 
 
-Schema EnumDeclaration():
+String Documentation():
+{
+  //noinspection ResultOfMethodCallIgnored
+  getToken(1); // Parse, but don't consume, at least one token; this triggers parsing special tokens like doc comments.
+}
+{
+   // Don't parse anything, just return the doc string
+   {
+       return DocCommentHelper.getDoc();
+   }
+}
+
+
+Schema EnumDeclaration(String doc):
 {
   String name;
   List<String> symbols;
   String defaultSymbol = null;
 }
 {
-  "enum" {   String doc = DocCommentHelper.getDoc(); }
+  "enum"
   name = Identifier()
   symbols = EnumBody()
-      [ <EQUALS> defaultSymbol=Identifier() <SEMICOLON>]
+      [ <EQUALS> defaultSymbol=Identifier() <SEMICOLON> { DocCommentHelper.clearDoc(); } ]
   {
     Schema s = Schema.createEnum(name, doc, namespace, symbols, defaultSymbol);
     names.put(s.getFullName(), s);
@@ -1175,10 +1189,11 @@ List<String> EnumBody():
   List<String> symbols = new ArrayList<>();
 }
 {
-  "{"
+  "{" { DocCommentHelper.clearDoc(); }
   [ EnumConstant(symbols) ( "," EnumConstant(symbols) )* ]
   "}"
   {
+    DocCommentHelper.clearDoc();
     return symbols;
   }
 }
@@ -1193,13 +1208,14 @@ void EnumConstant(List<String> symbols):
 
 void ProtocolBody(Protocol p):
 {
+  String doc;
   Schema schema;
   Message message;
   Protocol importProtocol;
   Map<String, JsonNode> props = new LinkedHashMap<>();
 }
 {
-  "{"
+  "{" { DocCommentHelper.clearDoc(); }
   (
    <IMPORT>
    ((( importProtocol = ImportIdl() | importProtocol = ImportProtocol()) {
@@ -1208,21 +1224,26 @@ void ProtocolBody(Protocol p):
        p.getMessages().putAll(importProtocol.getMessages());
      })
      | schema = ImportSchema()
-     )
+     ) {
+       DocCommentHelper.clearDoc();
+     }
    |
+   doc = Documentation()
    ( SchemaProperty(props) )*
    (
-     schema = NamedSchemaDeclaration(props)
+     schema = NamedSchemaDeclaration(doc, props)
      |
-     message = MessageDeclaration(p, props) {
+     message = MessageDeclaration(p, doc, props) {
        p.getMessages().put(message.getName(), message);
      }
-    )  { props.clear(); }
+    ) {
+      props.clear();
+    }
   ) *
   "}"
-
   {
     p.setTypes(names.values());
+    DocCommentHelper.clearDoc();
   }
 }
 
@@ -1273,7 +1294,7 @@ Schema ImportSchema() : {
     }
 }
 
-Schema FixedDeclaration():
+Schema FixedDeclaration(String doc):
 {
   String name;
   Token sizeTok;
@@ -1282,14 +1303,14 @@ Schema FixedDeclaration():
   "fixed" name = Identifier() "(" sizeTok = <INTEGER_LITERAL> ")"
   ";"
   {
-    Schema s = Schema.createFixed(name, DocCommentHelper.getDoc(), this.namespace,
-                                  Integer.parseInt(sizeTok.image));
+    DocCommentHelper.clearDoc();
+    Schema s = Schema.createFixed(name, doc, this.namespace, Integer.parseInt(sizeTok.image));
     names.put(s.getFullName(), s);
     return s;
   }
 }
 
-Schema RecordDeclaration():
+Schema RecordDeclaration(String doc):
 {
   String name;
   List<Field> fields = new ArrayList<>();
@@ -1302,14 +1323,14 @@ Schema RecordDeclaration():
   )
   name = Identifier()
   {
-    Schema result = Schema.createRecord(
-      name, DocCommentHelper.getDoc(), this.namespace, isError);
+    Schema result = Schema.createRecord(name, doc, this.namespace, isError);
     names.put(result.getFullName(), result);
   }
-  "{"
+  "{" { DocCommentHelper.clearDoc(); }
     ( FieldDeclaration(fields) )*
   "}"
   {
+    DocCommentHelper.clearDoc();
     result.setFields(fields);
     return result;
   }
@@ -1332,27 +1353,27 @@ private void SchemaProperty(Map<String, JsonNode> properties):
 
 void FieldDeclaration(List<Field> fields):
 {
+  String defaultDoc;
   Schema type;
 }
 {
+  defaultDoc = Documentation()
   type = Type()
-  VariableDeclarator(type, fields) ( "," VariableDeclarator(type, fields) )*
-  ";"
+  VariableDeclarator(type, defaultDoc, fields) ( "," VariableDeclarator(type, defaultDoc, fields) )*
+  ";" { DocCommentHelper.clearDoc(); }
 }
 
-void VariableDeclarator(Schema type, List<Field> fields):
+void VariableDeclarator(Schema type, String defaultDoc, List<Field> fields):
 {
-  String name;
+  String doc, name;
   JsonNode defaultValue = null;
   Map<String, JsonNode> props = new LinkedHashMap<>();
 }
 {
-    ( SchemaProperty(props) )*
-
+  doc = Documentation()
+  ( SchemaProperty(props) )*
   name = Identifier()
-
-    [ <EQUALS> defaultValue=Json() ]
-
+  [ <EQUALS> defaultValue=Json() ]
   {
     Field.Order order = Field.Order.ASCENDING;
     for (String key : props.keySet())
@@ -1361,7 +1382,7 @@ void VariableDeclarator(Schema type, List<Field> fields):
 
     boolean validate = SchemaResolver.isFullyResolvedSchema(type);
     Schema fieldType = fixOptionalSchema(type, defaultValue);
-    Field field = Accessor.createField(name, fieldType, DocCommentHelper.getDoc(), defaultValue, validate, order);
+    Field field = Accessor.createField(name, fieldType, doc == null ? defaultDoc : doc, defaultValue, validate, order);
     for (String key : props.keySet())
       if ("order".equals(key)) {                  // already handled: ignore
       } else if ("aliases".equals(key)) {         // aliases
@@ -1371,22 +1392,13 @@ void VariableDeclarator(Schema type, List<Field> fields):
         Accessor.addProp(field, key, props.get(key));
       }
     fields.add(field);
+    DocCommentHelper.clearDoc();
   }
 }
 
 
-String MessageDocumentation():
-{}
-{
-   // Don't parse anything, just return the doc string
-   {
-       return DocCommentHelper.getDoc();
-   }
-}
-
-private Message MessageDeclaration(Protocol p, Map<String, JsonNode> props):
+private Message MessageDeclaration(Protocol p, String msgDoc, Map<String, JsonNode> props):
 {
-  String msgDoc;
   String name;
   Schema request;
   Schema response;
@@ -1395,13 +1407,12 @@ private Message MessageDeclaration(Protocol p, Map<String, JsonNode> props):
   errorSchemata.add(Protocol.SYSTEM_ERROR);
 }
 {
-  msgDoc = MessageDocumentation()
-  response = ResultType()
-  name = Identifier()
+  response = ResultType()  name = Identifier()
   request = FormalParameters()
   [ "oneway" {oneWay = true; } | "throws" ErrorList(errorSchemata) ]
   ";"
   {
+    DocCommentHelper.clearDoc();
     Schema errors = Schema.createUnion(errorSchemata);
     if (oneWay && response.getType() != Type.NULL)
       throw error("One-way message'"+name+"' must return void", token);
@@ -1426,19 +1437,23 @@ Schema FormalParameters():
   List<Field> fields = new ArrayList<>();
 }
 {
-  "(" [ FormalParameter(fields) ( "," FormalParameter(fields) )* ] ")"
+  "(" { DocCommentHelper.clearDoc(); }
+  [ FormalParameter(fields) ( "," FormalParameter(fields) )* ] ")"
   {
+    DocCommentHelper.clearDoc();
     return Schema.createRecord(null, null, null, false, fields);
   }
 }
 
 void FormalParameter(List<Field> fields):
 {
+  String doc;
   Schema type;
 }
 {
+  doc = Documentation()
   type = Type()
-  VariableDeclarator(type, fields)
+  VariableDeclarator(type, doc, fields)
 }
 
 Schema Type():
diff --git a/lang/java/compiler/src/test/idl/input/comments.avdl b/lang/java/compiler/src/test/idl/input/comments.avdl
index 5c61996..38024af 100644
--- a/lang/java/compiler/src/test/idl/input/comments.avdl
+++ b/lang/java/compiler/src/test/idl/input/comments.avdl
@@ -27,41 +27,38 @@ protocol Comments {
     /** Dangling Enum8 */ A
     /** Dangling Enum9 */;
 
-    // The "Dangling Enum9" doc comment above will be attributed to this enum.
-    enum NotUndocumentedEnum {D,E}
+    enum UndocumentedEnum {D,E}
 
-    /** Dangling Fixed1 */ fixed
-    /** Dangling Fixed2 */ DocumentedFixed
-    /** Dangling Fixed3 */(
-    /** Dangling Fixed4 */ 16
-    /** Dangling Fixed5 */)
-    /** Documented Fixed Type */;
+    /** Documented Fixed Type */ fixed
+    /** Dangling Fixed1 */ DocumentedFixed
+    /** Dangling Fixed2 */(
+    /** Dangling Fixed3 */ 16
+    /** Dangling Fixed4 */)
+    /** Dangling Fixed5 */;
 
     fixed UndocumentedFixed(16);
 
-    /** Dangling Error1 */ error
-    /** Documented Error */ DocumentedError
+    /** Documented Error */ error
+    /** Dangling Error1 */ DocumentedError
     /** Dangling Field1 */{
-    /** Dangling Field2 */string
-    /** Dangling Field3 */reason
-    /** Documented Field */;
+    /** Default Doc Explanation Field */string
+    /** Documented Reason Field */reason, explanation
+    /** Dangling Field2 */;
     /** Dangling Error2 */}
 
-    // The "Dangling Error2" doc comment above will be attributed to this record.
-    record NotUndocumentedRecord {
+    record UndocumentedRecord {
         string description;
     }
 
     /** Documented Method */ void
     /** Dangling Param1 */ documentedMethod
     /** Dangling Param2 */(
-        /** Dangling Param3 */ string
-        /** Dangling Param4 */ message
-    /** Documented Parameter */)
+        string /** Documented Parameter */ message,
+        /** Default Documented Parameter */ string defMsg
+    /** Dangling Param3 */)
     /** Dangling Method1 */ throws
     /** Dangling Method2 */ DocumentedError
     /** Dangling Method3 */;
 
-    // The "Dangling Method3" doc comment above will be attributed to this method.
-    void notUndocumentedMethod(string message);
+    void undocumentedMethod(string message);
 }
diff --git a/lang/java/compiler/src/test/idl/output/comments.avpr b/lang/java/compiler/src/test/idl/output/comments.avpr
index 2d98962..9901f8e 100644
--- a/lang/java/compiler/src/test/idl/output/comments.avpr
+++ b/lang/java/compiler/src/test/idl/output/comments.avpr
@@ -9,8 +9,7 @@
     "default" : "A"
   }, {
     "type" : "enum",
-    "name" : "NotUndocumentedEnum",
-    "doc" : "Dangling Enum9",
+    "name" : "UndocumentedEnum",
     "symbols" : [ "D", "E" ]
   }, {
     "type" : "fixed",
@@ -28,12 +27,15 @@
     "fields" : [ {
       "name" : "reason",
       "type" : "string",
-      "doc" : "Documented Field"
+      "doc" : "Documented Reason Field"
+    }, {
+      "name" : "explanation",
+      "type" : "string",
+      "doc" : "Default Doc Explanation Field"
     } ]
   }, {
     "type" : "record",
-    "name" : "NotUndocumentedRecord",
-    "doc" : "Dangling Error2",
+    "name" : "UndocumentedRecord",
     "fields" : [ {
       "name" : "description",
       "type" : "string"
@@ -46,12 +48,15 @@
         "name" : "message",
         "type" : "string",
         "doc" : "Documented Parameter"
+      }, {
+        "name" : "defMsg",
+        "type" : "string",
+        "doc" : "Default Documented Parameter"
       } ],
       "response" : "null",
       "errors" : [ "DocumentedError" ]
     },
-    "notUndocumentedMethod" : {
-      "doc" : "Dangling Method3",
+    "undocumentedMethod" : {
       "request" : [ {
         "name" : "message",
         "type" : "string"
diff --git a/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java b/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java
index 7e609ba..37e6b2b 100644
--- a/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java
+++ b/lang/java/compiler/src/test/java/org/apache/avro/compiler/idl/TestIdl.java
@@ -35,9 +35,11 @@ import java.net.URL;
 import java.net.URLClassLoader;
 import java.nio.charset.StandardCharsets;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
 
+import static java.util.Objects.requireNonNull;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertTrue;
@@ -46,12 +48,12 @@ import static org.junit.Assert.fail;
 /**
  * Simple test harness for Idl. This relies on an input/ and output/ directory.
  * Inside the input/ directory are .avdl files. Each file should have a
- * corresponding .avpr file in output/. When the test is run, it generates and
+ * corresponding .avpr file in output/. When you run the test, it generates and
  * stringifies each .avdl file and compares it to the expected output, failing
  * if the two differ.
  *
  * To make it simpler to write these tests, you can run ant -Dtestcase=TestIdl
- * -Dtest.idl.mode=write which will *replace* all expected output.
+ * -Dtest.idl.mode=write, which will *replace* all expected output.
  */
 public class TestIdl {
   private static final File TEST_DIR = new File(System.getProperty("test.idl.dir", "src/test/idl"));
@@ -71,7 +73,7 @@ public class TestIdl {
     assertTrue(TEST_OUTPUT_DIR.exists());
 
     tests = new ArrayList<>();
-    for (File inF : TEST_INPUT_DIR.listFiles()) {
+    for (File inF : requireNonNull(TEST_INPUT_DIR.listFiles())) {
       if (!inF.getName().endsWith(".avdl"))
         continue;
       if (inF.getName().startsWith("."))
@@ -100,7 +102,7 @@ public class TestIdl {
     }
 
     if (failed > 0) {
-      fail(String.valueOf(failed) + " tests failed");
+      fail(failed + " tests failed");
     }
   }
 
@@ -121,44 +123,45 @@ public class TestIdl {
       final List<String> warnings = parser.getWarningsAfterParsing();
 
       assertEquals("Documented Enum", protocol.getType("testing.DocumentedEnum").getDoc());
-      assertEquals("Dangling Enum9", protocol.getType("testing.NotUndocumentedEnum").getDoc()); // Arguably a bug
+
       assertEquals("Documented Fixed Type", protocol.getType("testing.DocumentedFixed").getDoc());
-      assertNull(protocol.getType("testing.UndocumentedFixed").getDoc());
+
       final Schema documentedError = protocol.getType("testing.DocumentedError");
       assertEquals("Documented Error", documentedError.getDoc());
-      assertEquals("Documented Field", documentedError.getField("reason").doc());
-      assertEquals("Dangling Error2", protocol.getType("testing.NotUndocumentedRecord").getDoc()); // Arguably a bug
+      assertEquals("Documented Reason Field", documentedError.getField("reason").doc());
+      assertEquals("Default Doc Explanation Field", documentedError.getField("explanation").doc());
+
       final Map<String, Protocol.Message> messages = protocol.getMessages();
-      assertEquals("Documented Method", messages.get("documentedMethod").getDoc());
-      assertEquals("Documented Parameter", messages.get("documentedMethod").getRequest().getField("message").doc());
-      assertEquals("Dangling Method3", messages.get("notUndocumentedMethod").getDoc()); // Arguably a bug
+      final Protocol.Message documentedMethod = messages.get("documentedMethod");
+      assertEquals("Documented Method", documentedMethod.getDoc());
+      assertEquals("Documented Parameter", documentedMethod.getRequest().getField("message").doc());
+      assertEquals("Default Documented Parameter", documentedMethod.getRequest().getField("defMsg").doc());
 
-      assertEquals(23, warnings.size());
-      final String pattern = "Found documentation comment at line %d, column %d. Ignoring previous one at line %d, column %d: \"%s\""
+      assertNull(protocol.getType("testing.UndocumentedEnum").getDoc());
+      assertNull(protocol.getType("testing.UndocumentedFixed").getDoc());
+      assertNull(protocol.getType("testing.UndocumentedRecord").getDoc());
+      assertNull(messages.get("undocumentedMethod").getDoc());
+
+      final String pattern1 = "Found documentation comment at line %d, column %d. Ignoring previous one at line %d, column %d: \"%s\""
+          + "\nDid you mean to use a multiline comment ( /* ... */ ) instead?";
+      final String pattern2 = "Ignoring out-of-place documentation comment at line %d, column %d: \"%s\""
           + "\nDid you mean to use a multiline comment ( /* ... */ ) instead?";
-      assertEquals(String.format(pattern, 21, 47, 21, 10, "Dangling Enum1"), warnings.get(0));
-      assertEquals(String.format(pattern, 22, 9, 21, 47, "Dangling Enum2"), warnings.get(1));
-      assertEquals(String.format(pattern, 23, 9, 22, 9, "Dangling Enum3"), warnings.get(2));
-      assertEquals(String.format(pattern, 24, 9, 23, 9, "Dangling Enum4"), warnings.get(3));
-      assertEquals(String.format(pattern, 25, 5, 24, 9, "Dangling Enum5"), warnings.get(4));
-      assertEquals(String.format(pattern, 26, 5, 25, 5, "Dangling Enum6"), warnings.get(5));
-      assertEquals(String.format(pattern, 27, 5, 26, 5, "Dangling Enum7"), warnings.get(6));
-      assertEquals(String.format(pattern, 28, 5, 27, 5, "Dangling Enum8"), warnings.get(7));
-      assertEquals(String.format(pattern, 34, 5, 33, 5, "Dangling Fixed1"), warnings.get(8));
-      assertEquals(String.format(pattern, 35, 5, 34, 5, "Dangling Fixed2"), warnings.get(9));
-      assertEquals(String.format(pattern, 36, 5, 35, 5, "Dangling Fixed3"), warnings.get(10));
-      assertEquals(String.format(pattern, 37, 5, 36, 5, "Dangling Fixed4"), warnings.get(11));
-      assertEquals(String.format(pattern, 38, 5, 37, 5, "Dangling Fixed5"), warnings.get(12));
-      assertEquals(String.format(pattern, 43, 5, 42, 5, "Dangling Error1"), warnings.get(13));
-      assertEquals(String.format(pattern, 45, 5, 44, 5, "Dangling Field1"), warnings.get(14));
-      assertEquals(String.format(pattern, 46, 5, 45, 5, "Dangling Field2"), warnings.get(15));
-      assertEquals(String.format(pattern, 47, 5, 46, 5, "Dangling Field3"), warnings.get(16));
-      assertEquals(String.format(pattern, 57, 5, 56, 5, "Dangling Param1"), warnings.get(17));
-      assertEquals(String.format(pattern, 58, 9, 57, 5, "Dangling Param2"), warnings.get(18));
-      assertEquals(String.format(pattern, 59, 9, 58, 9, "Dangling Param3"), warnings.get(19));
-      assertEquals(String.format(pattern, 60, 5, 59, 9, "Dangling Param4"), warnings.get(20));
-      assertEquals(String.format(pattern, 62, 5, 61, 5, "Dangling Method1"), warnings.get(21));
-      assertEquals(String.format(pattern, 63, 5, 62, 5, "Dangling Method2"), warnings.get(22));
+      assertEquals(Arrays.asList(String.format(pattern1, 21, 47, 21, 10, "Dangling Enum1"),
+          String.format(pattern2, 21, 47, "Dangling Enum2"), String.format(pattern1, 23, 9, 22, 9, "Dangling Enum3"),
+          String.format(pattern1, 24, 9, 23, 9, "Dangling Enum4"),
+          String.format(pattern1, 25, 5, 24, 9, "Dangling Enum5"), String.format(pattern2, 25, 5, "Dangling Enum6"),
+          String.format(pattern1, 27, 5, 26, 5, "Dangling Enum7"),
+          String.format(pattern1, 28, 5, 27, 5, "Dangling Enum8"), String.format(pattern2, 28, 5, "Dangling Enum9"),
+          String.format(pattern1, 34, 5, 33, 5, "Dangling Fixed1"),
+          String.format(pattern1, 35, 5, 34, 5, "Dangling Fixed2"),
+          String.format(pattern1, 36, 5, 35, 5, "Dangling Fixed3"),
+          String.format(pattern1, 37, 5, 36, 5, "Dangling Fixed4"), String.format(pattern2, 37, 5, "Dangling Fixed5"),
+          String.format(pattern1, 43, 5, 42, 5, "Dangling Error1"), String.format(pattern2, 43, 5, "Dangling Field1"),
+          String.format(pattern2, 46, 5, "Dangling Field2"), String.format(pattern2, 47, 5, "Dangling Error2"),
+          String.format(pattern1, 55, 5, 54, 5, "Dangling Param1"), String.format(pattern2, 55, 5, "Dangling Param2"),
+          String.format(pattern2, 58, 5, "Dangling Param3"), String.format(pattern1, 60, 5, 59, 5, "Dangling Method1"),
+          String.format(pattern1, 61, 5, 60, 5, "Dangling Method2"),
+          String.format(pattern2, 61, 5, "Dangling Method3")), warnings);
     }
   }
 

[avro] 01/02: AVRO-3285: Upgrade JavaCC plugin and library (#1447)

Posted by rs...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

rskraba pushed a commit to branch branch-1.11
in repository https://gitbox.apache.org/repos/asf/avro.git

commit 34e75a3995feb26721ecb90e89023392e27d9f8b
Author: Oscar Westra van Holthe - Kind <op...@users.noreply.github.com>
AuthorDate: Thu Jan 13 14:39:43 2022 +0100

    AVRO-3285: Upgrade JavaCC plugin and library (#1447)
    
    Also resolve IDE warnings for idl.jj, which was not possible before this upgrade.
---
 lang/java/compiler/pom.xml                         | 30 ++++----
 .../javacc/org/apache/avro/compiler/idl/idl.jj     | 83 ++++++++--------------
 lang/java/pom.xml                                  | 31 ++++----
 3 files changed, 63 insertions(+), 81 deletions(-)
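
One of the library swaps in this commit replaces org.apache.commons.lang3.StringEscapeUtils
with its org.apache.commons.text successor (see the idl.jj import and the pom.xml dependency
hunks below). A hedged aside, not part of the commit: commons-text provides the same family
of static escape/unescape helpers that commons-lang3 deprecated, so call sites only need the
new import. The class name below and the choice of unescapeJava() are illustrative; the diff
itself only shows the import and dependency change.

    import org.apache.commons.text.StringEscapeUtils;

    public class EscapeDemo {
      public static void main(String[] args) {
        // commons-text hosts the escape/unescape helpers that commons-lang3 deprecated,
        // so migrating call sites is an import change rather than a rewrite.
        String raw = "line one\\nline two \\u00e9";
        System.out.println(StringEscapeUtils.unescapeJava(raw));
      }
    }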

diff --git a/lang/java/compiler/pom.xml b/lang/java/compiler/pom.xml
index e5f4756..380b526 100644
--- a/lang/java/compiler/pom.xml
+++ b/lang/java/compiler/pom.xml
@@ -1,11 +1,11 @@
 <?xml version="1.0" encoding="UTF-8"?>
 <!--
    Licensed to the Apache Software Foundation (ASF) under one or more
-   contributor license agreements.  See the NOTICE file distributed with
+   contributor license agreements. See the NOTICE file distributed with
    this work for additional information regarding copyright ownership.
    The ASF licenses this file to You under the Apache License, Version 2.0
    (the "License"); you may not use this file except in compliance with
-   the License.  You may obtain a copy of the License at
+   the License. You may obtain a copy of the License at:
 
        https://www.apache.org/licenses/LICENSE-2.0
 
@@ -24,7 +24,7 @@
     <artifactId>avro-parent</artifactId>
     <groupId>org.apache.avro</groupId>
     <version>1.11.1-SNAPSHOT</version>
-    <relativePath>../</relativePath>
+    <relativePath>../pom.xml</relativePath>
   </parent>
 
   <artifactId>avro-compiler</artifactId>
@@ -93,7 +93,7 @@
         and outputs to target/generated-sources/javacc See http://mojo.codehaus.org/javacc-maven-plugin/javacc-mojo.html
         for more info on using this plugin. -->
       <plugin>
-        <groupId>org.codehaus.mojo</groupId>
+        <groupId>org.javacc.plugin</groupId>
         <artifactId>javacc-maven-plugin</artifactId>
         <executions>
           <execution>
@@ -133,11 +133,11 @@
               <classpathScope>test</classpathScope>
               <arguments>
                 <argument>-classpath</argument>
-                <classpath></classpath>
+                <classpath/>
                 <argument>org.apache.avro.compiler.specific.SchemaTask</argument>
                 <argument>${project.basedir}/src/test/resources/full_record_v1.avsc</argument>
                 <argument>${project.basedir}/src/test/resources/full_record_v2.avsc</argument>
-                <argument>${project.basedir}/target/generated-test-sources</argument>
+                <argument>${project.basedir}/target/generated-test-sources/javacc</argument>
               </arguments>
             </configuration>
           </execution>
@@ -149,10 +149,8 @@
         <executions>
           <execution>
             <!--
-              Usually code is generated using a special-purpose maven plugin and the plugin
-              automatically adds the generated sources into project.
-              Here since general-purpose exec plugin is used for generating code, we need to manually
-              add the sources.
+              Special-purpose maven plugins that generate code usually add the generated sources into the project.
+              But because the general-purpose exec plugin above generated code, we need to manually add the generated sources.
             -->
             <id>add-test-source</id>
             <phase>generate-test-sources</phase>
@@ -161,16 +159,14 @@
             </goals>
             <configuration>
               <sources>
-                <source>${project.basedir}/target/generated-test-sources</source>
+                <source>${project.basedir}/target/generated-test-sources/javacc</source>
               </sources>
             </configuration>
           </execution>
           <execution>
             <!--
-              Usually code is generated using a special-purpose maven plugin and the plugin
-              automatically adds the generated sources into project.
-              Here since general-purpose exec plugin is used for generating code, we need to manually
-              add the sources.
+              Special-purpose maven plugins that generate code usually add the generated sources into the project.
+              But because the general-purpose exec plugin above generated code, we need to manually add the generated sources.
             -->
             <id>add-source</id>
             <phase>generate-sources</phase>
@@ -225,8 +221,8 @@
     </dependency>
     <dependency>
       <groupId>org.apache.commons</groupId>
-      <artifactId>commons-lang3</artifactId>
-      <version>${commons-lang.version}</version>
+      <artifactId>commons-text</artifactId>
+      <version>${commons-text.version}</version>
     </dependency>
     <dependency>
       <groupId>org.apache.velocity</groupId>
diff --git a/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj b/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
index 5fe7871..ee467ff 100644
--- a/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
+++ b/lang/java/compiler/src/main/javacc/org/apache/avro/compiler/idl/idl.jj
@@ -81,7 +81,7 @@ import org.apache.avro.util.internal.Accessor;
 import com.fasterxml.jackson.databind.JsonNode;
 import com.fasterxml.jackson.databind.node.*;
 
-import org.apache.commons.lang3.StringEscapeUtils;
+import org.apache.commons.text.StringEscapeUtils;
 
 /**
  * Grammar to parse a higher-level language into an Avro Schema.
@@ -96,7 +96,7 @@ public class Idl implements Closeable {
   URI inputDir;
   ClassLoader resourceLoader = null;
   String namespace;
-  Map<String,Schema> names = new LinkedHashMap<String,Schema>();
+  Map<String,Schema> names = new LinkedHashMap<>();
 
   private List<String> parserWarnings = Collections.emptyList();
   /**
@@ -137,6 +137,7 @@ public class Idl implements Closeable {
     this.resourceLoader = parent.resourceLoader;
   }
 
+  @SuppressWarnings("RedundantThrows")
   public void close() throws IOException {
     jj_input_stream.inputStream.close();
   }
@@ -159,7 +160,7 @@ public class Idl implements Closeable {
     JsonNode value = props.get(key);
     if (!value.isArray())
       throw error(key+" property must be array: "+value, token);
-    List<String> values = new ArrayList<String>();
+    List<String> values = new ArrayList<>();
     for (JsonNode n : value)
       if (n.isTextual())
         values.add(n.textValue());
@@ -209,7 +210,6 @@ public class Idl implements Closeable {
     }
     return schema;
   }
-
 }
 
 PARSER_END(Idl)
@@ -248,13 +248,13 @@ MORE :
 <DOC_COMMENT>
 SPECIAL_TOKEN :
 {
-  <"*/" > {DocCommentHelper.setDoc(matchedToken);} : DEFAULT
+  "*/" {DocCommentHelper.setDoc(matchedToken);} : DEFAULT
 }
 
 <MULTI_LINE_COMMENT>
 SKIP :
 {
-  <"*/" > : DEFAULT
+  "*/" : DEFAULT
 }
 
 /* RESERVED WORDS AND LITERALS */
@@ -1053,7 +1053,7 @@ Protocol CompilationUnit():
 }
 {
   p = ProtocolDeclaration()
-  ( < "\u001a" > )?
+  ( "\u001a" )?
   ( <STUFF_TO_IGNORE: ~[]> )?
   <EOF>
   {
@@ -1102,7 +1102,7 @@ private Schema NamedSchemaDeclaration(Map<String, JsonNode> props):
 Schema UnionDefinition():
 {
   Schema s;
-  List<Schema> schemata = new ArrayList<Schema>();
+  List<Schema> schemata = new ArrayList<>();
 }
 {
   // Don't disallow unions here: its constructor disallows nested unions and throws a descriptive exception.
@@ -1127,7 +1127,7 @@ Protocol ProtocolDeclaration():
 {
   String name;
   Protocol p;
-  Map<String, JsonNode> props = new LinkedHashMap<String, JsonNode>();
+  Map<String, JsonNode> props = new LinkedHashMap<>();
 }
 {
   ( SchemaProperty(props) )*
@@ -1164,8 +1164,7 @@ Schema EnumDeclaration():
   symbols = EnumBody()
       [ <EQUALS> defaultSymbol=Identifier() <SEMICOLON>]
   {
-    Schema s = Schema.createEnum(name, doc, this.namespace, symbols,
-                                 defaultSymbol);
+    Schema s = Schema.createEnum(name, doc, namespace, symbols, defaultSymbol);
     names.put(s.getFullName(), s);
     return s;
   }
@@ -1173,7 +1172,7 @@ Schema EnumDeclaration():
 
 List<String> EnumBody():
 {
-  List<String> symbols = new ArrayList<String>();
+  List<String> symbols = new ArrayList<>();
 }
 {
   "{"
@@ -1197,7 +1196,7 @@ void ProtocolBody(Protocol p):
   Schema schema;
   Message message;
   Protocol importProtocol;
-  Map<String, JsonNode> props = new LinkedHashMap<String, JsonNode>();
+  Map<String, JsonNode> props = new LinkedHashMap<>();
 }
 {
   "{"
@@ -1234,13 +1233,8 @@ Protocol ImportIdl() : {
 {
   <IDL> importFile = JsonString() ";"
     {
-      try {
-        Idl idl = new Idl(findFile(importFile), this);
-        try {
-          return idl.CompilationUnit();
-        } finally {
-          idl.close();
-        }
+      try (Idl idl=new Idl(findFile(importFile), this)){
+        return idl.CompilationUnit();
       } catch (IOException e) {
         throw error("Error importing "+importFile+": "+e, token);
       }
@@ -1253,14 +1247,8 @@ Protocol ImportProtocol() : {
 {
   <PROTOCOL> importFile = JsonString() ";"
     {
-
-      try {
-        InputStream stream = findFile(importFile).openStream();
-        try {
-          return Protocol.parse(stream);
-        } finally {
-          stream.close();
-        }
+      try (InputStream stream=findFile(importFile).openStream()) {
+        return Protocol.parse(stream);
       } catch (IOException e) {
         throw error("Error importing "+importFile+": "+e, token);
       }
@@ -1273,17 +1261,12 @@ Schema ImportSchema() : {
 {
   <SCHEMA> importFile = JsonString() ";"
     {
-      try {
+      try (InputStream stream=findFile(importFile).openStream()){
         Parser parser = new Schema.Parser();
         parser.addTypes(names);                   // inherit names
-        InputStream stream = findFile(importFile).openStream();
-        try {
-          Schema value = parser.parse(stream);
-          names = parser.getTypes();                // update names
-          return value;
-        } finally {
-          stream.close();
-        }
+        Schema value = parser.parse(stream);
+        names = parser.getTypes();                // update names
+        return value;
       } catch (IOException e) {
         throw error("Error importing "+importFile+": "+e, token);
       }
@@ -1309,7 +1292,7 @@ Schema FixedDeclaration():
 Schema RecordDeclaration():
 {
   String name;
-  List<Field> fields = new ArrayList<Field>();
+  List<Field> fields = new ArrayList<>();
   boolean isError;
 }
 {
@@ -1361,7 +1344,7 @@ void VariableDeclarator(Schema type, List<Field> fields):
 {
   String name;
   JsonNode defaultValue = null;
-  Map<String, JsonNode> props = new LinkedHashMap<String, JsonNode>();
+  Map<String, JsonNode> props = new LinkedHashMap<>();
 }
 {
     ( SchemaProperty(props) )*
@@ -1408,7 +1391,7 @@ private Message MessageDeclaration(Protocol p, Map<String, JsonNode> props):
   Schema request;
   Schema response;
   boolean oneWay = false;
-  List<Schema> errorSchemata = new ArrayList<Schema>();
+  List<Schema> errorSchemata = new ArrayList<>();
   errorSchemata.add(Protocol.SYSTEM_ERROR);
 }
 {
@@ -1440,14 +1423,12 @@ void ErrorList(List<Schema> errors):
 
 Schema FormalParameters():
 {
-  List<Field> fields = new ArrayList<Field>();
+  List<Field> fields = new ArrayList<>();
 }
 {
-  (
-    "(" [ FormalParameter(fields) ( "," FormalParameter(fields) )* ] ")"
-  )
+  "(" [ FormalParameter(fields) ( "," FormalParameter(fields) )* ] ")"
   {
-    return Schema.createRecord(fields);
+    return Schema.createRecord(null, null, null, false, fields);
   }
 }
 
@@ -1463,7 +1444,7 @@ void FormalParameter(List<Field> fields):
 Schema Type():
 {
   Schema s;
-  Map<String, JsonNode> props = new LinkedHashMap<String, JsonNode>();
+  Map<String, JsonNode> props = new LinkedHashMap<>();
 }
 {
     ( SchemaProperty(props) )*
@@ -1557,10 +1538,8 @@ Schema ReferenceType():
   StringBuilder sb = new StringBuilder();
 }
 {
-  (
-    part = Identifier() { sb.append(part); }
-    ("." tok = AnyIdentifier() { sb.append(".").append(tok.image); })*
-    )
+  part = Identifier() { sb.append(part); }
+  ("." tok = AnyIdentifier() { sb.append(".").append(tok.image); })*
   {
     String name = sb.toString();
     if ((name.indexOf('.') == -1) && namespace != null)
@@ -1575,7 +1554,7 @@ Schema ReferenceType():
 }
 
 Schema PrimitiveType():
-{}
+{ Schema s; }
 {
   "boolean" { return Schema.create(Type.BOOLEAN); }
 | "bytes" { return Schema.create(Type.BYTES); }
@@ -1589,7 +1568,7 @@ Schema PrimitiveType():
 | "time_ms" { return LogicalTypes.timeMillis().addToSchema(Schema.create(Type.INT)); }
 | "timestamp_ms" { return LogicalTypes.timestampMillis().addToSchema(Schema.create(Type.LONG)); }
 | "local_timestamp_ms" { return LogicalTypes.localTimestampMillis().addToSchema(Schema.create(Type.LONG)); }
-| "decimal" { return DecimalTypeProperties(); }
+| "decimal" s = DecimalTypeProperties() { return s; }
 | "uuid" {return LogicalTypes.uuid().addToSchema(Schema.create(Type.STRING));}
 }
 
diff --git a/lang/java/pom.xml b/lang/java/pom.xml
index 14b7f11..7223740 100644
--- a/lang/java/pom.xml
+++ b/lang/java/pom.xml
@@ -23,7 +23,7 @@
     <groupId>org.apache.avro</groupId>
     <artifactId>avro-toplevel</artifactId>
     <version>1.11.1-SNAPSHOT</version>
-    <relativePath>../../</relativePath>
+    <relativePath>../../pom.xml</relativePath>
   </parent>
 
   <artifactId>avro-parent</artifactId>
@@ -44,28 +44,28 @@
     <jopt-simple.version>5.0.4</jopt-simple.version>
     <junit.version>4.13.2</junit.version>
     <netty.version>4.1.72.Final</netty.version>
-    <protobuf.version>3.19.4</protobuf.version>
+    <protobuf.version>3.19.1</protobuf.version>
     <thrift.version>0.15.0</thrift.version>
-    <slf4j.version>1.7.33</slf4j.version>
-    <reload4j.version>1.2.18.0</reload4j.version>
+    <slf4j.version>1.7.32</slf4j.version>
     <snappy.version>1.1.8.4</snappy.version>
     <velocity.version>2.3</velocity.version>
-    <maven-core.version>3.3.9</maven-core.version>
+    <maven.version>3.8.4</maven.version>
     <ant.version>1.10.12</ant.version>
-    <commons-cli.version>1.5.0</commons-cli.version>
+    <commons-cli.version>1.4</commons-cli.version>
     <commons-compress.version>1.21</commons-compress.version>
-    <commons-lang.version>3.12.0</commons-lang.version>
+    <commons-text.version>1.9</commons-text.version>
     <tukaani.version>1.9</tukaani.version>
     <mockito.version>4.2.0</mockito.version>
     <hamcrest.version>2.2</hamcrest.version>
-    <grpc.version>1.44.0</grpc.version>
+    <grpc.version>1.43.1</grpc.version>
     <zstd-jni.version>1.5.1-1</zstd-jni.version>
     <!-- version properties for plugins -->
-    <archetype-plugin.version>3.2.1</archetype-plugin.version>
+    <archetype-plugin.version>3.2.0</archetype-plugin.version>
     <bundle-plugin-version>5.1.4</bundle-plugin-version>
     <exec-plugin.version>3.0.0</exec-plugin.version>
     <file-management.version>3.0.0</file-management.version>
-    <javacc-plugin.version>2.6</javacc-plugin.version>
+    <javacc-plugin.version>3.0.3</javacc-plugin.version>
+    <javacc.version>7.0.10</javacc.version>
   </properties>
 
   <modules>
@@ -96,7 +96,7 @@
         <plugin>
           <groupId>org.codehaus.mojo</groupId>
           <artifactId>build-helper-maven-plugin</artifactId>
-          <version>3.3.0</version>
+          <version>3.2.0</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
@@ -211,9 +211,16 @@
           </configuration>
         </plugin>
         <plugin>
-          <groupId>org.codehaus.mojo</groupId>
+          <groupId>org.javacc.plugin</groupId>
           <artifactId>javacc-maven-plugin</artifactId>
           <version>${javacc-plugin.version}</version>
+          <dependencies>
+            <dependency>
+              <groupId>net.java.dev.javacc</groupId>
+              <artifactId>javacc</artifactId>
+              <version>${javacc.version}</version>
+            </dependency>
+          </dependencies>
         </plugin>
         <plugin>
           <groupId>org.codehaus.mojo</groupId>