Posted to commits@lucene.apache.org by dw...@apache.org on 2020/09/29 08:24:29 UTC

[lucene-solr] branch master updated (8b329a0 -> 2b692cc)

This is an automated email from the ASF dual-hosted git repository.

dweiss pushed a change to branch master
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git.


    from 8b329a0  SOLR-14850: Correct the spelling in contributor's name.
     new 3ae0b50  LUCENE-9546: Configure Nori and Kuromoji generation lazily when java plugin is applied to the projects
     new 65a62b0  Remove unused imports.
     new 2b692cc  LUCENE-9547: Race condition in maven artifact configuration results in wrong group/ artifact name

The 3 revisions listed above as "new" are entirely new to this
repository and will be described in separate emails.  The revisions
listed as "add" were already present in the repository and have only
been added to this reference.


Summary of changes:
 gradle/generation/kuromoji.gradle  | 173 +++++++++++++++++++------------------
 gradle/generation/nori.gradle      |  77 +++++++++--------
 gradle/maven/defaults-maven.gradle |  41 +++++----
 lucene/luke/build.gradle           |   2 -
 4 files changed, 152 insertions(+), 141 deletions(-)


[lucene-solr] 02/03: Remove unused imports.

Posted by dw...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dweiss pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 65a62b04c5b231e733f7fab2f82873aa450ecfcc
Author: Dawid Weiss <da...@carrotsearch.com>
AuthorDate: Tue Sep 29 09:41:53 2020 +0200

    Remove unused imports.
---
 lucene/luke/build.gradle | 2 --
 1 file changed, 2 deletions(-)

diff --git a/lucene/luke/build.gradle b/lucene/luke/build.gradle
index 9b6f47b..719e11f 100644
--- a/lucene/luke/build.gradle
+++ b/lucene/luke/build.gradle
@@ -14,9 +14,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-import org.apache.tools.ant.taskdefs.condition.Os
 import org.apache.tools.ant.filters.*
-import java.nio.file.Files
 
 apply plugin: 'java-library'
 


[lucene-solr] 01/03: LUCENE-9546: Configure Nori and Kuromoji generation lazily when java plugin is applied to the projects

Posted by dw...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dweiss pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 3ae0b506463d76701659206f1636d99c439e982b
Author: Dawid Weiss <da...@carrotsearch.com>
AuthorDate: Tue Sep 29 09:41:36 2020 +0200

    LUCENE-9546: Configure Nori and Kuromoji generation lazily when java plugin is applied to the projects
---
 gradle/generation/kuromoji.gradle | 173 +++++++++++++++++++-------------------
 gradle/generation/nori.gradle     |  77 ++++++++---------
 2 files changed, 126 insertions(+), 124 deletions(-)

diff --git a/gradle/generation/kuromoji.gradle b/gradle/generation/kuromoji.gradle
index c865a13..8dc082b 100644
--- a/gradle/generation/kuromoji.gradle
+++ b/gradle/generation/kuromoji.gradle
@@ -30,102 +30,103 @@ def recompileDictionary(project, dictionaryName, Closure closure) {
 }
 
 configure(project(":lucene:analysis:kuromoji")) {
-  apply plugin: 'java-library'
   apply plugin: "de.undercouch.download"
 
-  ext {
-    targetDir = file("src/resources")
-  }
-
-  task deleteDictionaryData() {
-    // There should really be just one but since we don't know which
-    // one it'll be, let's process all of them.
-    doFirst {
-      sourceSets.main.resources.srcDirs.each { location ->
-        delete fileTree(dir: location, include: "org/apache/lucene/analysis/ja/dict/*.dat")
-      }
+  plugins.withType(JavaPlugin) {
+    ext {
+      targetDir = file("src/resources")
     }
-  }
-
-  task compileMecab(type: Download) {
-    description "Recompile dictionaries from Mecab data."
-    group "generation"
-
-    dependsOn deleteDictionaryData
-    dependsOn sourceSets.main.runtimeClasspath
-
-    def dictionaryName = "mecab-ipadic-2.7.0-20070801"
-    def dictionarySource = "https://jaist.dl.sourceforge.net/project/mecab/mecab-ipadic/2.7.0-20070801/${dictionaryName}.tar.gz"
-    def dictionaryFile = file("${buildDir}/generate/${dictionaryName}.tar.gz")
-    def unpackedDir = file("${buildDir}/generate/${dictionaryName}")
-
-    src dictionarySource
-    dest dictionaryFile
-    onlyIfModified true
 
-    doLast {
-      // Unpack the downloaded archive.
-      delete unpackedDir
-      ant.untar(src: dictionaryFile, dest: unpackedDir, compression: "gzip") {
-        ant.cutdirsmapper(dirs: "1")
+    task deleteDictionaryData() {
+      // There should really be just one but since we don't know which
+      // one it'll be, let's process all of them.
+      doFirst {
+        sourceSets.main.resources.srcDirs.each { location ->
+          delete fileTree(dir: location, include: "org/apache/lucene/analysis/ja/dict/*.dat")
+        }
       }
+    }
 
-      // Apply patch via local git.
-      project.exec {
-        workingDir = unpackedDir
-        executable "git" // TODO: better use jgit to apply patch, this is not portable!!!
-        args += [
-            "apply",
-            file("src/tools/patches/Noun.proper.csv.patch").absolutePath
-        ]
+    task compileMecab(type: Download) {
+      description "Recompile dictionaries from Mecab data."
+      group "generation"
+
+      dependsOn deleteDictionaryData
+      dependsOn sourceSets.main.runtimeClasspath
+
+      def dictionaryName = "mecab-ipadic-2.7.0-20070801"
+      def dictionarySource = "https://jaist.dl.sourceforge.net/project/mecab/mecab-ipadic/2.7.0-20070801/${dictionaryName}.tar.gz"
+      def dictionaryFile = file("${buildDir}/generate/${dictionaryName}.tar.gz")
+      def unpackedDir = file("${buildDir}/generate/${dictionaryName}")
+
+      src dictionarySource
+      dest dictionaryFile
+      onlyIfModified true
+
+      doLast {
+        // Unpack the downloaded archive.
+        delete unpackedDir
+        ant.untar(src: dictionaryFile, dest: unpackedDir, compression: "gzip") {
+          ant.cutdirsmapper(dirs: "1")
+        }
+
+        // Apply patch via local git.
+        project.exec {
+          workingDir = unpackedDir
+          executable "git" // TODO: better use jgit to apply patch, this is not portable!!!
+          args += [
+              "apply",
+              file("src/tools/patches/Noun.proper.csv.patch").absolutePath
+          ]
+        }
+
+        // Compile the dictionary
+        recompileDictionary(project, dictionaryName, {
+          args += [
+              "ipadic",
+              unpackedDir,
+              targetDir,
+              "euc-jp",
+              false
+          ]
+        })
       }
-
-      // Compile the dictionary
-      recompileDictionary(project, dictionaryName, {
-        args += [
-            "ipadic",
-            unpackedDir,
-            targetDir,
-            "euc-jp",
-            false
-        ]
-      })
     }
-  }
-
-  task compileNaist(type: Download) {
-    description "Recompile dictionaries from Naist data."
-    group "generation"
 
-    dependsOn deleteDictionaryData
-    dependsOn sourceSets.main.runtimeClasspath
-
-    def dictionaryName = "mecab-naist-jdic-0.6.3b-20111013"
-    def dictionarySource = "https://rwthaachen.dl.osdn.jp/naist-jdic/53500/${dictionaryName}.tar.gz"
-    def dictionaryFile = file("${buildDir}/generate/${dictionaryName}.tar.gz")
-    def unpackedDir = file("${buildDir}/generate/${dictionaryName}")
-
-    src dictionarySource
-    dest dictionaryFile
-    onlyIfModified true
-
-    doLast {
-      // Unpack the downloaded archive.
-      delete unpackedDir
-      ant.untar(src: dictionaryFile, dest: unpackedDir, compression: "gzip") {
-        ant.cutdirsmapper(dirs: "1")
+    task compileNaist(type: Download) {
+      description "Recompile dictionaries from Naist data."
+      group "generation"
+
+      dependsOn deleteDictionaryData
+      dependsOn sourceSets.main.runtimeClasspath
+
+      def dictionaryName = "mecab-naist-jdic-0.6.3b-20111013"
+      def dictionarySource = "https://rwthaachen.dl.osdn.jp/naist-jdic/53500/${dictionaryName}.tar.gz"
+      def dictionaryFile = file("${buildDir}/generate/${dictionaryName}.tar.gz")
+      def unpackedDir = file("${buildDir}/generate/${dictionaryName}")
+
+      src dictionarySource
+      dest dictionaryFile
+      onlyIfModified true
+
+      doLast {
+        // Unpack the downloaded archive.
+        delete unpackedDir
+        ant.untar(src: dictionaryFile, dest: unpackedDir, compression: "gzip") {
+          ant.cutdirsmapper(dirs: "1")
+        }
+
+        // Compile the dictionary
+        recompileDictionary(project, dictionaryName, {
+          args += [
+              "ipadic",
+              unpackedDir,
+              targetDir,
+              "euc-jp",
+              false
+          ]
+        })
       }
-
-      // Compile the dictionary
-      recompileDictionary(project, dictionaryName, {
-        args += [
-            "ipadic",
-            unpackedDir,
-            targetDir,
-            "euc-jp",
-            false
-        ]
-      })
     }
   }
 }
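
The "Apply patch via local git" step in the hunk above carries a TODO noting that shelling out to the local git binary is not portable and that JGit would be preferable. A minimal, hypothetical sketch of what that could look like, assuming org.eclipse.jgit were available on the buildscript classpath (it is not part of this commit, and the helper name is illustrative):

    // Hypothetical JGit-based variant of the project.exec { executable "git" ... } step.
    // Assumes org.eclipse.jgit on the buildscript classpath; not part of this commit.
    import org.eclipse.jgit.api.Git

    def applyPatchWithJgit(File workDir, File patchFile) {
      // ApplyCommand needs a repository, so initialize one in the unpacked directory.
      def git = Git.init().setDirectory(workDir).call()
      try {
        patchFile.withInputStream { stream ->
          git.apply().setPatch(stream).call()
        }
      } finally {
        git.close()
      }
    }

    // Usage, in place of the exec block:
    //   applyPatchWithJgit(unpackedDir, file("src/tools/patches/Noun.proper.csv.patch"))
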
diff --git a/gradle/generation/nori.gradle b/gradle/generation/nori.gradle
index eb6afa1..78da8cc 100644
--- a/gradle/generation/nori.gradle
+++ b/gradle/generation/nori.gradle
@@ -30,55 +30,56 @@ def recompileDictionary(project, dictionaryName, Closure closure) {
 }
 
 configure(project(":lucene:analysis:nori")) {
-  apply plugin: 'java-library'
   apply plugin: "de.undercouch.download"
 
-  ext {
-    targetDir = file("src/resources")
-  }
+  plugins.withType(JavaPlugin) {
+    ext {
+      targetDir = file("src/resources")
+    }
 
-  task deleteDictionaryData() {
-    // There should really be just one but since we don't know which
-    // one it'll be, let's process all of them.
-    doFirst {
-      sourceSets.main.resources.srcDirs.each { location ->
-        delete fileTree(dir: location, include: "org/apache/lucene/analysis/ko/dict/*.dat")
+    task deleteDictionaryData() {
+      // There should really be just one but since we don't know which
+      // one it'll be, let's process all of them.
+      doFirst {
+        sourceSets.main.resources.srcDirs.each { location ->
+          delete fileTree(dir: location, include: "org/apache/lucene/analysis/ko/dict/*.dat")
+        }
       }
     }
-  }
 
-  task compileMecabKo(type: Download) {
-    description "Recompile dictionaries from Mecab-Ko data."
-    group "generation"
+    task compileMecabKo(type: Download) {
+      description "Recompile dictionaries from Mecab-Ko data."
+      group "generation"
 
-    dependsOn deleteDictionaryData
-    dependsOn sourceSets.main.runtimeClasspath
+      dependsOn deleteDictionaryData
+      dependsOn sourceSets.main.runtimeClasspath
 
-    def dictionaryName = "mecab-ko-dic-2.0.3-20170922"
-    def dictionarySource = "https://bitbucket.org/eunjeon/mecab-ko-dic/downloads/${dictionaryName}.tar.gz"
-    def dictionaryFile = file("${buildDir}/generate/${dictionaryName}.tar.gz")
-    def unpackedDir = file("${buildDir}/generate/${dictionaryName}")
+      def dictionaryName = "mecab-ko-dic-2.0.3-20170922"
+      def dictionarySource = "https://bitbucket.org/eunjeon/mecab-ko-dic/downloads/${dictionaryName}.tar.gz"
+      def dictionaryFile = file("${buildDir}/generate/${dictionaryName}.tar.gz")
+      def unpackedDir = file("${buildDir}/generate/${dictionaryName}")
 
-    src dictionarySource
-    dest dictionaryFile
-    onlyIfModified true
+      src dictionarySource
+      dest dictionaryFile
+      onlyIfModified true
 
-    doLast {
-      // Unpack the downloaded archive.
-      delete unpackedDir
-      ant.untar(src: dictionaryFile, dest: unpackedDir, compression: "gzip") {
-        ant.cutdirsmapper(dirs: "1")
-      }
+      doLast {
+        // Unpack the downloaded archive.
+        delete unpackedDir
+        ant.untar(src: dictionaryFile, dest: unpackedDir, compression: "gzip") {
+          ant.cutdirsmapper(dirs: "1")
+        }
 
-      // Compile the dictionary
-      recompileDictionary(project, dictionaryName, {
-        args += [
-            unpackedDir,
-            targetDir,
-            "utf-8",
-            false
-        ]
-      })
+        // Compile the dictionary
+        recompileDictionary(project, dictionaryName, {
+          args += [
+              unpackedDir,
+              targetDir,
+              "utf-8",
+              false
+          ]
+        })
+      }
     }
   }
 }
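
Both build files in this commit make the same structural change: the ext block and the dictionary tasks, which previously required the script to apply 'java-library' itself, are now registered inside a plugins.withType(JavaPlugin) callback, so they are only configured once (and if) the java plugin is applied to the project. A minimal sketch of the idiom, with an illustrative project path and task name that are not taken from this commit:

    // Sketch of the lazy-configuration pattern used above: the closure runs
    // only when (and if) the Java plugin is applied to the project, so
    // sourceSets and other Java conventions are guaranteed to exist.
    configure(project(":some:module")) {        // illustrative path
      plugins.withType(JavaPlugin) {
        ext {
          targetDir = file("src/resources")
        }

        task regenerateResources() {
          doFirst {
            // Safe to reference sourceSets here; the java plugin is present.
            sourceSets.main.resources.srcDirs.each { location ->
              println "would process: ${location}"
            }
          }
        }
      }
    }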


[lucene-solr] 03/03: LUCENE-9547: Race condition in maven artifact configuration results in wrong group/ artifact name

Posted by dw...@apache.org.
This is an automated email from the ASF dual-hosted git repository.

dweiss pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/lucene-solr.git

commit 2b692ccb714fe000bacceb4a5bcd21d5ae51930d
Author: Dawid Weiss <da...@carrotsearch.com>
AuthorDate: Tue Sep 29 09:42:56 2020 +0200

    LUCENE-9547: Race condition in maven artifact configuration results in wrong group/ artifact name
---
 gradle/maven/defaults-maven.gradle | 41 ++++++++++++++++++++++++--------------
 1 file changed, 26 insertions(+), 15 deletions(-)

diff --git a/gradle/maven/defaults-maven.gradle b/gradle/maven/defaults-maven.gradle
index 5c260f3..83e0051 100644
--- a/gradle/maven/defaults-maven.gradle
+++ b/gradle/maven/defaults-maven.gradle
@@ -69,26 +69,37 @@ configure(rootProject) {
         ":solr:test-framework",
     ]
   }
+}
 
-  configure(subprojects.findAll { it.path in rootProject.published }) {
-    apply plugin: 'maven-publish'
-    apply plugin: 'signing'
+configure(subprojects.findAll { it.path in rootProject.published }) { prj ->
+  apply plugin: 'maven-publish'
+  apply plugin: 'signing'
 
-    publishing {
-      // TODO: Add publishing repository details.
-    }
+  publishing {
+    // TODO: Add publishing repository details.
+  }
 
-    plugins.withType(JavaPlugin) {
-      task sourcesJar(type: Jar, dependsOn: classes) {
-        archiveClassifier = 'sources'
-        from sourceSets.main.allJava
-      }
+  plugins.withType(JavaPlugin) {
+    task sourcesJar(type: Jar, dependsOn: classes) {
+      archiveClassifier = 'sources'
+      from sourceSets.main.allJava
+    }
 
-      task javadocJar(type: Jar, dependsOn: javadoc) {
-        archiveClassifier = 'javadoc'
-        from javadoc.destinationDir
-      }
+    task javadocJar(type: Jar, dependsOn: javadoc) {
+      archiveClassifier = 'javadoc'
+      from javadoc.destinationDir
+    }
 
+    // This moves publishing configuration after all the scripts of all projects
+    // have been evaluated. This is required because we set artifact groups
+    // and archivesBaseName in other scripts (artifact-naming.gradle) and
+    // maven pom does not accept lazy property providers (so everything must
+    // be in its final form).
+    //
+    // In theory project.afterEvaluate closure should also work but for some reason
+    // it fired earlier than artifact-naming.gradle; don't know whether it's a bug
+    // in gradle or just complex relationships between lazy collection hooks.
+    gradle.projectsEvaluated {
       publishing {
         def configurePom = {
           name = "Apache Solr/Lucene (${project.name})"