You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@lucene.apache.org by rm...@apache.org on 2014/10/22 07:44:21 UTC

svn commit: r1633538 [4/5] - in /lucene/dev/branches/lucene5969: ./ lucene/ lucene/analysis/ lucene/analysis/common/ lucene/analysis/icu/ lucene/analysis/kuromoji/ lucene/analysis/morfologik/ lucene/analysis/phonetic/ lucene/analysis/smartcn/ lucene/an...

Modified: lucene/dev/branches/lucene5969/solr/contrib/morphlines-core/ivy.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/contrib/morphlines-core/ivy.xml?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/contrib/morphlines-core/ivy.xml (original)
+++ lucene/dev/branches/lucene5969/solr/contrib/morphlines-core/ivy.xml Wed Oct 22 05:44:17 2014
@@ -18,103 +18,99 @@
 -->
 <ivy-module version="2.0" xmlns:maven="http://ant.apache.org/ivy/maven">
   <info organisation="org.apache.solr" module="morphlines-core" />
-  <configurations>
+  <configurations defaultconfmapping="compile->master;test->master;test.DfsMiniCluster->master">
     <!-- artifacts in the "compile" configuration will go into morphlines-core/lib/ -->
     <conf name="compile" transitive="false" />
-    <!-- artifacts in the "test" and "test.DfsMiniCluster" configuration will go into solr/core/test-lib/ -->
+    <!-- artifacts in the "test" and "test.DfsMiniCluster" configuration will go into morphlines-core/test-lib/ -->
     <conf name="test" transitive="false" />
-    <conf name="test.DfsMiniCluster" transitive="false"/>
+    <conf name="test.DfsMiniCluster" transitive="false" />
   </configurations>
 
   <dependencies>
-    <dependency org="org.kitesdk" name="kite-morphlines-core" rev="${/org.kitesdk/kite-morphlines-core}"  conf="compile->*;test->*" >
-      <artifact name="kite-morphlines-core"  ext="jar"/>
-      <artifact name="kite-morphlines-core" type="test" ext="jar" maven:classifier="tests"/>
+    <dependency org="org.kitesdk" name="kite-morphlines-core" rev="${/org.kitesdk/kite-morphlines-core}"  conf="compile;test">
+      <artifact name="kite-morphlines-core"  ext="jar" />
+      <artifact name="kite-morphlines-core" type="test" ext="jar" maven:classifier="tests" />
     </dependency>
 
-    <dependency org="org.kitesdk" name="kite-morphlines-avro" rev="${/org.kitesdk/kite-morphlines-avro}" conf="compile->*" />
+    <dependency org="org.kitesdk" name="kite-morphlines-avro" rev="${/org.kitesdk/kite-morphlines-avro}" conf="compile" />
     
-    <dependency org="com.codahale.metrics" name="metrics-core" rev="${/com.codahale.metrics/metrics-core}" conf="compile->*" />
-    <dependency org="com.codahale.metrics" name="metrics-healthchecks" rev="${/com.codahale.metrics/metrics-healthchecks}" conf="compile->*" />
-    <dependency org="com.typesafe" name="config" rev="${/com.typesafe/config}" conf="compile->*" />
+    <dependency org="com.codahale.metrics" name="metrics-core" rev="${/com.codahale.metrics/metrics-core}" conf="compile" />
+    <dependency org="com.codahale.metrics" name="metrics-healthchecks" rev="${/com.codahale.metrics/metrics-healthchecks}" conf="compile" />
+    <dependency org="com.typesafe" name="config" rev="${/com.typesafe/config}" conf="compile" />
     
     <!-- Test Dependencies -->
 
-    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-core" rev="${/org.apache.hadoop/hadoop-mapreduce-client-core}" conf="test->*" />
-
-    <dependency org="org.apache.hadoop" name="hadoop-yarn-common" rev="${/org.apache.hadoop/hadoop-yarn-common}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-yarn-api" rev="${/org.apache.hadoop/hadoop-yarn-api}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-yarn-client" rev="${/org.apache.hadoop/hadoop-yarn-client}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests" rev="${/org.apache.hadoop/hadoop-yarn-server-tests}" conf="test->*">
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-core" rev="${/org.apache.hadoop/hadoop-mapreduce-client-core}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-common" rev="${/org.apache.hadoop/hadoop-yarn-common}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-api" rev="${/org.apache.hadoop/hadoop-yarn-api}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-client" rev="${/org.apache.hadoop/hadoop-yarn-client}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-tests" rev="${/org.apache.hadoop/hadoop-yarn-server-tests}" conf="test">
       <artifact name="hadoop-yarn-server-tests" type="test" ext="jar" maven:classifier="tests" />
     </dependency>
-    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-common" rev="${/org.apache.hadoop/hadoop-yarn-server-common}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-nodemanager" rev="${/org.apache.hadoop/hadoop-yarn-server-nodemanager}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app" rev="${/org.apache.hadoop/hadoop-mapreduce-client-app}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-common" rev="${/org.apache.hadoop/hadoop-mapreduce-client-common}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs" rev="${/org.apache.hadoop/hadoop-mapreduce-client-hs}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-resourcemanager" rev="${/org.apache.hadoop/hadoop-yarn-server-resourcemanager}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-shuffle" rev="${/org.apache.hadoop/hadoop-mapreduce-client-shuffle}" conf="test->*" />
-    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-web-proxy" rev="${/org.apache.hadoop/hadoop-yarn-server-web-proxy}" conf="test->*" />
-
-    <dependency org="aopalliance" name="aopalliance" rev="${/aopalliance/aopalliance}" conf="test->*" />
-    <dependency org="com.sun.xml.bind" name="jaxb-impl" rev="${/com.sun.xml.bind/jaxb-impl}" conf="test->*" />
-    <dependency org="org.objenesis" name="objenesis" rev="${/org.objenesis/objenesis}" conf="test->*"/>
-    <dependency org="io.netty" name="netty" rev="${/io.netty/netty}" conf="test->*" />
-    
-    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient" rev="${/org.apache.hadoop/hadoop-mapreduce-client-jobclient}" conf="test->*">
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-common" rev="${/org.apache.hadoop/hadoop-yarn-server-common}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-nodemanager" rev="${/org.apache.hadoop/hadoop-yarn-server-nodemanager}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-app" rev="${/org.apache.hadoop/hadoop-mapreduce-client-app}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-common" rev="${/org.apache.hadoop/hadoop-mapreduce-client-common}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-hs" rev="${/org.apache.hadoop/hadoop-mapreduce-client-hs}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-resourcemanager" rev="${/org.apache.hadoop/hadoop-yarn-server-resourcemanager}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-shuffle" rev="${/org.apache.hadoop/hadoop-mapreduce-client-shuffle}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-yarn-server-web-proxy" rev="${/org.apache.hadoop/hadoop-yarn-server-web-proxy}" conf="test" />
+    <dependency org="org.apache.hadoop" name="hadoop-mapreduce-client-jobclient" rev="${/org.apache.hadoop/hadoop-mapreduce-client-jobclient}" conf="test">
       <artifact name="hadoop-mapreduce-client-jobclient" type="jar" ext="jar" />
       <artifact name="hadoop-mapreduce-client-jobclient" type="test" ext="jar" maven:classifier="tests" />
     </dependency>
 
-    <dependency org="org.apache.mrunit" name="mrunit" rev="${/org.apache.mrunit/mrunit}" conf="test->*">
+    <dependency org="aopalliance" name="aopalliance" rev="${/aopalliance/aopalliance}" conf="test" />
+    <dependency org="com.sun.xml.bind" name="jaxb-impl" rev="${/com.sun.xml.bind/jaxb-impl}" conf="test" />
+    <dependency org="org.objenesis" name="objenesis" rev="${/org.objenesis/objenesis}" conf="test" />
+    <dependency org="io.netty" name="netty" rev="${/io.netty/netty}" conf="test" />
+    <dependency org="org.apache.mrunit" name="mrunit" rev="${/org.apache.mrunit/mrunit}" conf="test">
       <artifact name="mrunit" maven:classifier="hadoop2" />
       <exclude org="log4j" module="log4j" />
     </dependency>
-    
-    <dependency org="org.mockito" name="mockito-core" rev="${/org.mockito/mockito-core}" conf="test->*" />
+    <dependency org="org.mockito" name="mockito-core" rev="${/org.mockito/mockito-core}" conf="test" />
+    <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="test" />
+    <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test" />
+
 
     <!-- FasterXml Jackson Dependencies -->
-    <dependency org="com.fasterxml.jackson.core" name="jackson-core" rev="${/com.fasterxml.jackson.core/jackson-core}" conf="test->*" />
-    <dependency org="com.fasterxml.jackson.core" name="jackson-databind" rev="${/com.fasterxml.jackson.core/jackson-databind}" conf="test->*" />
-    <dependency org="com.fasterxml.jackson.core" name="jackson-annotations" rev="${/com.fasterxml.jackson.core/jackson-annotations}" conf="test->*" />
-
-    <!-- CodeHause Jackson Dependencies -->
-    <dependency org="org.codehaus.jackson" name="jackson-jaxrs" rev="${/org.codehaus.jackson/jackson-jaxrs}" conf="test->*" />
-    <dependency org="org.codehaus.jackson" name="jackson-mapper-asl" rev="${/org.codehaus.jackson/jackson-mapper-asl}" conf="test->*" />
-    <dependency org="org.codehaus.jackson" name="jackson-core-asl" rev="${/org.codehaus.jackson/jackson-core-asl}" conf="test->*" />
+    <dependency org="com.fasterxml.jackson.core" name="jackson-core" rev="${/com.fasterxml.jackson.core/jackson-core}" conf="test" />
+    <dependency org="com.fasterxml.jackson.core" name="jackson-databind" rev="${/com.fasterxml.jackson.core/jackson-databind}" conf="test" />
+    <dependency org="com.fasterxml.jackson.core" name="jackson-annotations" rev="${/com.fasterxml.jackson.core/jackson-annotations}" conf="test" />
+
+    <!-- CodeHaus Jackson Dependencies -->
+    <dependency org="org.codehaus.jackson" name="jackson-jaxrs" rev="${/org.codehaus.jackson/jackson-jaxrs}" conf="test" />
+    <dependency org="org.codehaus.jackson" name="jackson-mapper-asl" rev="${/org.codehaus.jackson/jackson-mapper-asl}" conf="test" />
+    <dependency org="org.codehaus.jackson" name="jackson-core-asl" rev="${/org.codehaus.jackson/jackson-core-asl}" conf="test" />
 
     <!-- Jersey Dependencies -->
-    <dependency org="com.sun.jersey.contribs" name="jersey-guice" rev="${/com.sun.jersey.contribs/jersey-guice}" conf="test->*" />
-    <dependency org="com.sun.jersey" name="jersey-core" rev="${/com.sun.jersey/jersey-core}" conf="test->*" />
-    <dependency org="com.sun.jersey" name="jersey-json" rev="${/com.sun.jersey/jersey-json}" conf="test->*" />
-    <dependency org="com.sun.jersey" name="jersey-server" rev="${/com.sun.jersey/jersey-server}" conf="test->*" />
-    <dependency org="com.sun.jersey" name="jersey-bundle" rev="${/com.sun.jersey/jersey-bundle}" conf="test->*" />
+    <dependency org="com.sun.jersey.contribs" name="jersey-guice" rev="${/com.sun.jersey.contribs/jersey-guice}" conf="test" />
+    <dependency org="com.sun.jersey" name="jersey-core" rev="${/com.sun.jersey/jersey-core}" conf="test" />
+    <dependency org="com.sun.jersey" name="jersey-json" rev="${/com.sun.jersey/jersey-json}" conf="test" />
+    <dependency org="com.sun.jersey" name="jersey-server" rev="${/com.sun.jersey/jersey-server}" conf="test" />
+    <dependency org="com.sun.jersey" name="jersey-bundle" rev="${/com.sun.jersey/jersey-bundle}" conf="test" />
 
     <!-- Guice Dependencies -->
-    <dependency org="com.google.inject" name="guice" rev="${/com.google.inject/guice}" conf="test->*" />
-    <dependency org="com.google.inject.extensions" name="guice-servlet" rev="${/com.google.inject.extensions/guice-servlet}" conf="test->*" />
-    <dependency org="javax.inject" name="javax.inject" rev="${/javax.inject/javax.inject}" conf="test->*" />
+    <dependency org="com.google.inject" name="guice" rev="${/com.google.inject/guice}" conf="test" />
+    <dependency org="com.google.inject.extensions" name="guice-servlet" rev="${/com.google.inject.extensions/guice-servlet}" conf="test" />
+    <dependency org="javax.inject" name="javax.inject" rev="${/javax.inject/javax.inject}" conf="test" />
 
     <!-- Avro Dependencies -->
-    <dependency org="org.apache.avro" name="avro" rev="${/org.apache.avro/avro}" conf="test->*" />
-    <dependency org="com.thoughtworks.paranamer" name="paranamer" rev="${/com.thoughtworks.paranamer/paranamer}"  conf="test->*"  />
-    <dependency org="org.xerial.snappy" name="snappy-java" rev="${/org.xerial.snappy/snappy-java}" conf="test->*" />
+    <dependency org="org.apache.avro" name="avro" rev="${/org.apache.avro/avro}" conf="test" />
+    <dependency org="com.thoughtworks.paranamer" name="paranamer" rev="${/com.thoughtworks.paranamer/paranamer}" conf="test" />
+    <dependency org="org.xerial.snappy" name="snappy-java" rev="${/org.xerial.snappy/snappy-java}" conf="test" />
 
     <!-- Hadoop DfsMiniCluster Dependencies -->
-    <dependency org="org.apache.hadoop" name="hadoop-common" rev="${/org.apache.hadoop/hadoop-common}" conf="test->*">
+    <dependency org="org.apache.hadoop" name="hadoop-common" rev="${/org.apache.hadoop/hadoop-common}" conf="test.DfsMiniCluster">
       <artifact name="hadoop-common" type="jar" ext="jar" />
       <artifact name="hadoop-common" type="test" ext="jar" maven:classifier="tests" />
     </dependency>
-    <dependency org="org.apache.hadoop" name="hadoop-hdfs" rev="${/org.apache.hadoop/hadoop-hdfs}" conf="test->*">
+    <dependency org="org.apache.hadoop" name="hadoop-hdfs" rev="${/org.apache.hadoop/hadoop-hdfs}" conf="test.DfsMiniCluster">
       <artifact name="hadoop-hdfs" type="test" ext="jar" maven:classifier="tests" />
     </dependency>
-    <dependency org="org.mortbay.jetty" name="jetty" rev="${/org.mortbay.jetty/jetty}" conf="test.DfsMiniCluster->*"/>
-    <dependency org="org.mortbay.jetty" name="jetty-util" rev="${/org.mortbay.jetty/jetty-util}" conf="test.DfsMiniCluster->*"/>
-    <dependency org="com.sun.jersey" name="jersey-core" rev="${/com.sun.jersey/jersey-core}" conf="test->*" />
-
-    <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="test->*" />
-    <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test->*"/>
+    <dependency org="org.mortbay.jetty" name="jetty" rev="${/org.mortbay.jetty/jetty}" conf="test.DfsMiniCluster" />
+    <dependency org="org.mortbay.jetty" name="jetty-util" rev="${/org.mortbay.jetty/jetty-util}" conf="test.DfsMiniCluster" />
+    <dependency org="com.sun.jersey" name="jersey-core" rev="${/com.sun.jersey/jersey-core}" conf="test.DfsMiniCluster" />
 
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}" />
 

Modified: lucene/dev/branches/lucene5969/solr/contrib/uima/ivy.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/contrib/uima/ivy.xml?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/contrib/uima/ivy.xml (original)
+++ lucene/dev/branches/lucene5969/solr/contrib/uima/ivy.xml Wed Oct 22 05:44:17 2014
@@ -18,19 +18,19 @@
 -->
 <ivy-module version="2.0">
   <info organisation="org.apache.solr" module="uima"/>
-  <configurations>
+  <configurations defaultconfmapping="compile->master;test->master">
     <conf name="compile" transitive="false"/>
     <conf name="test" transitive="false"/>
   </configurations>
   <dependencies>
-    <dependency org="commons-digester" name="commons-digester" rev="${/commons-digester/commons-digester}" conf="compile->*"/>
-    <dependency org="org.apache.uima" name="AlchemyAPIAnnotator" rev="${/org.apache.uima/AlchemyAPIAnnotator}" conf="compile->*"/>
-    <dependency org="org.apache.uima" name="OpenCalaisAnnotator" rev="${/org.apache.uima/OpenCalaisAnnotator}" conf="compile->*"/>
-    <dependency org="org.apache.uima" name="Tagger" rev="${/org.apache.uima/Tagger}" conf="compile->*"/>
-    <dependency org="org.apache.uima" name="WhitespaceTokenizer" rev="${/org.apache.uima/WhitespaceTokenizer}" conf="compile->*"/>
-    <dependency org="org.apache.uima" name="uimaj-core" rev="${/org.apache.uima/uimaj-core}" conf="compile->*"/>
+    <dependency org="commons-digester" name="commons-digester" rev="${/commons-digester/commons-digester}" conf="compile"/>
+    <dependency org="org.apache.uima" name="AlchemyAPIAnnotator" rev="${/org.apache.uima/AlchemyAPIAnnotator}" conf="compile"/>
+    <dependency org="org.apache.uima" name="OpenCalaisAnnotator" rev="${/org.apache.uima/OpenCalaisAnnotator}" conf="compile"/>
+    <dependency org="org.apache.uima" name="Tagger" rev="${/org.apache.uima/Tagger}" conf="compile"/>
+    <dependency org="org.apache.uima" name="WhitespaceTokenizer" rev="${/org.apache.uima/WhitespaceTokenizer}" conf="compile"/>
+    <dependency org="org.apache.uima" name="uimaj-core" rev="${/org.apache.uima/uimaj-core}" conf="compile"/>
 
-    <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test->*"/>
+    <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test"/>
 
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
   </dependencies>

Modified: lucene/dev/branches/lucene5969/solr/contrib/velocity/ivy.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/contrib/velocity/ivy.xml?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/contrib/velocity/ivy.xml (original)
+++ lucene/dev/branches/lucene5969/solr/contrib/velocity/ivy.xml Wed Oct 22 05:44:17 2014
@@ -18,17 +18,17 @@
 -->
 <ivy-module version="2.0">
   <info organisation="org.apache.solr" module="velocity"/>
-  <configurations>
+  <configurations defaultconfmapping="compile->master;test->master">
     <conf name="compile" transitive="false"/>
     <conf name="test" transitive="false"/>
   </configurations>
   <dependencies>
-    <dependency org="commons-beanutils" name="commons-beanutils" rev="${/commons-beanutils/commons-beanutils}" conf="compile->*"/>
-    <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="compile->*"/>
-    <dependency org="org.apache.velocity" name="velocity" rev="${/org.apache.velocity/velocity}" conf="compile->*"/>
-    <dependency org="org.apache.velocity" name="velocity-tools" rev="${/org.apache.velocity/velocity-tools}" conf="compile->*"/>
+    <dependency org="commons-beanutils" name="commons-beanutils" rev="${/commons-beanutils/commons-beanutils}" conf="compile"/>
+    <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="compile"/>
+    <dependency org="org.apache.velocity" name="velocity" rev="${/org.apache.velocity/velocity}" conf="compile"/>
+    <dependency org="org.apache.velocity" name="velocity-tools" rev="${/org.apache.velocity/velocity-tools}" conf="compile"/>
 
-    <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test->*"/>
+    <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test"/>
 
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/>
   </dependencies>

Modified: lucene/dev/branches/lucene5969/solr/core/ivy.xml
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/ivy.xml?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/ivy.xml (original)
+++ lucene/dev/branches/lucene5969/solr/core/ivy.xml Wed Oct 22 05:44:17 2014
@@ -19,7 +19,7 @@
 <ivy-module version="2.0"  xmlns:maven="http://ant.apache.org/ivy/maven">
   <info organisation="org.apache.solr" module="core"/>
   
-  <configurations>
+  <configurations defaultconfmapping="compile->master;compile.hadoop->master;test->master;test.DfsMiniCluster->master">
     <!-- artifacts in the "compile" and "compile.hadoop" configurations will go into solr/core/lib/ -->
     <conf name="compile" transitive="false"/>
     <conf name="compile.hadoop" transitive="false"/>
@@ -29,53 +29,53 @@
   </configurations>
 
   <dependencies>
-    <dependency org="commons-codec" name="commons-codec" rev="${/commons-codec/commons-codec}" conf="compile->*"/>
-    <dependency org="commons-fileupload" name="commons-fileupload" rev="${/commons-fileupload/commons-fileupload}" conf="compile->*"/>
-    <dependency org="commons-cli" name="commons-cli" rev="${/commons-cli/commons-cli}" conf="compile->*"/>
-    <dependency org="commons-lang" name="commons-lang" rev="${/commons-lang/commons-lang}" conf="compile->*"/>
-    <dependency org="com.google.guava" name="guava" rev="${/com.google.guava/guava}" conf="compile->*"/>
-    <dependency org="com.spatial4j" name="spatial4j" rev="${/com.spatial4j/spatial4j}" conf="compile->*"/>
-    <dependency org="org.antlr" name="antlr-runtime" rev="${/org.antlr/antlr-runtime}" conf="compile->*"/>
-    <dependency org="org.ow2.asm" name="asm" rev="${/org.ow2.asm/asm}" conf="compile->*"/>
-    <dependency org="org.ow2.asm" name="asm-commons" rev="${/org.ow2.asm/asm-commons}" conf="compile->*"/>
-    <dependency org="org.restlet.jee" name="org.restlet" rev="${/org.restlet.jee/org.restlet}" conf="compile->*"/>
-    <dependency org="org.restlet.jee" name="org.restlet.ext.servlet" rev="${/org.restlet.jee/org.restlet.ext.servlet}" conf="compile->*"/>
-    <dependency org="joda-time" name="joda-time" rev="${/joda-time/joda-time}" conf="compile->*"/>
-    <dependency org="dom4j" name="dom4j" rev="${/dom4j/dom4j}" conf="compile->*"/>
-    <dependency org="com.carrotsearch" name="hppc" rev="${/com.carrotsearch/hppc}" conf="compile->*"/>
-    <dependency org="log4j" name="log4j" rev="${/log4j/log4j}" conf="compile->*"/>
-    <dependency org="org.slf4j" name="slf4j-log4j12" rev="${/org.slf4j/slf4j-log4j12}" conf="compile->*"/>
+    <dependency org="commons-codec" name="commons-codec" rev="${/commons-codec/commons-codec}" conf="compile"/>
+    <dependency org="commons-fileupload" name="commons-fileupload" rev="${/commons-fileupload/commons-fileupload}" conf="compile"/>
+    <dependency org="commons-cli" name="commons-cli" rev="${/commons-cli/commons-cli}" conf="compile"/>
+    <dependency org="commons-lang" name="commons-lang" rev="${/commons-lang/commons-lang}" conf="compile"/>
+    <dependency org="com.google.guava" name="guava" rev="${/com.google.guava/guava}" conf="compile"/>
+    <dependency org="com.spatial4j" name="spatial4j" rev="${/com.spatial4j/spatial4j}" conf="compile"/>
+    <dependency org="org.antlr" name="antlr-runtime" rev="${/org.antlr/antlr-runtime}" conf="compile"/>
+    <dependency org="org.ow2.asm" name="asm" rev="${/org.ow2.asm/asm}" conf="compile"/>
+    <dependency org="org.ow2.asm" name="asm-commons" rev="${/org.ow2.asm/asm-commons}" conf="compile"/>
+    <dependency org="org.restlet.jee" name="org.restlet" rev="${/org.restlet.jee/org.restlet}" conf="compile"/>
+    <dependency org="org.restlet.jee" name="org.restlet.ext.servlet" rev="${/org.restlet.jee/org.restlet.ext.servlet}" conf="compile"/>
+    <dependency org="joda-time" name="joda-time" rev="${/joda-time/joda-time}" conf="compile"/>
+    <dependency org="dom4j" name="dom4j" rev="${/dom4j/dom4j}" conf="compile"/>
+    <dependency org="com.carrotsearch" name="hppc" rev="${/com.carrotsearch/hppc}" conf="compile"/>
+    <dependency org="log4j" name="log4j" rev="${/log4j/log4j}" conf="compile"/>
+    <dependency org="org.slf4j" name="slf4j-log4j12" rev="${/org.slf4j/slf4j-log4j12}" conf="compile"/>
 
-    <dependency org="javax.servlet" name="javax.servlet-api" rev="${/javax.servlet/javax.servlet-api}" conf="test->*"/>
-    <dependency org="org.easymock" name="easymock" rev="${/org.easymock/easymock}" conf="test->*"/>
-    <dependency org="cglib" name="cglib-nodep" rev="${/cglib/cglib-nodep}" conf="test->*"/>
-    <dependency org="org.objenesis" name="objenesis" rev="${/org.objenesis/objenesis}" conf="test->*"/>
-    <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test->*"/>
+    <dependency org="javax.servlet" name="javax.servlet-api" rev="${/javax.servlet/javax.servlet-api}" conf="test"/>
+    <dependency org="org.easymock" name="easymock" rev="${/org.easymock/easymock}" conf="test"/>
+    <dependency org="cglib" name="cglib-nodep" rev="${/cglib/cglib-nodep}" conf="test"/>
+    <dependency org="org.objenesis" name="objenesis" rev="${/org.objenesis/objenesis}" conf="test"/>
+    <dependency org="org.slf4j" name="jcl-over-slf4j" rev="${/org.slf4j/jcl-over-slf4j}" conf="test"/>
 
-    <dependency org="org.apache.hadoop" name="hadoop-common" rev="${/org.apache.hadoop/hadoop-common}" conf="compile.hadoop->*"/>
+    <dependency org="org.apache.hadoop" name="hadoop-common" rev="${/org.apache.hadoop/hadoop-common}" conf="compile.hadoop"/>
     <!--
       hadoop-hdfs, hadoop-annotations and hadoop-auth are runtime dependencies,
       so even though they are not compile-time dependencies, they are included
       here as such so that they are included in the runtime distribution.
      -->
-    <dependency org="org.apache.hadoop" name="hadoop-hdfs" rev="${/org.apache.hadoop/hadoop-hdfs}" conf="compile.hadoop->*"/>
-    <dependency org="org.apache.hadoop" name="hadoop-annotations" rev="${/org.apache.hadoop/hadoop-annotations}" conf="compile.hadoop->*"/>
-    <dependency org="org.apache.hadoop" name="hadoop-auth" rev="${/org.apache.hadoop/hadoop-auth}" conf="compile.hadoop->*"/>
-    <dependency org="commons-configuration" name="commons-configuration" rev="${/commons-configuration/commons-configuration}" conf="compile.hadoop->*"/>
-    <dependency org="com.google.protobuf" name="protobuf-java" rev="${/com.google.protobuf/protobuf-java}" conf="compile.hadoop->*"/>
-    <dependency org="com.googlecode.concurrentlinkedhashmap" name="concurrentlinkedhashmap-lru" rev="${/com.googlecode.concurrentlinkedhashmap/concurrentlinkedhashmap-lru}" conf="compile.hadoop->*"/>
+    <dependency org="org.apache.hadoop" name="hadoop-hdfs" rev="${/org.apache.hadoop/hadoop-hdfs}" conf="compile.hadoop"/>
+    <dependency org="org.apache.hadoop" name="hadoop-annotations" rev="${/org.apache.hadoop/hadoop-annotations}" conf="compile.hadoop"/>
+    <dependency org="org.apache.hadoop" name="hadoop-auth" rev="${/org.apache.hadoop/hadoop-auth}" conf="compile.hadoop"/>
+    <dependency org="commons-configuration" name="commons-configuration" rev="${/commons-configuration/commons-configuration}" conf="compile.hadoop"/>
+    <dependency org="com.google.protobuf" name="protobuf-java" rev="${/com.google.protobuf/protobuf-java}" conf="compile.hadoop"/>
+    <dependency org="com.googlecode.concurrentlinkedhashmap" name="concurrentlinkedhashmap-lru" rev="${/com.googlecode.concurrentlinkedhashmap/concurrentlinkedhashmap-lru}" conf="compile.hadoop"/>
 
     <!-- Hadoop DfsMiniCluster Dependencies-->
-    <dependency org="org.apache.hadoop" name="hadoop-common" rev="${/org.apache.hadoop/hadoop-common}" conf="test.DfsMiniCluster->*">
+    <dependency org="org.apache.hadoop" name="hadoop-common" rev="${/org.apache.hadoop/hadoop-common}" conf="test.DfsMiniCluster">
       <artifact name="hadoop-common" type="test" ext="jar" maven:classifier="tests" />
     </dependency>
-    <dependency org="org.apache.hadoop" name="hadoop-hdfs" rev="${/org.apache.hadoop/hadoop-hdfs}" conf="test.DfsMiniCluster->*">
+    <dependency org="org.apache.hadoop" name="hadoop-hdfs" rev="${/org.apache.hadoop/hadoop-hdfs}" conf="test.DfsMiniCluster">
       <artifact name="hadoop-hdfs" type="test" ext="jar" maven:classifier="tests" />
     </dependency>
-    <dependency org="org.mortbay.jetty" name="jetty" rev="${/org.mortbay.jetty/jetty}" conf="test.DfsMiniCluster->*"/>
-    <dependency org="org.mortbay.jetty" name="jetty-util" rev="${/org.mortbay.jetty/jetty-util}" conf="test.DfsMiniCluster->*"/>
-    <dependency org="com.sun.jersey" name="jersey-core" rev="${/com.sun.jersey/jersey-core}" conf="test.DfsMiniCluster->*"/>
-    <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="test.DfsMiniCluster->*"/>
+    <dependency org="org.mortbay.jetty" name="jetty" rev="${/org.mortbay.jetty/jetty}" conf="test.DfsMiniCluster"/>
+    <dependency org="org.mortbay.jetty" name="jetty-util" rev="${/org.mortbay.jetty/jetty-util}" conf="test.DfsMiniCluster"/>
+    <dependency org="com.sun.jersey" name="jersey-core" rev="${/com.sun.jersey/jersey-core}" conf="test.DfsMiniCluster"/>
+    <dependency org="commons-collections" name="commons-collections" rev="${/commons-collections/commons-collections}" conf="test.DfsMiniCluster"/>
 
     <exclude org="*" ext="*" matcher="regexp" type="${ivy.exclude.types}"/> 
   </dependencies>

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/ElectionContext.java Wed Oct 22 05:44:17 2014
@@ -212,7 +212,7 @@ final class ShardLeaderElectionContext e
     
     int leaderVoteWait = cc.getZkController().getLeaderVoteWait();
     if (!weAreReplacement) {
-      waitForReplicasToComeUp(weAreReplacement, leaderVoteWait);
+      waitForReplicasToComeUp(leaderVoteWait);
     }
 
     try (SolrCore core = cc.getCore(coreName)) {
@@ -226,7 +226,7 @@ final class ShardLeaderElectionContext e
       
       // should I be leader?
       if (weAreReplacement && !shouldIBeLeader(leaderProps, core, weAreReplacement)) {
-        rejoinLeaderElection(leaderSeqPath, core);
+        rejoinLeaderElection(core);
         return;
       }
       
@@ -297,7 +297,7 @@ final class ShardLeaderElectionContext e
         }
       }
       if (!success) {
-        rejoinLeaderElection(leaderSeqPath, core);
+        rejoinLeaderElection(core);
         return;
       }
 
@@ -323,7 +323,7 @@ final class ShardLeaderElectionContext e
         core.getCoreDescriptor().getCloudDescriptor().setLeader(false);
         
         // we could not publish ourselves as leader - try and rejoin election
-        rejoinLeaderElection(leaderSeqPath, core);
+        rejoinLeaderElection(core);
       }
     }
 
@@ -401,7 +401,7 @@ final class ShardLeaderElectionContext e
     } // core gets closed automagically    
   }
 
-  private void waitForReplicasToComeUp(boolean weAreReplacement, int timeoutms) throws InterruptedException {
+  private void waitForReplicasToComeUp(int timeoutms) throws InterruptedException {
     long timeoutAt = System.nanoTime() + TimeUnit.NANOSECONDS.convert(timeoutms, TimeUnit.MILLISECONDS);
     final String shardsElectZkPath = electionPath + LeaderElector.ELECTION_NODE;
     
@@ -448,11 +448,11 @@ final class ShardLeaderElectionContext e
     }
   }
 
-  private void rejoinLeaderElection(String leaderSeqPath, SolrCore core)
+  private void rejoinLeaderElection(SolrCore core)
       throws InterruptedException, KeeperException, IOException {
     // remove our ephemeral and re join the election
     if (cc.isShutDown()) {
-      log.info("Not rejoining election because CoreContainer is close");
+      log.info("Not rejoining election because CoreContainer is closed");
       return;
     }
     

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/LeaderElector.java Wed Oct 22 05:44:17 2014
@@ -106,7 +106,6 @@ public  class LeaderElector {
         return;
       }
       // first we delete the node advertising the old leader in case the ephem is still there
-      // first we delete the node advertising the old leader in case the ephem is still there
       try {
         zkClient.delete(context.leaderPath, -1, true);
       }catch (KeeperException.NoNodeException nne){
@@ -244,7 +243,7 @@ public  class LeaderElector {
       try {
         if(joinAtHead){
           log.info("node {} Trying to join election at the head ", id);
-          List<String> nodes = OverseerCollectionProcessor.getSortedElectionNodes(zkClient);
+          List<String> nodes = OverseerCollectionProcessor.getSortedElectionNodes(zkClient, shardsElectZkPath);
           if(nodes.size() <2){
             leaderSeqPath = zkClient.create(shardsElectZkPath + "/" + id + "-n_", null,
                 CreateMode.EPHEMERAL_SEQUENTIAL, false);

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/Overseer.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/Overseer.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/Overseer.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/Overseer.java Wed Oct 22 05:44:17 2014
@@ -130,7 +130,9 @@ public class Overseer implements Closeab
 
   static enum LeaderStatus {DONT_KNOW, NO, YES}
 
-  public static final Set<String> sliceUniqueBooleanProperties = ImmutableSet.of("property.preferredleader");
+  public static final String preferredLeaderProp = COLL_PROP_PREFIX + "preferredleader";
+
+  public static final Set<String> sliceUniqueBooleanProperties = ImmutableSet.of(preferredLeaderProp);
 
   private long lastUpdatedTime = 0;
 
@@ -1169,7 +1171,7 @@ public class Overseer implements Closeab
         return null;
       }
 
-    ClusterState updateSlice(ClusterState state, String collectionName, Slice slice) {
+    private ClusterState updateSlice(ClusterState state, String collectionName, Slice slice) {
         // System.out.println("###!!!### OLD CLUSTERSTATE: " + JSONUtil.toJSON(state.getCollectionStates()));
         // System.out.println("Updating slice:" + slice);
         DocCollection newCollection = null;
@@ -1396,7 +1398,6 @@ public class Overseer implements Closeab
       }
 
   }
-
   // Class to encapsulate processing replica properties that have at most one replica hosting a property per slice.
   private class ExclusiveSliceProperty {
     private ClusterStateUpdater updater;
@@ -1698,6 +1699,7 @@ public class Overseer implements Closeab
       this.replica = replica;
     }
   }
+
   static void getShardNames(Integer numShards, List<String> shardNames) {
     if(numShards == null)
       throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "numShards" + " is a required param");

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/OverseerCollectionProcessor.java Wed Oct 22 05:44:17 2014
@@ -18,7 +18,9 @@ package org.apache.solr.cloud;
  */
 
 import static org.apache.solr.cloud.Assign.getNodesForNewShard;
+import static org.apache.solr.common.cloud.ZkStateReader.BASE_URL_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP;
@@ -31,9 +33,9 @@ import static org.apache.solr.common.par
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATE;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATESHARD;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETE;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICAPROP;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESHARD;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.REMOVEROLE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETEREPLICAPROP;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -441,7 +443,7 @@ public class OverseerCollectionProcessor
     String ldr = getLeaderNode(zk);
     if(overseerDesignates.contains(ldr)) return;
     log.info("prioritizing overseer nodes at {} overseer designates are {}", myId, overseerDesignates);
-    List<String> electionNodes = getSortedElectionNodes(zk);
+    List<String> electionNodes = getSortedElectionNodes(zk, OverseerElectionContext.PATH + LeaderElector.ELECTION_NODE);
     if(electionNodes.size()<2) return;
     log.info("sorted nodes {}", electionNodes);
 
@@ -484,10 +486,10 @@ public class OverseerCollectionProcessor
     return nodeNames;
   }
 
-  public static List<String> getSortedElectionNodes(SolrZkClient zk) throws KeeperException, InterruptedException {
+  public static List<String> getSortedElectionNodes(SolrZkClient zk, String path) throws KeeperException, InterruptedException {
     List<String> children = null;
     try {
-      children = zk.getChildren(OverseerElectionContext.PATH + LeaderElector.ELECTION_NODE, null, true);
+      children = zk.getChildren(path, null, true);
       LeaderElector.sortSeqs(children);
       return children;
     } catch (Exception e) {
@@ -651,6 +653,9 @@ public class OverseerCollectionProcessor
           case BALANCESLICEUNIQUE:
             balanceProperty(message);
             break;
+          case REBALANCELEADERS:
+            processAssignLeaders(message);
+            break;
           default:
             throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown operation:"
                 + operation);
@@ -677,6 +682,32 @@ public class OverseerCollectionProcessor
   }
 
   @SuppressWarnings("unchecked")
+  // re-purpose REBALANCELEADERS to reassign a single leader over here
+  private void processAssignLeaders(ZkNodeProps message) throws KeeperException, InterruptedException {
+    String collectionName = message.getStr(COLLECTION_PROP);
+    String shardId = message.getStr(SHARD_ID_PROP);
+    String baseURL = message.getStr(BASE_URL_PROP);
+    String coreName = message.getStr(CORE_NAME_PROP);
+
+    if (StringUtils.isBlank(collectionName) || StringUtils.isBlank(shardId) || StringUtils.isBlank(baseURL) ||
+        StringUtils.isBlank(coreName)) {
+      throw new SolrException(ErrorCode.BAD_REQUEST,
+          String.format(Locale.ROOT, "The '%s', '%s', '%s' and '%s' parameters are required when assigning a leader",
+              COLLECTION_PROP, SHARD_ID_PROP, BASE_URL_PROP, CORE_NAME_PROP));
+    }
+    SolrZkClient zkClient = zkStateReader.getZkClient();
+    DistributedQueue inQueue = Overseer.getInQueue(zkClient);
+    Map<String, Object> propMap = new HashMap<>();
+    propMap.put(Overseer.QUEUE_OPERATION, Overseer.OverseerAction.LEADER.toLower());
+    propMap.put(COLLECTION_PROP, collectionName);
+    propMap.put(SHARD_ID_PROP, shardId);
+    propMap.put(BASE_URL_PROP, baseURL);
+    propMap.put(CORE_NAME_PROP, coreName);
+    inQueue.offer(zkStateReader.toJSON(propMap));
+  }
+
+
+  @SuppressWarnings("unchecked")
   private void processReplicaAddPropertyCommand(ZkNodeProps message) throws KeeperException, InterruptedException {
     if (StringUtils.isBlank(message.getStr(COLLECTION_PROP)) ||
         StringUtils.isBlank(message.getStr(SHARD_ID_PROP)) ||
@@ -684,7 +715,7 @@ public class OverseerCollectionProcessor
         StringUtils.isBlank(message.getStr(PROPERTY_PROP)) ||
         StringUtils.isBlank(message.getStr(PROPERTY_VALUE_PROP))) {
       throw new SolrException(ErrorCode.BAD_REQUEST,
-          String.format(Locale.ROOT, "The '%s', '%s', '%s', '%s', and '%s' parameters are required for all replica properties add/delete' operations",
+          String.format(Locale.ROOT, "The '%s', '%s', '%s', '%s', and '%s' parameters are required for all replica properties add/delete operations",
               COLLECTION_PROP, SHARD_ID_PROP, REPLICA_PROP, PROPERTY_PROP, PROPERTY_VALUE_PROP));
     }
     SolrZkClient zkClient = zkStateReader.getZkClient();
@@ -702,7 +733,7 @@ public class OverseerCollectionProcessor
         StringUtils.isBlank(message.getStr(REPLICA_PROP)) ||
         StringUtils.isBlank(message.getStr(PROPERTY_PROP))) {
       throw new SolrException(ErrorCode.BAD_REQUEST,
-          String.format(Locale.ROOT, "The '%s', '%s', '%s', and '%s' parameters are required for all replica properties add/delete' operations",
+          String.format(Locale.ROOT, "The '%s', '%s', '%s', and '%s' parameters are required for all replica properties add/delete operations",
               COLLECTION_PROP, SHARD_ID_PROP, REPLICA_PROP, PROPERTY_PROP));
     }
     SolrZkClient zkClient = zkStateReader.getZkClient();

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/ZkController.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/ZkController.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/ZkController.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/cloud/ZkController.java Wed Oct 22 05:44:17 2014
@@ -834,7 +834,13 @@ public final class ZkController {
     ZkNodeProps leaderProps = new ZkNodeProps(props);
     
     try {
-      joinElection(desc, afterExpiration);
+      // If we're a preferred leader, insert ourselves at the head of the queue
+      boolean joinAtHead = false;
+      Replica replica = zkStateReader.getClusterState().getReplica(desc.getCloudDescriptor().getCollectionName(), coreZkNodeName);
+      if (replica != null) {
+        joinAtHead = replica.getBool(Overseer.preferredLeaderProp, false);
+      }
+      joinElection(desc, afterExpiration, joinAtHead);
     } catch (InterruptedException e) {
       // Restore the interrupted status
       Thread.currentThread().interrupt();
@@ -988,7 +994,8 @@ public final class ZkController {
   }
 
 
-  private void joinElection(CoreDescriptor cd, boolean afterExpiration) throws InterruptedException, KeeperException, IOException {
+  private void joinElection(CoreDescriptor cd, boolean afterExpiration, boolean joinAtHead)
+      throws InterruptedException, KeeperException, IOException {
     // look for old context - if we find it, cancel it
     String collection = cd.getCloudDescriptor().getCollectionName();
     final String coreNodeName = cd.getCloudDescriptor().getCoreNodeName();
@@ -1018,7 +1025,7 @@ public final class ZkController {
 
     leaderElector.setup(context);
     electionContexts.put(contextKey, context);
-    leaderElector.joinElection(context, false);
+    leaderElector.joinElection(context, false, joinAtHead);
   }
 
 

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/admin/CollectionsHandler.java Wed Oct 22 05:44:17 2014
@@ -30,16 +30,23 @@ import static org.apache.solr.cloud.Over
 import static org.apache.solr.cloud.OverseerCollectionProcessor.ROUTER;
 import static org.apache.solr.cloud.OverseerCollectionProcessor.SHARDS_PROP;
 import static org.apache.solr.common.cloud.ZkNodeProps.makeMap;
+import static org.apache.solr.common.cloud.ZkStateReader.ACTIVE;
+import static org.apache.solr.common.cloud.ZkStateReader.BASE_URL_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.COLLECTION_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.CORE_NAME_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.LEADER_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.SHARD_ID_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.PROPERTY_VALUE_PROP;
 import static org.apache.solr.common.cloud.ZkStateReader.MAX_SHARDS_PER_NODE;
 import static org.apache.solr.common.cloud.ZkStateReader.AUTO_ADD_REPLICAS;
 import static org.apache.solr.common.cloud.ZkStateReader.REPLICA_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.MAX_AT_ONCE_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.MAX_WAIT_SECONDS_PROP;
+import static org.apache.solr.common.cloud.ZkStateReader.STATE_PROP;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDROLE;
-import static org.apache.solr.common.params.CollectionParams.CollectionAction.BALANCESLICEUNIQUE;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.ADDREPLICAPROP;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.BALANCESLICEUNIQUE;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.CLUSTERPROP;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATE;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.CREATEALIAS;
@@ -51,6 +58,7 @@ import static org.apache.solr.common.par
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.DELETESHARD;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.MIGRATE;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.OVERSEERSTATUS;
+import static org.apache.solr.common.params.CollectionParams.CollectionAction.REBALANCELEADERS;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.RELOAD;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.REMOVEROLE;
 import static org.apache.solr.common.params.CollectionParams.CollectionAction.SPLITSHARD;
@@ -80,6 +88,8 @@ import org.apache.solr.common.SolrExcept
 import org.apache.solr.common.cloud.ClusterState;
 import org.apache.solr.common.cloud.DocCollection;
 import org.apache.solr.common.cloud.ImplicitDocRouter;
+import org.apache.solr.common.cloud.Replica;
+import org.apache.solr.common.cloud.Slice;
 import org.apache.solr.common.cloud.ZkCoreNodeProps;
 import org.apache.solr.common.cloud.ZkNodeProps;
 import org.apache.solr.common.cloud.ZkStateReader;
@@ -252,6 +262,10 @@ public class CollectionsHandler extends 
         this.handleBalanceSliceUnique(req, rsp);
         break;
       }
+      case REBALANCELEADERS: {
+        this.handleBalanceLeaders(req, rsp);
+        break;
+      }
       default: {
           throw new RuntimeException("Unknown action: " + action);
       }
@@ -260,6 +274,156 @@ public class CollectionsHandler extends 
     rsp.setHttpCaching(false);
   }
 
+
+  private void handleBalanceLeaders(SolrQueryRequest req, SolrQueryResponse rsp) throws KeeperException, InterruptedException {
+    req.getParams().required().check(COLLECTION_PROP);
+
+    String collectionName = req.getParams().get(COLLECTION_PROP);
+    if (StringUtils.isBlank(collectionName)) {
+      throw new SolrException(ErrorCode.BAD_REQUEST,
+          String.format(Locale.ROOT, "The " + COLLECTION_PROP + " is required for the REASSIGNLEADERS command."));
+    }
+    coreContainer.getZkController().getZkStateReader().updateClusterState(true);
+    ClusterState clusterState = coreContainer.getZkController().getClusterState();
+    DocCollection dc = clusterState.getCollection(collectionName);
+    if (dc == null) {
+      throw new SolrException(ErrorCode.BAD_REQUEST, "Collection '" + collectionName + "' does not exist, no action taken.");
+    }
+    Map<String, String> current = new HashMap<>();
+    int max = req.getParams().getInt(MAX_AT_ONCE_PROP, Integer.MAX_VALUE);
+    if (max <= 0) max = Integer.MAX_VALUE;
+    int maxWaitSecs = req.getParams().getInt(MAX_WAIT_SECONDS_PROP, 60);
+    NamedList<Object> results = new NamedList<>();
+    SolrQueryResponse rspIgnore = new SolrQueryResponse();
+    final String inactivePreferreds = "inactivePreferreds";
+    final String alreadyLeaders = "alreadyLeaders";
+    boolean keepGoing = true;
+    for (Slice slice : dc.getSlices()) {
+      for (Replica replica : slice.getReplicas()) {
+        // Tell the replica to become the leader if we're the preferred leader AND active AND not the leader already
+        if (replica.getBool(Overseer.preferredLeaderProp, false) == false) {
+          continue;
+        }
+        if (StringUtils.equalsIgnoreCase(replica.getStr(STATE_PROP), ACTIVE) == false) {
+          NamedList<Object> inactives = (NamedList<Object>) results.get(inactivePreferreds);
+          if (inactives == null) {
+            inactives = new NamedList<>();
+            results.add(inactivePreferreds, inactives);
+          }
+          NamedList<Object> res = new NamedList<>();
+          res.add("status", "skipped");
+          res.add("msg", "Node is a referredLeader, but it's inactive. Skipping");
+          res.add("nodeName", replica.getNodeName());
+          inactives.add(replica.getName(), res);
+          break; // Don't try to assign if we're not active!
+        }        // OK, we're the one, get in the queue to become the leader.
+        if (replica.getBool(LEADER_PROP, false)) {
+          NamedList<Object> noops = (NamedList<Object>) results.get(alreadyLeaders);
+          if (noops == null) {
+            noops = new NamedList<>();
+            results.add(alreadyLeaders, noops);
+          }
+          NamedList<Object> res = new NamedList<>();
+          res.add("status", "success");
+          res.add("msg", "Already leader");
+          res.add("nodeName", replica.getNodeName());
+          noops.add(replica.getName(), res);
+          break; // already the leader, do nothing.
+        }
+        Map<String, Object> propMap = new HashMap<>();
+        propMap.put(Overseer.QUEUE_OPERATION, REBALANCELEADERS.toLower());
+        propMap.put(COLLECTION_PROP, collectionName);
+        propMap.put(SHARD_ID_PROP, slice.getName());
+        propMap.put(BASE_URL_PROP, replica.get(BASE_URL_PROP));
+
+        String coreName = (String) replica.get(CORE_NAME_PROP);
+        // Put it in the waiting list.
+        String asyncId = REBALANCELEADERS.toLower() + "_" + coreName;
+        current.put(asyncId, String.format(Locale.ROOT, "Collection: '%s', Shard: '%s', Core: '%s', BaseUrl: '%s'",
+            collectionName, slice.getName(), coreName, replica.get(BASE_URL_PROP)));
+
+        propMap.put(CORE_NAME_PROP, coreName);
+        propMap.put(ASYNC, asyncId);
+
+        ZkNodeProps m = new ZkNodeProps(propMap);
+        log.info("Queueing collection '" + collectionName + "' slice '" + slice.getName() + "' replica '" +
+                coreName + "' to become leader.");
+        handleResponse(REBALANCELEADERS.toLower(), m, rspIgnore); // Want to construct my own response here.
+        break; // Done with this slice, skip the rest of the replicas.
+      }
+      if (current.size() == max) {
+        log.info("Queued " + max + " leader reassgnments, waiting for some to complete.");
+        keepGoing = waitForLeaderChange(current, maxWaitSecs, false, results);
+        if (keepGoing == false) {
+          break; // If we've waited longer than specified, don't continue to wait!
+        }
+      }
+    }
+    if (keepGoing == true) {
+      keepGoing = waitForLeaderChange(current, maxWaitSecs, true, results);
+    }
+    if (keepGoing == true) {
+      log.info("All leader reassignments completed.");
+    } else {
+      log.warn("Exceeded specified timeout of ." + maxWaitSecs + "' all leaders may not have been reassigned");
+    }
+
+    rsp.getValues().addAll(results);
+  }
+
+  // currentAsyncIds - map of request IDs and reporting data (value)
+  // maxWaitSecs - How long are we going to wait? Defaults to 60 seconds.
+  // waitForAll - if true, do not return until all assignments have been made.
+  // results - a place to stash results for reporting back to the user.
+  //
+  private boolean waitForLeaderChange(Map<String, String> currentAsyncIds, final int maxWaitSecs,
+                                      Boolean waitForAll, NamedList<Object> results)
+      throws KeeperException, InterruptedException {
+
+    if (currentAsyncIds.size() == 0) return true;
+
+    for (int idx = 0; idx < maxWaitSecs * 10; ++idx) {
+      Iterator<Map.Entry<String, String>> iter = currentAsyncIds.entrySet().iterator();
+      boolean foundChange = false;
+      while (iter.hasNext()) {
+        Map.Entry<String, String> pair = iter.next();
+        String asyncId = pair.getKey();
+        if (coreContainer.getZkController().getOverseerFailureMap().contains(asyncId)) {
+          coreContainer.getZkController().getOverseerFailureMap().remove(asyncId);
+          NamedList<Object> fails = (NamedList<Object>) results.get("failures");
+          if (fails == null) {
+            fails = new NamedList<>();
+            results.add("failures", fails);
+          }
+          NamedList<Object> res = new NamedList<>();
+          res.add("status", "failed");
+          res.add("msg", "Failed to assign '" + pair.getValue() + "' to be leader");
+          fails.add(asyncId.substring(REBALANCELEADERS.toLower().length()), res);
+          iter.remove();
+          foundChange = true;
+        } else if (coreContainer.getZkController().getOverseerCompletedMap().contains(asyncId)) {
+          coreContainer.getZkController().getOverseerCompletedMap().remove(asyncId);
+          NamedList<Object> successes = (NamedList<Object>) results.get("successes");
+          if (successes == null) {
+            successes = new NamedList<>();
+            results.add("successes", successes);
+          }
+          NamedList<Object> res = new NamedList<>();
+          res.add("status", "success");
+          res.add("msg", "Assigned '" + pair.getValue() + "' to be leader");
+          successes.add(asyncId.substring(REBALANCELEADERS.toLower().length()), res);
+          iter.remove();
+          foundChange = true;
+        }
+      }
+      // We're done if we're processing a few at a time or all requests are processed.
+      if ((foundChange && waitForAll == false) || currentAsyncIds.size() == 0) {
+        return true;
+      }
+      Thread.sleep(100); //TODO: Is there a better thing to do than sleep here?
+    }
+    return false;
+  }
   private void handleAddReplicaProp(SolrQueryRequest req, SolrQueryResponse rsp) throws KeeperException, InterruptedException {
     req.getParams().required().check(COLLECTION_PROP, PROPERTY_PROP, SHARD_ID_PROP, REPLICA_PROP, PROPERTY_VALUE_PROP);
 
@@ -425,7 +589,7 @@ public class CollectionsHandler extends 
        }
  
        NamedList<String> r = new NamedList<>();
- 
+
        if (coreContainer.getZkController().getOverseerCompletedMap().contains(asyncId) ||
            coreContainer.getZkController().getOverseerFailureMap().contains(asyncId) ||
            coreContainer.getZkController().getOverseerRunningMap().contains(asyncId) ||

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/admin/LukeRequestHandler.java Wed Oct 22 05:44:17 2014
@@ -176,7 +176,7 @@ public class LukeRequestHandler extends 
 
     StringBuilder flags = new StringBuilder();
 
-    flags.append( (f != null && f.fieldType().indexed())                     ? FieldFlag.INDEXED.getAbbreviation() : '-' );
+    flags.append( (f != null && f.fieldType().indexOptions() != null)                     ? FieldFlag.INDEXED.getAbbreviation() : '-' );
     flags.append( (f != null && f.fieldType().tokenized())                   ? FieldFlag.TOKENIZED.getAbbreviation() : '-' );
     flags.append( (f != null && f.fieldType().stored())                      ? FieldFlag.STORED.getAbbreviation() : '-' );
     flags.append( (f != null && f.fieldType().docValueType() != null)        ? FieldFlag.DOC_VALUES.getAbbreviation() : "-" );

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/component/QueryElevationComponent.java Wed Oct 22 05:44:17 2014
@@ -49,6 +49,7 @@ import org.apache.solr.common.params.Que
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.util.StrUtils;
 import org.apache.solr.schema.IndexSchema;
+import org.apache.solr.search.QueryParsing;
 import org.apache.solr.search.grouping.GroupingSpecification;
 import org.apache.solr.util.DOMUtil;
 import org.apache.solr.common.util.NamedList;
@@ -388,7 +389,8 @@ public class QueryElevationComponent ext
     String exStr = params.get(QueryElevationParams.EXCLUDE);
 
     Query query = rb.getQuery();
-    String qstr = rb.getQueryString();
+    SolrParams localParams = rb.getQparser().getLocalParams();
+    String qstr = localParams == null ? rb.getQueryString() : localParams.get(QueryParsing.V);
     if (query == null || qstr == null) {
       return;
     }

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/component/SearchHandler.java Wed Oct 22 05:44:17 2014
@@ -19,6 +19,7 @@ package org.apache.solr.handler.componen
 
 import org.apache.lucene.index.ExitableDirectoryReader;
 import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.common.SolrDocumentList;
 import org.apache.solr.common.SolrException;
 import org.apache.solr.common.SolrException.ErrorCode;
 import org.apache.solr.common.params.CommonParams;
@@ -244,6 +245,17 @@ public class SearchHandler extends Reque
         }
       } catch (ExitableDirectoryReader.ExitingReaderException ex) {
         log.warn( "Query: " + req.getParamString() + "; " + ex.getMessage());
+        SolrDocumentList r = (SolrDocumentList) rb.rsp.getValues().get("response");
+        if(r == null)
+          r = new SolrDocumentList();
+        r.setNumFound(0);
+        rb.rsp.add("response", r);
+        if(rb.isDebug()) {
+          NamedList debug = new NamedList();
+          debug.add("explain", new NamedList());
+          rb.rsp.add("debug", debug);
+        }
+        rb.rsp.getResponseHeader().add("partialResults", Boolean.TRUE);
       } finally {
         SolrQueryTimeoutImpl.reset();
       }

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/handler/loader/JsonLoader.java Wed Oct 22 05:44:17 2014
@@ -23,14 +23,19 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.HashMap;
 import java.util.Iterator;
+import java.util.LinkedHashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
-import java.util.concurrent.atomic.AtomicBoolean;
+import java.util.UUID;
 
 import org.apache.commons.io.IOUtils;
+import org.apache.solr.common.params.CommonParams;
 import org.apache.solr.common.params.SolrParams;
 import org.apache.solr.common.params.UpdateParams;
 import org.apache.solr.common.util.JsonRecordReader;
+import org.apache.solr.schema.SchemaField;
+import org.apache.solr.util.RecordingJSONParser;
 import org.noggit.JSONParser;
 import org.noggit.ObjectBuilder;
 import org.apache.solr.common.SolrException;
@@ -50,50 +55,49 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 
-
 /**
  * @since solr 4.0
  */
 public class JsonLoader extends ContentStreamLoader {
-  final static Logger log = LoggerFactory.getLogger( JsonLoader.class );
+  final static Logger log = LoggerFactory.getLogger(JsonLoader.class);
   private static final String CHILD_DOC_KEY = "_childDocuments_";
 
   @Override
   public String getDefaultWT() {
     return "json";
   }
-  
+
   @Override
   public void load(SolrQueryRequest req, SolrQueryResponse rsp,
-      ContentStream stream, UpdateRequestProcessor processor) throws Exception {
-    new SingleThreadedJsonLoader(req,rsp,processor).load(req, rsp, stream, processor);
+                   ContentStream stream, UpdateRequestProcessor processor) throws Exception {
+    new SingleThreadedJsonLoader(req, rsp, processor).load(req, rsp, stream, processor);
   }
 
-  
+
   static class SingleThreadedJsonLoader extends ContentStreamLoader {
-    
+
     protected final UpdateRequestProcessor processor;
     protected final SolrQueryRequest req;
     protected SolrQueryResponse rsp;
     protected JSONParser parser;
     protected final int commitWithin;
     protected final boolean overwrite;
-  
+
     public SingleThreadedJsonLoader(SolrQueryRequest req, SolrQueryResponse rsp, UpdateRequestProcessor processor) {
       this.processor = processor;
       this.req = req;
       this.rsp = rsp;
 
       commitWithin = req.getParams().getInt(UpdateParams.COMMIT_WITHIN, -1);
-      overwrite = req.getParams().getBool(UpdateParams.OVERWRITE, true);  
+      overwrite = req.getParams().getBool(UpdateParams.OVERWRITE, true);
     }
-  
+
     @Override
-    public void load(SolrQueryRequest req, 
-        SolrQueryResponse rsp, 
-        ContentStream stream, 
+    public void load(SolrQueryRequest req,
+        SolrQueryResponse rsp,
+        ContentStream stream,
         UpdateRequestProcessor processor) throws Exception {
-      
+
       Reader reader = null;
       try {
         reader = stream.getReader();
@@ -102,34 +106,32 @@ public class JsonLoader extends ContentS
           log.trace("body", body);
           reader = new StringReader(body);
         }
-  
-        parser = new JSONParser(reader);
-        this.processUpdate();
-      }
-      finally {
+
+        this.processUpdate(reader);
+      } finally {
         IOUtils.closeQuietly(reader);
       }
     }
-  
+
     @SuppressWarnings("fallthrough")
-    void processUpdate() throws IOException
-    {
+    void processUpdate(Reader reader) throws IOException {
       String path = (String) req.getContext().get("path");
       if(UpdateRequestHandler.DOC_PATH.equals(path) ||   "false".equals( req.getParams().get("json.command"))){
         String split = req.getParams().get("split");
         String[] f = req.getParams().getParams("f");
-        handleSplitMode(split,f);
+        handleSplitMode(split, f, reader);
         return;
       }
+      parser = new JSONParser(reader);
       int ev = parser.nextEvent();
       while( ev != JSONParser.EOF ) {
-        
+
         switch( ev )
         {
           case JSONParser.ARRAY_START:
             handleAdds();
             break;
-  
+
         case JSONParser.STRING:
           if( parser.wasKey() ) {
             String v = parser.getString();
@@ -167,7 +169,7 @@ public class JsonLoader extends ContentS
             break;
           }
           // fall through
-  
+
         case JSONParser.LONG:
         case JSONParser.NUMBER:
         case JSONParser.BIGNUMBER:
@@ -175,12 +177,12 @@ public class JsonLoader extends ContentS
         case JSONParser.NULL:
           log.info( "can't have a value here! "
               +JSONParser.getEventString(ev)+" "+parser.getPosition() );
-          
+
         case JSONParser.OBJECT_START:
         case JSONParser.OBJECT_END:
         case JSONParser.ARRAY_END:
           break;
-          
+
         default:
           log.info("Noggit UNKNOWN_EVENT_ID:"+ev);
           break;
@@ -190,27 +192,41 @@ public class JsonLoader extends ContentS
       }
     }
 
-    private void handleSplitMode(String split, String[] fields) throws IOException {
-      if(split == null) split = "/";
-      if(fields == null || fields.length ==0) fields = new String[]{"$FQN:/**"};
-      final boolean echo = "true".equals( req.getParams().get("echo"));
+    private void handleSplitMode(String split, String[] fields, final Reader reader) throws IOException {
+      if (split == null) split = "/";
+      if (fields == null || fields.length == 0) fields = new String[]{"$FQN:/**"};
+      final boolean echo = "true".equals(req.getParams().get("echo"));
+      final String srcField = req.getParams().get("srcField");
+      final boolean mapUniqueKeyOnly = req.getParams().getBool("mapUniqueKeyOnly",false);
+      if (srcField != null) {
+        if (!"/".equals(split))
+          throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Raw data can be stored only if split=/");
+        parser = new RecordingJSONParser(reader);
+      } else {
+        parser = new JSONParser(reader);
+
+      }
+
       JsonRecordReader jsonRecordReader = JsonRecordReader.getInst(split, Arrays.asList(fields));
-      jsonRecordReader.streamRecords(parser,new JsonRecordReader.Handler() {
-        ArrayList docs =null;
+      jsonRecordReader.streamRecords(parser, new JsonRecordReader.Handler() {
+        ArrayList docs = null;
+
         @Override
         public void handle(Map<String, Object> record, String path) {
-          if(echo){
-            if(docs ==null) {
+          Map<String, Object> copy = getDocMap(record, parser, srcField, mapUniqueKeyOnly);
+
+          if (echo) {
+            if (docs == null) {
               docs = new ArrayList();
-              rsp.add("docs",docs);
+              rsp.add("docs", docs);
             }
-            docs.add(record);
+            docs.add(copy);
           } else {
             AddUpdateCommand cmd = new AddUpdateCommand(req);
             cmd.commitWithin = commitWithin;
             cmd.overwrite = overwrite;
-            cmd.solrDoc =  new SolrInputDocument();
-            for (Map.Entry<String, Object> entry : record.entrySet()) {
+            cmd.solrDoc = new SolrInputDocument();
+            for (Map.Entry<String, Object> entry : copy.entrySet()) {
               cmd.solrDoc.setField(entry.getKey(),entry.getValue());
             }
             try {
@@ -223,6 +239,37 @@ public class JsonLoader extends ContentS
       });
     }
 
+    private Map<String, Object> getDocMap(Map<String, Object> record, JSONParser parser, String srcField, boolean mapUniqueKeyOnly) {
+      Map result = record;
+      if(srcField != null && parser instanceof RecordingJSONParser){
+        //if srcField is specified, extract the raw source text out first
+        result = new LinkedHashMap(record);
+        RecordingJSONParser rjp = (RecordingJSONParser) parser;
+        result.put(srcField, rjp.getBuf());
+        rjp.resetBuf();
+      }
+      if(mapUniqueKeyOnly){
+        SchemaField sf = req.getSchema().getUniqueKeyField();
+        if(sf == null) throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No uniqueKey specified in schema");
+        String df = req.getParams().get(CommonParams.DF);
+        if(df == null)throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "No 'df' specified in request");
+        Map copy = new LinkedHashMap();
+        String uniqueField = (String) record.get(sf.getName());
+        if(uniqueField == null) uniqueField = UUID.randomUUID().toString().toLowerCase(Locale.ROOT);
+        copy.put(sf.getName(),uniqueField);
+        if(srcField != null && result.containsKey(srcField)){
+          copy.put(srcField, result.remove(srcField));
+        }
+        copy.put(df, result.values());
+        result = copy;
+      }
+
+
+      return result;
+    }
+
+
+
     /*private void handleStreamingSingleDocs() throws IOException
     {
       while( true ) {
@@ -352,18 +399,18 @@ public class JsonLoader extends ContentS
 
 
 
-    
+
     RollbackUpdateCommand parseRollback() throws IOException {
       assertNextEvent( JSONParser.OBJECT_START );
       assertNextEvent( JSONParser.OBJECT_END );
       return new RollbackUpdateCommand(req);
     }
-  
+
     void parseCommitOptions(CommitUpdateCommand cmd ) throws IOException
     {
       assertNextEvent( JSONParser.OBJECT_START );
       final Map<String,Object> map = (Map)ObjectBuilder.getVal(parser);
-  
+
       // SolrParams currently expects string values...
       SolrParams p = new SolrParams() {
         @Override
@@ -371,31 +418,31 @@ public class JsonLoader extends ContentS
           Object o = map.get(param);
           return o == null ? null : o.toString();
         }
-  
+
         @Override
         public String[] getParams(String param) {
           return new String[]{get(param)};
         }
-  
+
         @Override
         public Iterator<String> getParameterNamesIterator() {
           return map.keySet().iterator();
         }
       };
-  
+
       RequestHandlerUtils.validateCommitParams(p);
       p = SolrParams.wrapDefaults(p, req.getParams());   // default to the normal request params for commit options
       RequestHandlerUtils.updateCommit(cmd, p);
     }
-    
+
     AddUpdateCommand parseAdd() throws IOException
     {
       AddUpdateCommand cmd = new AddUpdateCommand(req);
       cmd.commitWithin = commitWithin;
       cmd.overwrite = overwrite;
-  
+
       float boost = 1.0f;
-      
+
       while( true ) {
         int ev = parser.nextEvent();
         if( ev == JSONParser.STRING ) {
@@ -431,7 +478,7 @@ public class JsonLoader extends ContentS
           if( cmd.solrDoc == null ) {
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,"missing solr document. "+parser.getPosition() );
           }
-          cmd.solrDoc.setDocumentBoost( boost ); 
+          cmd.solrDoc.setDocumentBoost( boost );
           return cmd;
         }
         else {
@@ -441,8 +488,8 @@ public class JsonLoader extends ContentS
         }
       }
     }
-  
-  
+
+
     void handleAdds() throws IOException
     {
       while( true ) {
@@ -458,15 +505,15 @@ public class JsonLoader extends ContentS
         processor.processAdd(cmd);
       }
     }
-  
-  
+
+
     int assertNextEvent(int expected ) throws IOException
     {
       int got = parser.nextEvent();
       assertEvent(got, expected);
       return got;
     }
-  
+
     void assertEvent(int ev, int expected) {
       if( ev != expected ) {
         throw new SolrException(SolrException.ErrorCode.BAD_REQUEST,
@@ -518,14 +565,14 @@ public class JsonLoader extends ContentS
         sif.setValue(val, 1.0f);
       }
     }
-  
+
     private void parseExtendedFieldValue(SolrInputField sif, int ev)  throws IOException {
       assert ev == JSONParser.OBJECT_START;
-  
+
       float boost = 1.0f;
       Object normalFieldValue = null;
       Map<String, Object> extendedInfo = null;
-  
+
       for (;;) {
         ev = parser.nextEvent();
         switch (ev) {
@@ -538,7 +585,7 @@ public class JsonLoader extends ContentS
                   ev != JSONParser.BIGNUMBER ) {
                 throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "boost should have number! "+JSONParser.getEventString(ev) );
               }
-  
+
               boost = (float)parser.getDouble();
             } else if ("value".equals(label)) {
               normalFieldValue = parseNormalFieldValue(parser.nextEvent());
@@ -553,7 +600,7 @@ public class JsonLoader extends ContentS
               extendedInfo.put(label, val);
             }
             break;
-  
+
           case JSONParser.OBJECT_END:
             if (extendedInfo != null) {
               if (normalFieldValue != null) {
@@ -564,14 +611,14 @@ public class JsonLoader extends ContentS
               sif.setValue(normalFieldValue, boost);
             }
             return;
-  
+
           default:
             throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing JSON extended field value. Unexpected "+JSONParser.getEventString(ev) );
         }
       }
     }
-  
-  
+
+
     private Object parseNormalFieldValue(int ev) throws IOException {
       if (ev == JSONParser.ARRAY_START) {
         List<Object> val = parseArrayFieldValue(ev);
@@ -581,8 +628,8 @@ public class JsonLoader extends ContentS
         return val;
       }
     }
-  
-  
+
+
     private Object parseSingleFieldValue(int ev) throws IOException {
       switch (ev) {
         case JSONParser.STRING:
@@ -604,11 +651,11 @@ public class JsonLoader extends ContentS
           throw new SolrException(SolrException.ErrorCode.BAD_REQUEST, "Error parsing JSON field value. Unexpected "+JSONParser.getEventString(ev) );
       }
     }
-  
-  
+
+
     private List<Object> parseArrayFieldValue(int ev) throws IOException {
       assert ev == JSONParser.ARRAY_START;
-  
+
       ArrayList lst = new ArrayList(2);
       for (;;) {
         ev = parser.nextEvent();

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/BoolField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/BoolField.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/BoolField.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/BoolField.java Wed Oct 22 05:44:17 2014
@@ -172,7 +172,7 @@ public class BoolField extends Primitive
   }
 }
 
-// TODO - this can be much more efficient - use OpenBitSet or Bits
+// TODO - this can be much more efficient - use FixedBitSet or Bits
 class BoolFieldSource extends ValueSource {
   protected String field;
 

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/EnumField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/EnumField.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/EnumField.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/EnumField.java Wed Oct 22 05:44:17 2014
@@ -381,11 +381,10 @@ public class EnumField extends Primitive
     String intAsString =  intValue.toString();
     final FieldType newType = new FieldType();
 
-    newType.setIndexed(field.indexed());
     newType.setTokenized(field.isTokenized());
     newType.setStored(field.stored());
     newType.setOmitNorms(field.omitNorms());
-    newType.setIndexOptions(getIndexOptions(field, intAsString));
+    newType.setIndexOptions(field.indexed() ? getIndexOptions(field, intAsString) : null);
     newType.setStoreTermVectors(field.storeTermVector());
     newType.setStoreTermVectorOffsets(field.storeTermOffsets());
     newType.setStoreTermVectorPositions(field.storeTermPositions());

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/FieldType.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/FieldType.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/FieldType.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/FieldType.java Wed Oct 22 05:44:17 2014
@@ -265,11 +265,10 @@ public abstract class FieldType extends 
     if (val==null) return null;
 
     org.apache.lucene.document.FieldType newType = new org.apache.lucene.document.FieldType();
-    newType.setIndexed(field.indexed());
     newType.setTokenized(field.isTokenized());
     newType.setStored(field.stored());
     newType.setOmitNorms(field.omitNorms());
-    newType.setIndexOptions(getIndexOptions(field, val));
+    newType.setIndexOptions(field.indexed() ? getIndexOptions(field, val) : null);
     newType.setStoreTermVectors(field.storeTermVector());
     newType.setStoreTermVectorOffsets(field.storeTermOffsets());
     newType.setStoreTermVectorPositions(field.storeTermPositions());

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/PreAnalyzedField.java Wed Oct 22 05:44:17 2014
@@ -163,7 +163,6 @@ public class PreAnalyzedField extends Fi
       return null;
     }
     org.apache.lucene.document.FieldType newType = new org.apache.lucene.document.FieldType();
-    newType.setIndexed(field.indexed());
     newType.setTokenized(field.isTokenized());
     newType.setStored(field.stored());
     newType.setOmitNorms(field.omitNorms());
@@ -243,7 +242,6 @@ public class PreAnalyzedField extends Fi
     
     if (parse.hasTokenStream()) {
       if (field.indexed()) {
-        type.setIndexed(true);
         type.setTokenized(true);
         if (f != null) {
           f.setTokenStream(parse);
@@ -252,7 +250,7 @@ public class PreAnalyzedField extends Fi
         }
       } else {
         if (f != null) {
-          f.fieldType().setIndexed(false);
+          f.fieldType().setIndexOptions(null);
           f.fieldType().setTokenized(false);
         }
       }

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/TrieField.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/TrieField.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/TrieField.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/schema/TrieField.java Wed Oct 22 05:44:17 2014
@@ -600,9 +600,8 @@ public class TrieField extends Primitive
     FieldType ft = new FieldType();
     ft.setStored(stored);
     ft.setTokenized(true);
-    ft.setIndexed(indexed);
     ft.setOmitNorms(field.omitNorms());
-    ft.setIndexOptions(getIndexOptions(field, value.toString()));
+    ft.setIndexOptions(indexed ? getIndexOptions(field, value.toString()) : null);
 
     switch (type) {
       case INTEGER:

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/Insanity.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/Insanity.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/Insanity.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/Insanity.java Wed Oct 22 05:44:17 2014
@@ -64,7 +64,7 @@ public class Insanity {
       ArrayList<FieldInfo> filteredInfos = new ArrayList<>();
       for (FieldInfo fi : in.getFieldInfos()) {
         if (fi.name.equals(insaneField)) {
-          filteredInfos.add(new FieldInfo(fi.name, fi.isIndexed(), fi.number, fi.hasVectors(), fi.omitsNorms(),
+          filteredInfos.add(new FieldInfo(fi.name, fi.number, fi.hasVectors(), fi.omitsNorms(),
                                           fi.hasPayloads(), fi.getIndexOptions(), null, -1, null));
         } else {
           filteredInfos.add(fi);

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/SolrIndexSearcher.java Wed Oct 22 05:44:17 2014
@@ -608,7 +608,6 @@ public class SolrIndexSearcher extends I
     public void stringField(FieldInfo fieldInfo, String value) throws IOException {
       final FieldType ft = new FieldType(TextField.TYPE_STORED);
       ft.setStoreTermVectors(fieldInfo.hasVectors());
-      ft.setIndexed(fieldInfo.isIndexed());
       ft.setOmitNorms(fieldInfo.omitsNorms());
       ft.setIndexOptions(fieldInfo.getIndexOptions());
       doc.add(new Field(fieldInfo.name, value, ft));
@@ -618,7 +617,7 @@ public class SolrIndexSearcher extends I
     public void intField(FieldInfo fieldInfo, int value) {
       FieldType ft = new FieldType(IntField.TYPE_NOT_STORED);
       ft.setStored(true);
-      ft.setIndexed(fieldInfo.isIndexed());
+      ft.setIndexOptions(fieldInfo.getIndexOptions());
       doc.add(new IntField(fieldInfo.name, value, ft));
     }
 
@@ -626,7 +625,7 @@ public class SolrIndexSearcher extends I
     public void longField(FieldInfo fieldInfo, long value) {
       FieldType ft = new FieldType(LongField.TYPE_NOT_STORED);
       ft.setStored(true);
-      ft.setIndexed(fieldInfo.isIndexed());
+      ft.setIndexOptions(fieldInfo.getIndexOptions());
       doc.add(new LongField(fieldInfo.name, value, ft));
     }
 
@@ -634,7 +633,7 @@ public class SolrIndexSearcher extends I
     public void floatField(FieldInfo fieldInfo, float value) {
       FieldType ft = new FieldType(FloatField.TYPE_NOT_STORED);
       ft.setStored(true);
-      ft.setIndexed(fieldInfo.isIndexed());
+      ft.setIndexOptions(fieldInfo.getIndexOptions());
       doc.add(new FloatField(fieldInfo.name, value, ft));
     }
 
@@ -642,7 +641,7 @@ public class SolrIndexSearcher extends I
     public void doubleField(FieldInfo fieldInfo, double value) {
       FieldType ft = new FieldType(DoubleField.TYPE_NOT_STORED);
       ft.setStored(true);
-      ft.setIndexed(fieldInfo.isIndexed());
+      ft.setIndexOptions(fieldInfo.getIndexOptions());
       doc.add(new DoubleField(fieldInfo.name, value, ft));
     }
   }

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/search/grouping/distributed/responseprocessor/TopGroupsShardResponseProcessor.java Wed Oct 22 05:44:17 2014
@@ -120,6 +120,8 @@ public class TopGroupsShardResponseProce
         continue; // continue if there was an error and we're tolerant.  
       }
       NamedList<NamedList> secondPhaseResult = (NamedList<NamedList>) srsp.getSolrResponse().getResponse().get("secondPhase");
+      if(secondPhaseResult == null)
+        continue;
       Map<String, ?> result = serializer.transformToNative(secondPhaseResult, groupSort, sortWithinGroup, srsp.getShard());
       int numFound = 0;
       float maxScore = Float.NaN;

Modified: lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/update/UpdateLog.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/update/UpdateLog.java?rev=1633538&r1=1633537&r2=1633538&view=diff
==============================================================================
--- lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/update/UpdateLog.java (original)
+++ lucene/dev/branches/lucene5969/solr/core/src/java/org/apache/solr/update/UpdateLog.java Wed Oct 22 05:44:17 2014
@@ -197,7 +197,7 @@ public class UpdateLog implements Plugin
 
   public long getTotalLogsSize() {
     long size = 0;
-    synchronized (logs) {
+    synchronized (this) {
       for (TransactionLog log : logs) {
         size += log.getLogSize();
       }
@@ -206,7 +206,9 @@ public class UpdateLog implements Plugin
   }
 
   public long getTotalLogsNumber() {
-    return logs.size();
+    synchronized (this) {
+      return logs.size();
+    }
   }
 
   public VersionInfo getVersionInfo() {
@@ -317,7 +319,7 @@ public class UpdateLog implements Plugin
   /* Takes over ownership of the log, keeping it until no longer needed
      and then decrementing it's reference and dropping it.
    */
-  protected void addOldLog(TransactionLog oldLog, boolean removeOld) {
+  protected synchronized void addOldLog(TransactionLog oldLog, boolean removeOld) {
     if (oldLog == null) return;
 
     numOldRecords += oldLog.numRecords();