Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/06/09 23:11:30 UTC

svn commit: r413169 [1/2] - in /lucene/hadoop/branches/branch-0.3: ./ bin/ conf/ site/ src/contrib/streaming/src/java/org/apache/hadoop/streaming/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/fs/ src/site/src/documentation/content/xdocs/ ...

Author: cutting
Date: Fri Jun  9 14:11:29 2006
New Revision: 413169

URL: http://svn.apache.org/viewvc?rev=413169&view=rev
Log:
merge -r 411936:413147 from trunk, preparing for 0.3.2 release

Modified:
    lucene/hadoop/branches/branch-0.3/CHANGES.txt
    lucene/hadoop/branches/branch-0.3/bin/hadoop
    lucene/hadoop/branches/branch-0.3/build.xml
    lucene/hadoop/branches/branch-0.3/conf/log4j.properties
    lucene/hadoop/branches/branch-0.3/site/index.html
    lucene/hadoop/branches/branch-0.3/site/index.pdf
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamInputFormat.java
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamLineRecordReader.java
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
    lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java
    lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DFSClient.java
    lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DataNode.java
    lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/FSDirectory.java
    lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/FSNamesystem.java
    lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/fs/FileSystem.java
    lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/fs/LocalFileSystem.java
    lucene/hadoop/branches/branch-0.3/src/site/src/documentation/content/xdocs/index.xml
    lucene/hadoop/branches/branch-0.3/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
    lucene/hadoop/branches/branch-0.3/src/test/org/apache/hadoop/test/AllTestDriver.java

Modified: lucene/hadoop/branches/branch-0.3/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/CHANGES.txt?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/CHANGES.txt (original)
+++ lucene/hadoop/branches/branch-0.3/CHANGES.txt Fri Jun  9 14:11:29 2006
@@ -1,6 +1,39 @@
 Hadoop Change Log
 
 
+Release 0.3.2 - 2006-06-09
+
+ 1. HADOOP-275.  Update the streaming contrib module to use log4j for
+    its logging.  (Michel Tourn via cutting)
+
+ 2. HADOOP-279.  Provide defaults for log4j logging parameters, so
+    that things still work reasonably when Hadoop-specific system
+    properties are not provided.  (omalley via cutting)
+
+ 3. HADOOP-280.  Fix a typo in AllTestDriver which caused the wrong
+    test to be run when "DistributedFSCheck" was specified.
+   (Konstantin Shvachko via cutting)
+
+ 4. HADOOP-240.  DFS's mkdirs() implementation no longer logs a warning
+    when the directory already exists. (Hairong Kuang via cutting)
+
+ 5. HADOOP-285.  Fix DFS datanodes to be able to re-join the cluster
+    after the connection to the namenode is lost.  (omalley via cutting)
+
+ 6. HADOOP-277.  Fix a race condition when creating directories.
+   (Sameer Paranjpye via cutting)
+
+ 7. HADOOP-289.  Improved exception handling in DFS datanode.
+    (Konstantin Shvachko via cutting)
+
+ 8. HADOOP-292.  Fix client-side logging to go to standard error
+    rather than standard output, so that it can be distinguished from
+    application output.  (omalley via cutting)
+
+ 9. HADOOP-294.  Fixed bug where conditions for retrying after errors
+    in the DFS client were reversed.  (omalley via cutting)
+
+
 Release 0.3.1 - 2006-06-05
 
  1. HADOOP-272.  Fix a bug in bin/hadoop setting log

Modified: lucene/hadoop/branches/branch-0.3/bin/hadoop
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/bin/hadoop?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/bin/hadoop (original)
+++ lucene/hadoop/branches/branch-0.3/bin/hadoop Fri Jun  9 14:11:29 2006
@@ -13,6 +13,8 @@
 #
 #   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 #
+#   HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
+#
 
 # resolve links - $0 may be a softlink
 THIS="$0"
@@ -162,7 +164,7 @@
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,stdout}"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
 
 # run it
 exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"

Modified: lucene/hadoop/branches/branch-0.3/build.xml
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/build.xml?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/build.xml (original)
+++ lucene/hadoop/branches/branch-0.3/build.xml Fri Jun  9 14:11:29 2006
@@ -9,7 +9,7 @@
  
   <property name="Name" value="Hadoop"/>
   <property name="name" value="hadoop"/>
-  <property name="version" value="0.3.2-dev"/>
+  <property name="version" value="0.3.3-dev"/>
   <property name="final.name" value="${name}-${version}"/>
   <property name="year" value="2006"/>
   <property name="libhdfs.version" value="1"/>

Modified: lucene/hadoop/branches/branch-0.3/conf/log4j.properties
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/conf/log4j.properties?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/conf/log4j.properties (original)
+++ lucene/hadoop/branches/branch-0.3/conf/log4j.properties Fri Jun  9 14:11:29 2006
@@ -1,10 +1,14 @@
-# RootLogger - DailyRollingFileAppender
+# Define some default values that can be overridden by system properties
+hadoop.root.logger=INFO,console
+hadoop.log.dir=.
+hadoop.log.file=hadoop.log
+
+# Define the root logger to the system property "hadoop.root.logger".
 log4j.rootLogger=${hadoop.root.logger}
 
 # Logging Threshold
 log4j.threshhold=ALL
 
-
 #
 # Daily Rolling File Appender
 #
@@ -26,13 +30,14 @@
 
 
 #
-# stdout
-# Add *stdout* to rootlogger above if you want to use this 
+# console
+# Add "console" to rootlogger above if you want to use this 
 #
 
-log4j.appender.stdout=org.apache.log4j.ConsoleAppender
-log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
-log4j.appender.stdout.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+log4j.appender.console=org.apache.log4j.ConsoleAppender
+log4j.appender.console.target=System.err
+log4j.appender.console.layout=org.apache.log4j.PatternLayout
+log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
 
 #
 # Rolling File Appender
@@ -48,4 +53,9 @@
 #log4j.appender.RFA.layout=org.apache.log4j.PatternLayout
 #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} - %m%n
 #log4j.appender.RFA.layout.ConversionPattern=%d{ISO8601} %-5p %c{2} (%F:%M(%L)) - %m%n
+
+# Custom Logging levels
+
+#log4j.logger.org.apache.hadoop.mapred.JobTracker=DEBUG
+#log4j.logger.org.apache.hadoop.mapred.TaskTracker=DEBUG
 

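The net effect of the two logging changes above — bin/hadoop now defaults hadoop.root.logger to INFO,console, and conf/log4j.properties now carries its own fallback values plus a console appender that writes to System.err — is that logging still works when the Hadoop-specific system properties are absent (HADOOP-279), and client-side log output lands on standard error rather than mixing with application output (HADOOP-292). A minimal, hypothetical sketch of that mechanism using the log4j 1.2 API directly; the class name LoggingDemo and the hard-coded config path are assumptions for illustration, not part of this commit:

    import org.apache.log4j.Logger;
    import org.apache.log4j.PropertyConfigurator;

    public class LoggingDemo {
      public static void main(String[] args) {
        // Optional override, equivalent to bin/hadoop's -Dhadoop.root.logger=...;
        // remove this line and log4j falls back to the in-file default
        // "hadoop.root.logger=INFO,console" defined above.
        System.setProperty("hadoop.root.logger", "DEBUG,console");
        PropertyConfigurator.configure("conf/log4j.properties");
        // Goes to System.err via the console appender, so it no longer
        // interleaves with application data written to standard output.
        Logger.getLogger(LoggingDemo.class).info("hello from the console appender");
      }
    }
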
Modified: lucene/hadoop/branches/branch-0.3/site/index.html
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/site/index.html?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/site/index.html (original)
+++ lucene/hadoop/branches/branch-0.3/site/index.html Fri Jun  9 14:11:29 2006
@@ -122,6 +122,12 @@
 <a href="#News">News</a>
 <ul class="minitoc">
 <li>
+<a href="#9+June%2C+2006%3A+release+0.3.2+available">9 June, 2006: release 0.3.2 available</a>
+</li>
+<li>
+<a href="#8+June%2C+2006%3A+FAQ+added+to+Wiki">8 June, 2006: FAQ added to Wiki</a>
+</li>
+<li>
 <a href="#5+June%2C+2006%3A+release+0.3.1+available">5 June, 2006: release 0.3.1 available</a>
 </li>
 <li>
@@ -154,26 +160,37 @@
 <a name="N1000C"></a><a name="News"></a>
 <h2 class="h3">News</h2>
 <div class="section">
-<a name="N10012"></a><a name="5+June%2C+2006%3A+release+0.3.1+available"></a>
+<a name="N10012"></a><a name="9+June%2C+2006%3A+release+0.3.2+available"></a>
+<h3 class="h4">9 June, 2006: release 0.3.2 available</h3>
+<p>This is a bugfix release.  For details see the <a href="http://tinyurl.com/k9g5c">change log</a>. The release can
+      be obtained from <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
+      nearby mirror</a>.
+      </p>
+<a name="N10024"></a><a name="8+June%2C+2006%3A+FAQ+added+to+Wiki"></a>
+<h3 class="h4">8 June, 2006: FAQ added to Wiki</h3>
+<p>Hadoop now has a <a href="http://wiki.apache.org/lucene-hadoop/FAQ">FAQ</a>.  Please
+      help make this more complete!
+      </p>
+<a name="N10032"></a><a name="5+June%2C+2006%3A+release+0.3.1+available"></a>
 <h3 class="h4">5 June, 2006: release 0.3.1 available</h3>
 <p>This is a bugfix release.  For details see the <a href="http://tinyurl.com/l6on4">change log</a>. The release can
       be obtained from <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
       nearby mirror</a>.
       </p>
-<a name="N10024"></a><a name="2+June%2C+2006%3A+release+0.3.0+available"></a>
+<a name="N10044"></a><a name="2+June%2C+2006%3A+release+0.3.0+available"></a>
 <h3 class="h4">2 June, 2006: release 0.3.0 available</h3>
 <p>This includes many fixes, improving performance, scalability
       and reliability and adding new features.  For details see the <a href="http://tinyurl.com/rq3f7">change log</a>. The release can
       be obtained from <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
       nearby mirror</a>.
       </p>
-<a name="N10036"></a><a name="12+May%2C+2006%3A+release+0.2.1+available"></a>
+<a name="N10056"></a><a name="12+May%2C+2006%3A+release+0.2.1+available"></a>
 <h3 class="h4">12 May, 2006: release 0.2.1 available</h3>
 <p>This fixes a few bugs in release 0.2.0, listed in the <a href="http://tinyurl.com/rnnvz">change log</a>. The
       release can be obtained from <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
       nearby mirror</a>.
       </p>
-<a name="N10048"></a><a name="5+May%2C+2006%3A+release+0.2.0+available"></a>
+<a name="N10068"></a><a name="5+May%2C+2006%3A+release+0.2.0+available"></a>
 <h3 class="h4">5 May, 2006: release 0.2.0 available</h3>
 <p>We are now aiming for monthly releases.  There have been many
       bug fixes and improvements in the past month.  MapReduce and DFS
@@ -182,24 +199,24 @@
       details. The release can be obtained from <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/"> a
       nearby mirror</a>.
       </p>
-<a name="N1005A"></a><a name="2+April%2C+2006%3A+release+0.1.0+available"></a>
+<a name="N1007A"></a><a name="2+April%2C+2006%3A+release+0.1.0+available"></a>
 <h3 class="h4">2 April, 2006: release 0.1.0 available</h3>
 <p>This is the first Hadoop release.  The release is available
       <a href="http://www.apache.org/dyn/closer.cgi/lucene/hadoop/">
       here</a>.</p>
-<a name="N10068"></a><a name="6+February%2C+2006%3A+nightly+builds"></a>
+<a name="N10088"></a><a name="6+February%2C+2006%3A+nightly+builds"></a>
 <h3 class="h4">6 February, 2006: nightly builds</h3>
 <p>Hadoop now has nightly builds.  This automatically creates a
       <a href="http://cvs.apache.org/dist/lucene/hadoop/nightly/">downloadable version of Hadoop every
       night</a>.  All unit tests must pass, or a message is sent to
       the developers mailing list and no new version is created.  This
       also updates the <a href="docs/api/">javadoc</a>.</p>
-<a name="N1007A"></a><a name="3+February%2C+2006%3A+Hadoop+code+moved+out+of+Nutch"></a>
+<a name="N1009A"></a><a name="3+February%2C+2006%3A+Hadoop+code+moved+out+of+Nutch"></a>
 <h3 class="h4">3 February, 2006: Hadoop code moved out of Nutch</h3>
 <p>The Hadoop code has now been moved into its own Subversion
       tree, renamed into packages under <span class="codefrag">org.apache.hadoop</span>.
       All unit tests pass, but little else has yet been tested.</p>
-<a name="N10087"></a><a name="30+March%2C+2006%3A+Hadoop+project+approved"></a>
+<a name="N100A7"></a><a name="30+March%2C+2006%3A+Hadoop+project+approved"></a>
 <h3 class="h4">30 March, 2006: Hadoop project approved</h3>
 <p>The Lucene PMC has elected to split the Nutch MapReduce and
       distributed filesytem code into a new project named Hadoop.</p>

Modified: lucene/hadoop/branches/branch-0.3/site/index.pdf
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/site/index.pdf?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/site/index.pdf (original)
+++ lucene/hadoop/branches/branch-0.3/site/index.pdf Fri Jun  9 14:11:29 2006
@@ -5,10 +5,10 @@
 /Producer (FOP 0.20.5) >>
 endobj
 5 0 obj
-<< /Length 707 /Filter [ /ASCII85Decode /FlateDecode ]
+<< /Length 783 /Filter [ /ASCII85Decode /FlateDecode ]
  >>
 stream
-Gb!$E?#Q2d'Sc)P(#E4GD4Wtr+hMNf78p$_RYK=MFVbNI%26R:5cah[foY3WW=!)$\gA6PR$V=._o^lN8:pRDN`_)D8lbt45A<C0D*S]<?]*u`9nidl)5C_p^<`s)Od_7n6gn+kgQSt`puB[M1T#J>L45fVdcQQ$h<-sm-mF3kQ[Q_=/W26PE2R;[jc<>=2S*_56U$&L3Qsu"EsW#h@FXJ8:sJu9GF(\iQ+j`l]A;uVs4GaOlM_Hk)*;$3,;(srm%JBGZuj<m37&$DqQ_^G>9>Uej9HdX]$P1"Srp]V\(=!GmSmNb:\"6Hg23c"piI]Z?nr:%j$\Eh4c'GiV*r22`:6*GNtOJ""!h7'DA!QT69R)lLRhFci";"@!.+/L*3Ze:,k9=sMfBk)&,P>!Wq0[4')GKs*"`tZPP$1F+Tl4SNY;G>kt>4DA_jB_8=u.BmCF`iIUUpl7J78L88]e=V)Oo)(e@'T,_2I^+jjL!q!QHnYADFRc@AK"S&ZPe<SO>8^Sg8k,j#d(&-5794AH.F3J"N4:=lDMWG;$'VZ;:Jlr$@;\ba=.+F>ac4c3$,n>e@$SWBNk_NCt#]*URX"KF=_"^cliB.V^9NSp0mT`DgTc)8)dBPsg9H$hI!Y',:"D^TIjTK]YXKTD9Rn=\s08n%YBXI=K9K9_.)b=je(.5A?6."'D/]`k[tlIn#4^@^<1YtdX~>
+Gb!$Fhf"u<&BE](/%i>jg-#DN#54?XMl'+I<"5f;h&Zh]%3*-B5cahWZQ[;SBaVu1Fs.CdRI_;Gq"csW`npS18'K!V`=%aZ^XOLlhP5mo+%;=ZP<QffL+$B%]?.0);S>sEkT($tF*#cCfD_:N>$#Vpd!$12RFhP7gXnRg73F;GZ4@d"o:cU?m&c+N\9=mq7[;5ELaN`[]C@:X2P?_q&,LMbHF#iK]A_-6>!8>Dh/)ld?Y8O,Fas4#_Y,V;&f7'Jp9\eOGNk&rZ/ZodcrY(`hE$i4,!S&n<eX/si#S4u`6=Xk6]r8a>C:m]Af<-%5+DlnQjcC83"b'j*_Ks$]`H=&16jl+h[b/42ZK2[Pe&C_eq5\]kl<Jf,]Pf*iM0qV^Gne8oLu[PS#dn@,5sP6i6P.B6I(jP,VG3g0hqlJ"!h6:bu$m].r"'X*^0]%<K?YNro+V3B.FNt/.=dqE?N%=k[RfHH#XN2qH?G$]GgEfocPGHJ@$5T'*8(FHrAJ,DNfC-:<NK`fb>CW0lQY-Hbo!dcGYdj"qLft:8:Z!d%;CIU+\<2SX9G[XRlVm*p^91HZaVi2X0eikujR1r.tn*V^!FJ=t/-_-a\-6!)?oWWRKtfE2m+A:aoe6<2Q]`oK:"DJro\H+?md$YO3gd<W&'a3&'(TT"sDGJSKlShMl_nO!&7(7W?EM?[rfGbtaB8-o-\5_YC(19l2hA4*Z-_Ji_'d&^H9#i3Y8AOqcVWoqH_*C="ccD4)\EY"K1X'nnHT\_"VtCK2=HfId%\mVBJ,~>
 endstream
 endobj
 6 0 obj
@@ -31,6 +31,8 @@
 20 0 R
 22 0 R
 24 0 R
+26 0 R
+28 0 R
 ]
 endobj
 8 0 obj
@@ -56,7 +58,7 @@
 12 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 108.0 523.266 297.464 511.266 ]
+/Rect [ 108.0 523.266 285.824 511.266 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A 13 0 R
@@ -66,7 +68,7 @@
 14 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 108.0 505.066 303.464 493.066 ]
+/Rect [ 108.0 505.066 297.464 493.066 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A 15 0 R
@@ -86,7 +88,7 @@
 18 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 108.0 468.666 300.8 456.666 ]
+/Rect [ 108.0 468.666 303.464 456.666 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A 19 0 R
@@ -96,7 +98,7 @@
 20 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 108.0 450.466 278.504 438.466 ]
+/Rect [ 108.0 450.466 297.464 438.466 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A 21 0 R
@@ -106,7 +108,7 @@
 22 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 108.0 432.266 375.128 420.266 ]
+/Rect [ 108.0 432.266 300.8 420.266 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A 23 0 R
@@ -116,7 +118,7 @@
 24 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 108.0 414.066 328.124 402.066 ]
+/Rect [ 108.0 414.066 278.504 402.066 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A 25 0 R
@@ -124,27 +126,43 @@
 >>
 endobj
 26 0 obj
-<< /Length 2596 /Filter [ /ASCII85Decode /FlateDecode ]
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 395.866 375.128 383.866 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 27 0 R
+/H /I
+>>
+endobj
+28 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 108.0 377.666 333.124 365.666 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A 29 0 R
+/H /I
+>>
+endobj
+30 0 obj
+<< /Length 2546 /Filter [ /ASCII85Decode /FlateDecode ]
  >>
 stream
-GauHM?*E$?&:NGC&`,bS'0CBsnC@XE<@W...@Eaksaq>.#-jq!)Rdf$>i-)V`#YYLQaHiZj/5`3<OuAE0/*V/`"H[[)6";,6L0+[J'WRFj8'lN%\`oO*JB3NUoGjO@UPPQ",qjJi1V6FXnjnkpO7)+B*l@hP]#N&Js/mioK-ir205%)0,:O^/6Cu^:hHnje:5;+HJ4S`E9Jlq_H7b(Z*B&jmfkCNm9&;-V%3OLco%dUg&;C3N^nq*
 3BLNbj*A(-u(83`RU,W*J,$]:V%am6P>WMSA;NO)X)'$R'D:>DAJoh@?OY,lWTcgNsN\hbiQe^tK$,K=ABSq?om1>I:*)'5.DC''`K/;_B:1jq?_Z1feQ#tcq*Ud[!ijC;i&Ji'.RjrtriU9$sI/W,S5?$DbA2sh>_",C^5t&RA9Gtd3pnGM\p_<so<9U#(&N(ogGpX^G=:LkFU,h<gMp1J^I`:6Cj@S;NWmH3cCM+c$4-=$DgPKZf<^frjD=9`1.$m-`"[A&/4K08E1P46Mnm80E]s>TGh4"pZB(M*pPqcWK\'Q?qU%mP^!R7H-Gs>rdBmR?s:bLk2gfES]dTk5H6W'_eAbrW*a&=,W(po+]Et2;uF$e=[kqJ'4U;.?E7HA8kVAJW$])=6)U]J:[Ds>+G0epXTMW2&-GT-7BkgdSn&aE;1H[XA\3Qdou+;F9L\-W=MB@Za2")uAMgZ@uo_Ft6^LlClYCGqU?Wcnet"'RdMDJ)TJBg;r2C\KFNNi"42]?24(dG$U,=267)$H?:bZ$m)Pl;D3h?/:Fqf=eP3Ps-5"(J#OSWI$@e6=8NZ2P.2M+fd<!s-TTnH!046Gq[IGVq:L0l"uLu60JIp(`7q.`A5ZJVS.FhPKFtDE:R:->uAm15(<6<;)%b]:5_J:KP3,-F:4lcX(tTn<)&Jq:JF#gi)uZ*5CP9m)jJNm2%0k&6:HB'2:lp#CemWN1V28^cW9eoPinjap.::Hp6A5gj%-_pqJ`/=Hk,+Y(Jrq)9$!ll9rgLQTVU'-\]Dr>Fk!#IX1MPTH=+9f0d*9.Ip!rah+D=;U4*.ePA,/L\QkP4K=@5Z]e(?8"3nM5!Yol?i6`5B>TR,l"dn3nC;!FJkV4#Hg`H]e&?W!9q-&f-F'S2Kphik8#fE;sqM!;3D[&:kU3ID(U?glqOh6gLe(<<qc[_)YNb77)E(<h@DS,)`%XoWPH5SHRX#rlr]?!eNJ\Bt!g\r_k/oZKZQi1
 %YTpqsdWpA_ZBdtt[@b+!HXKA>'&p\bjSINlkNnU8^;Y`,q6ICmn^9Ou6@LD2"^_^PAk$<nU...@XMn>\)(l1m\lBV,**YAWDq$+gB%F'0&bf%"^5d/'omZ`7$u1mJd!h*8c3-CF~>
+Gb!#]998+$&AI=/QlAS4j*nSaD0a@"PsE-F_uViBP"oJg@gC&B@K&FOGI?i+3!',L9AJS(^8cX0<Rq*6EV%.:'Mq*4i,QL<K>WEJ+oK_1%Ui$;:+02[j!jcr;bB4]hCr)nJ,4eC\U51j*^%0oI/CGL"0?7\GFR0@$_imn7.]gc&SiSX#YGamD1:2deT(/Ge)mEa7$6p50Om6EI"0DckMmcp6hTR9=IRGZ(GcPSKG-7--I58P,u9qKq]CFiPkY/E1q*!2!&s)dbEro.UXo7]@sT"QnaC#dq/edddm%KJUSo8lEdZN-$=$(t(PBT+N`JQMA&6L[JdmV__SNpI0Q?IpE)$_g@Mm4=OD#L80>WE%`6PeR"@Ii*"q#?s9_*goF37@d$:!hr6;/Oj9GDOmoZiJ_aPEpWFM?aO5!Yi$dJQ3MNFum@P8l/$qFC&Y/!ZU[33k2qP@LU<eV%psifoB8WU<r^&Hi9#3#LE;)FYofkU9q/8N)MekI%YFNE6QVDNaSTeNl"1XdPuWBau;a&5j*8=#`:1,(EC&=k6)X[LYi0oJYr?FR#,*RW'U#]jb[<kd>a0nFe4T%_Eb<2,uC1r[%N13[IE+:q8l3)7ZtEZ'Z`?'!;&g-+fm2O*l6C1-QZk>JZWW>>l/M5a<Sre5O.LM$MVuirK7-f$)KW`<L;R-CXrV-YN*c1)/Lmq.N>I(?e[oi\Oo2cC(ZU-b&f$!/1h+f'`d&I$&n/GL:bXQd@I\@<?GU&eQZ,V>o0&,pGW#\2WQQIUmlpZ9hf]].g;TZat/nJnPgW'4I?A/9ee<bBibEb$AMI?1,::I7=b^Lc9Ma"UD-&lZ5`aim1msZ,Wj;\I12b5EdUA+c%bmgiR]r7K2/f?.RahF-ker+cUi^&npI4^SP#j?$aHm6n7WU.gedGl:&D/\(Pn:Yb3B>/)&84TB:_l8l+WUGY[kJ3-7(?^*pg6MdsM<'8RsZdL)Zoq9GFheGfa6GDdn/0AY,
 [iGiP!_EPWG8ZHQ?0)rGg_F]`P"u6@o:tPMH7O3_*f8m!Gf)Z&cm$60O$h11CcT?cH(]Zt"5W6"h@dnRq7K+q*l\LC(rKd[g#XMo6B73Mlo>J2D6p1a3_POD6pmR<A$0MkI$DAbKZS\F'o$j_g'BGTSH`?--g'3Wq-6PgfII.K+X%n3e5ja!\f4V6CTk_)aO_EcP,gbErYBT4e]<@geE*QL9n'u&'@/`L'UsG&"/s_`KK/sL@0.g&9&QFQ5+\OI'5$lmo/Vfj;W"QXkbR9Fu_5<s(&F2j;S?s=>"n/WO-QKo?KmqSj`5DWui(u[AkX68X*A!>bN_6X56uW'g6F`*X@4"uMjj?RjU%YHC"0*6dKMS<=<D\8?,HJE$!<o_hgY<*$9e$")EsOLSJ>C7Rm=lHN_ql*<l7Z]rcp.ejg.O>VQl#f+XAY8UDu)P_,(JFFli;t#^+GTnLW4o>;Q5sdn,&C@Y"G#!d(K]Jgj2IYX\h]f)<%m52Y[G9OPj=KlS9eY:KA(<a49+IaK&hE3fE?6T2uGM9V)\WTa%&h]64BUFpg=5F5*GCS4m6pQB!n&0qj$F1OU>0nDmJA+sV=qcTdoTF__s\I.Qi9c"ug3[%,?Ag+N#j;oh2e/KhTg..3G/3f[!;FNK2SmpNaZc@_?U2;4Y]@V7:ALNR`%<V;WF"tW%a^o@nmTLN`7=XAV$$M)9-]V[5;YP.S5+nPnh;:3I='4OpuhA#eOW8hUA@A6it$SBlC?/*t44[Ph*"c#G7c@m37:it"ZIR4%!K)$kY*=83%6k\d.EI5r";nVG0"o\Y9-I[FL3J<thaKVi'fD%Fp,(JEKrW%l5^6uSM;^<?_r+PmAk3]]QZb)A:]eaq2BMDr]mfrt2689+n\#t]B;;\8t<qY]iL\X*=s!V(&dn=[ajYg'S>_1Y"#]RDYQ(bOa<Nd?E<TFZ=./5]q/elSo5'ltcStM3r3]lUcKgq9"cm=d!.^)G..;JGGE3(
 fFJ#CuFrS\gO9qhe1R_<ZGZU^^-(IHsT)9Lj.[IVo6?E<(M<9Z<_Ygp0=8S(qMT&LZWZVZ1a$j9LOQRRfO,HFjp5X@^)MO-FUP+?7S9tAuI'efI4c,p/UlUGWZ&GU":2+:h96QMBA`Jrnq`Pr+jR/5KJ&/0\"H=6r6jP(.TVT@_*n,"Lu+h*.7$Y9p,@lB;@5714!2SN;Jf?sEI\A/5[`60uiQl*3$T7Zuhe(MPOqeKROI>o=<DAtenf2CWfJ^BR!W`HXaRppT21V_H6or.XsU]bVB%+X!UM@p2br<cp*?ZC7?%KbP\j-gKa4Is^XOrPH5jT7ThE_UJK[2l>:<h-;Chur;Cl-qc)B7U7U7IjT2#a1;WTQqiJ`I/DeD2V<p*Pm2_'^KIga**eu@^#[&3sK*r<DBCM0UoA6n9ltKFR"G\C='=1L6N?o*rOYb`k]Vk#W-?r!%^3iKXAH\6/eSLTEPJII>7U035:dHU'%!iBB;'!l1bB199)eRY&>E=Ss(aTr_Vs^Q`cHd7imP'^4VYHj91mYH_l.(\1d(!>YOs0G%@,T^Aq(%AeY~>
 endstream
 endobj
-27 0 obj
+31 0 obj
 << /Type /Page
 /Parent 1 0 R
 /MediaBox [ 0 0 612 792 ]
 /Resources 3 0 R
-/Contents 26 0 R
-/Annots 28 0 R
+/Contents 30 0 R
+/Annots 32 0 R
 >>
 endobj
-28 0 obj
+32 0 obj
 [
-29 0 R
-30 0 R
-31 0 R
-32 0 R
 33 0 R
 34 0 R
 35 0 R
@@ -154,20 +172,27 @@
 39 0 R
 40 0 R
 41 0 R
+42 0 R
+43 0 R
+44 0 R
+45 0 R
+46 0 R
+47 0 R
+48 0 R
 ]
 endobj
-29 0 obj
+33 0 obj
 << /Type /Annot
 /Subtype /Link
 /Rect [ 297.3 599.613 349.62 587.613 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
-/A << /URI (http://tinyurl.com/l6on4)
+/A << /URI (http://tinyurl.com/k9g5c)
 /S /URI >>
 /H /I
 >>
 endobj
-30 0 obj
+34 0 obj
 << /Type /Annot
 /Subtype /Link
 /Rect [ 518.232 599.613 523.56 587.613 ]
@@ -178,7 +203,7 @@
 /H /I
 >>
 endobj
-31 0 obj
+35 0 obj
 << /Type /Annot
 /Subtype /Link
 /Rect [ 90.0 586.413 156.312 574.413 ]
@@ -189,21 +214,32 @@
 /H /I
 >>
 endobj
-32 0 obj
+36 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 223.968 521.96 276.288 509.96 ]
+/Rect [ 181.98 535.16 205.98 523.16 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
-/A << /URI (http://tinyurl.com/rq3f7)
+/A << /URI (http://wiki.apache.org/lucene-hadoop/FAQ)
 /S /URI >>
 /H /I
 >>
 endobj
-33 0 obj
+37 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 444.9 521.96 519.54 509.96 ]
+/Rect [ 297.3 483.907 349.62 471.907 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (http://tinyurl.com/l6on4)
+/S /URI >>
+/H /I
+>>
+endobj
+38 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 518.232 483.907 523.56 471.907 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A << /URI (http://www.apache.org/dyn/closer.cgi/lucene/hadoop/)
@@ -211,21 +247,32 @@
 /H /I
 >>
 endobj
-34 0 obj
+39 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 333.3 470.707 385.62 458.707 ]
+/Rect [ 90.0 470.707 156.312 458.707 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
-/A << /URI (http://tinyurl.com/rnnvz)
+/A << /URI (http://www.apache.org/dyn/closer.cgi/lucene/hadoop/)
 /S /URI >>
 /H /I
 >>
 endobj
-35 0 obj
+40 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 116.328 457.507 190.968 445.507 ]
+/Rect [ 223.968 406.254 276.288 394.254 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (http://tinyurl.com/rq3f7)
+/S /URI >>
+/H /I
+>>
+endobj
+41 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 444.9 406.254 519.54 394.254 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A << /URI (http://www.apache.org/dyn/closer.cgi/lucene/hadoop/)
@@ -233,21 +280,21 @@
 /H /I
 >>
 endobj
-36 0 obj
+42 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 161.988 379.854 214.308 367.854 ]
+/Rect [ 333.3 355.001 385.62 343.001 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
-/A << /URI (http://tinyurl.com/qkgsg)
+/A << /URI (http://tinyurl.com/rnnvz)
 /S /URI >>
 /H /I
 >>
 endobj
-37 0 obj
+43 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 434.244 379.854 508.884 367.854 ]
+/Rect [ 116.328 341.801 190.968 329.801 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A << /URI (http://www.apache.org/dyn/closer.cgi/lucene/hadoop/)
@@ -255,10 +302,21 @@
 /H /I
 >>
 endobj
-38 0 obj
+44 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 360.936 328.601 381.588 316.601 ]
+/Rect [ 161.988 264.148 214.308 252.148 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (http://tinyurl.com/qkgsg)
+/S /URI >>
+/H /I
+>>
+endobj
+45 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 434.244 264.148 508.884 252.148 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A << /URI (http://www.apache.org/dyn/closer.cgi/lucene/hadoop/)
@@ -266,21 +324,21 @@
 /H /I
 >>
 endobj
-39 0 obj
+46 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 383.304 277.348 501.948 265.348 ]
+/Rect [ 360.936 212.895 381.588 200.895 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
-/A << /URI (http://cvs.apache.org/dist/lucene/hadoop/nightly/)
+/A << /URI (http://www.apache.org/dyn/closer.cgi/lucene/hadoop/)
 /S /URI >>
 /H /I
 >>
 endobj
-40 0 obj
+47 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 90.0 264.148 185.316 252.148 ]
+/Rect [ 383.304 161.642 501.948 149.642 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
 /A << /URI (http://cvs.apache.org/dist/lucene/hadoop/nightly/)
@@ -288,139 +346,172 @@
 /H /I
 >>
 endobj
-41 0 obj
+48 0 obj
 << /Type /Annot
 /Subtype /Link
-/Rect [ 359.964 250.948 397.284 238.948 ]
+/Rect [ 90.0 148.442 185.316 136.442 ]
 /C [ 0 0 0 ]
 /Border [ 0 0 0 ]
-/A << /URI (docs/api/)
+/A << /URI (http://cvs.apache.org/dist/lucene/hadoop/nightly/)
 /S /URI >>
 /H /I
 >>
 endobj
-42 0 obj
-<< /Length 475 /Filter [ /ASCII85Decode /FlateDecode ]
+49 0 obj
+<< /Length 927 /Filter [ /ASCII85Decode /FlateDecode ]
  >>
 stream
-Gat$s9iHZu&A@ZciiO8j-@l^fg5a^'XAbB%&*b<l:iLq-"?ZSVL6%eQn_u5Vq_meffseSPSEn8lF>2A5pVmLO7)8PFrUjhGcWq'-TFkSp4?C[crk2E4J;HIG;?mY"83[rMKp4hEGp"TQlR^2s-('mIATltMU9a93ZSEZ`XT>.-YnDmb-u%\L/okMDfb+gdMDG>n]RCKMQ8L*t=gUcWW[?VGB3K"UM"[c8Bi;301rVP4mXFioL<r$TRTK27\Hsd+0VE=ELMkBuje]).b7c=?32nL\"9dG*D=j5M__/kQ*/.`f&^,4YOYI\.T:?>eJKsL"`&^T(+5FYc&VH$;UIL?lIa[IpdeHW:d";@gjL9MdY3fDZa(SWi(_Gh,CKW?f0_?e"4P^;bWE-;g2YfNUlsW3K`uC`)+G(.^B\KU$>h(sFnZtp>JN$VA/\<Lk.[LmWb)4&d6+aP/BLVM=&nm7$#&T^H'*~>
+Gat%!?#Q2d'Rf.GG]0^k\[Jatm&dCU:=D7HD59=2HZ6b,F9FBe;WZf^!E*9fZ?<0Fq&9-ZS9+,keNEU\%t4`tXk`@l8-gOjMB_eeMuEJmjUn@86<9jMn27`PpB?Bk3.3LpLbe^5^'mPVM7>d-qMQQ7&`IE;7c2P7U1qs$44^r&;cVt>b(-mKo#=_bVD+:m-lTiqX]DRLYhtl3n@L.hi-/+4mPO^ok5$(r7.X3&`I6J(rWWP"88o2]QbL8jV8#K.ii8%;:5K7:Kg\`#Tnd'dD-,p=!Ug2Bn>NcF1[,Gf!p2k#j*3P.0p@EJ4j%8f=bJ)4rFq-&!(Q1=1VL"3ATr(k(*b)Nn'-nG.oQOd9r>V32HpXS1)iKS=o.fX9q18Ve?OsGN$Xeq+b^p-#pG:$,Q`aQp*J"*rS"&geNefh<@tS\-AsCS(9"*DJJ7VXY!$S:-gGNMec93\/%(uB?*!Ua>4!Wh!B)E2,Ri-hd@2W7p/q^D#u$Qq*&9^lAF'XR(F%Hj+)i,<SDqaP_t)F\OO@]CJjuaI5eh(`#Z7^m"sC.#8h#\,/HJ/KCo6FMl#qreIDE#Ms5"S:@[N#5ckr"_c6.RaX)+6>^+X5:*d%hQ=REOX!DTM!#``')s&\lF]*X\>0<"mA-Iij8mr#<V:t2=7($Z=0YGo\WO"eN!`u_#P?=tnW`;%X5A@I9Q)TGqrV/CIY?6Aq)$Y!`]7u(9g4ea8EL(bVg3j%SiX+[2!iI23@FOHk!&BQk:VInZ7-m_W!`"%;\S%[E4j(DlPo1Y"Z;@GAqVnG7:b6,r[`V9b[YoLX<\PY$f(4tTr).$Db1W]fF2Q1tbP";75]1Rh']f,Q/(9>*#8&fm<IP=($j[:`625mQ70"[8)m0Bt6T9KL@1ZRV[C>Igci*&>gHD?DS!*_D9DLKtpi;N`QAG;G~>
 endstream
 endobj
-43 0 obj
+50 0 obj
 << /Type /Page
 /Parent 1 0 R
 /MediaBox [ 0 0 612 792 ]
 /Resources 3 0 R
-/Contents 42 0 R
+/Contents 49 0 R
+/Annots 51 0 R
 >>
 endobj
-45 0 obj
+51 0 obj
+[
+52 0 R
+]
+endobj
+52 0 obj
+<< /Type /Annot
+/Subtype /Link
+/Rect [ 359.964 664.8 397.284 652.8 ]
+/C [ 0 0 0 ]
+/Border [ 0 0 0 ]
+/A << /URI (docs/api/)
+/S /URI >>
+/H /I
+>>
+endobj
+54 0 obj
 <<
  /Title (\376\377\0\61\0\40\0\116\0\145\0\167\0\163)
- /Parent 44 0 R
- /First 46 0 R
- /Last 53 0 R
- /Count -8
+ /Parent 53 0 R
+ /First 55 0 R
+ /Last 64 0 R
+ /Count -10
  /A 9 0 R
 >> endobj
-46 0 obj
+55 0 obj
 <<
- /Title (\376\377\0\61\0\56\0\61\0\40\0\65\0\40\0\112\0\165\0\156\0\145\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\63\0\56\0\61\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
- /Parent 45 0 R
- /Next 47 0 R
+ /Title (\376\377\0\61\0\56\0\61\0\40\0\71\0\40\0\112\0\165\0\156\0\145\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\63\0\56\0\62\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
+ /Parent 54 0 R
+ /Next 56 0 R
  /A 11 0 R
 >> endobj
-47 0 obj
+56 0 obj
 <<
- /Title (\376\377\0\61\0\56\0\62\0\40\0\62\0\40\0\112\0\165\0\156\0\145\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\63\0\56\0\60\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
- /Parent 45 0 R
- /Prev 46 0 R
- /Next 48 0 R
+ /Title (\376\377\0\61\0\56\0\62\0\40\0\70\0\40\0\112\0\165\0\156\0\145\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\106\0\101\0\121\0\40\0\141\0\144\0\144\0\145\0\144\0\40\0\164\0\157\0\40\0\127\0\151\0\153\0\151)
+ /Parent 54 0 R
+ /Prev 55 0 R
+ /Next 57 0 R
  /A 13 0 R
 >> endobj
-48 0 obj
+57 0 obj
 <<
- /Title (\376\377\0\61\0\56\0\63\0\40\0\61\0\62\0\40\0\115\0\141\0\171\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\62\0\56\0\61\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
- /Parent 45 0 R
- /Prev 47 0 R
- /Next 49 0 R
+ /Title (\376\377\0\61\0\56\0\63\0\40\0\65\0\40\0\112\0\165\0\156\0\145\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\63\0\56\0\61\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
+ /Parent 54 0 R
+ /Prev 56 0 R
+ /Next 58 0 R
  /A 15 0 R
 >> endobj
-49 0 obj
+58 0 obj
 <<
- /Title (\376\377\0\61\0\56\0\64\0\40\0\65\0\40\0\115\0\141\0\171\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\62\0\56\0\60\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
- /Parent 45 0 R
- /Prev 48 0 R
- /Next 50 0 R
+ /Title (\376\377\0\61\0\56\0\64\0\40\0\62\0\40\0\112\0\165\0\156\0\145\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\63\0\56\0\60\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
+ /Parent 54 0 R
+ /Prev 57 0 R
+ /Next 59 0 R
  /A 17 0 R
 >> endobj
-50 0 obj
+59 0 obj
 <<
- /Title (\376\377\0\61\0\56\0\65\0\40\0\62\0\40\0\101\0\160\0\162\0\151\0\154\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\61\0\56\0\60\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
- /Parent 45 0 R
- /Prev 49 0 R
- /Next 51 0 R
+ /Title (\376\377\0\61\0\56\0\65\0\40\0\61\0\62\0\40\0\115\0\141\0\171\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\62\0\56\0\61\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
+ /Parent 54 0 R
+ /Prev 58 0 R
+ /Next 60 0 R
  /A 19 0 R
 >> endobj
-51 0 obj
+60 0 obj
 <<
- /Title (\376\377\0\61\0\56\0\66\0\40\0\66\0\40\0\106\0\145\0\142\0\162\0\165\0\141\0\162\0\171\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\156\0\151\0\147\0\150\0\164\0\154\0\171\0\40\0\142\0\165\0\151\0\154\0\144\0\163)
- /Parent 45 0 R
- /Prev 50 0 R
- /Next 52 0 R
+ /Title (\376\377\0\61\0\56\0\66\0\40\0\65\0\40\0\115\0\141\0\171\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\62\0\56\0\60\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
+ /Parent 54 0 R
+ /Prev 59 0 R
+ /Next 61 0 R
  /A 21 0 R
 >> endobj
-52 0 obj
+61 0 obj
 <<
- /Title (\376\377\0\61\0\56\0\67\0\40\0\63\0\40\0\106\0\145\0\142\0\162\0\165\0\141\0\162\0\171\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\110\0\141\0\144\0\157\0\157\0\160\0\40\0\143\0\157\0\144\0\145\0\40\0\155\0\157\0\166\0\145\0\144\0\40\0\157\0\165\0\164\0\40\0\157\0\146\0\40\0\116\0\165\0\164\0\143\0\150)
- /Parent 45 0 R
- /Prev 51 0 R
- /Next 53 0 R
+ /Title (\376\377\0\61\0\56\0\67\0\40\0\62\0\40\0\101\0\160\0\162\0\151\0\154\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\162\0\145\0\154\0\145\0\141\0\163\0\145\0\40\0\60\0\56\0\61\0\56\0\60\0\40\0\141\0\166\0\141\0\151\0\154\0\141\0\142\0\154\0\145)
+ /Parent 54 0 R
+ /Prev 60 0 R
+ /Next 62 0 R
  /A 23 0 R
 >> endobj
-53 0 obj
+62 0 obj
 <<
- /Title (\376\377\0\61\0\56\0\70\0\40\0\63\0\60\0\40\0\115\0\141\0\162\0\143\0\150\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\110\0\141\0\144\0\157\0\157\0\160\0\40\0\160\0\162\0\157\0\152\0\145\0\143\0\164\0\40\0\141\0\160\0\160\0\162\0\157\0\166\0\145\0\144)
- /Parent 45 0 R
- /Prev 52 0 R
+ /Title (\376\377\0\61\0\56\0\70\0\40\0\66\0\40\0\106\0\145\0\142\0\162\0\165\0\141\0\162\0\171\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\156\0\151\0\147\0\150\0\164\0\154\0\171\0\40\0\142\0\165\0\151\0\154\0\144\0\163)
+ /Parent 54 0 R
+ /Prev 61 0 R
+ /Next 63 0 R
  /A 25 0 R
 >> endobj
-54 0 obj
+63 0 obj
+<<
+ /Title (\376\377\0\61\0\56\0\71\0\40\0\63\0\40\0\106\0\145\0\142\0\162\0\165\0\141\0\162\0\171\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\110\0\141\0\144\0\157\0\157\0\160\0\40\0\143\0\157\0\144\0\145\0\40\0\155\0\157\0\166\0\145\0\144\0\40\0\157\0\165\0\164\0\40\0\157\0\146\0\40\0\116\0\165\0\164\0\143\0\150)
+ /Parent 54 0 R
+ /Prev 62 0 R
+ /Next 64 0 R
+ /A 27 0 R
+>> endobj
+64 0 obj
+<<
+ /Title (\376\377\0\61\0\56\0\61\0\60\0\40\0\63\0\60\0\40\0\115\0\141\0\162\0\143\0\150\0\54\0\40\0\62\0\60\0\60\0\66\0\72\0\40\0\110\0\141\0\144\0\157\0\157\0\160\0\40\0\160\0\162\0\157\0\152\0\145\0\143\0\164\0\40\0\141\0\160\0\160\0\162\0\157\0\166\0\145\0\144)
+ /Parent 54 0 R
+ /Prev 63 0 R
+ /A 29 0 R
+>> endobj
+65 0 obj
 << /Type /Font
 /Subtype /Type1
 /Name /F3
 /BaseFont /Helvetica-Bold
 /Encoding /WinAnsiEncoding >>
 endobj
-55 0 obj
+66 0 obj
 << /Type /Font
 /Subtype /Type1
 /Name /F5
 /BaseFont /Times-Roman
 /Encoding /WinAnsiEncoding >>
 endobj
-56 0 obj
+67 0 obj
 << /Type /Font
 /Subtype /Type1
 /Name /F1
 /BaseFont /Helvetica
 /Encoding /WinAnsiEncoding >>
 endobj
-57 0 obj
+68 0 obj
 << /Type /Font
 /Subtype /Type1
 /Name /F9
 /BaseFont /Courier
 /Encoding /WinAnsiEncoding >>
 endobj
-58 0 obj
+69 0 obj
 << /Type /Font
 /Subtype /Type1
 /Name /F2
 /BaseFont /Helvetica-Oblique
 /Encoding /WinAnsiEncoding >>
 endobj
-59 0 obj
+70 0 obj
 << /Type /Font
 /Subtype /Type1
 /Name /F7
@@ -430,147 +521,170 @@
 1 0 obj
 << /Type /Pages
 /Count 3
-/Kids [6 0 R 27 0 R 43 0 R ] >>
+/Kids [6 0 R 31 0 R 50 0 R ] >>
 endobj
 2 0 obj
 << /Type /Catalog
 /Pages 1 0 R
- /Outlines 44 0 R
+ /Outlines 53 0 R
  /PageMode /UseOutlines
  >>
 endobj
 3 0 obj
 << 
-/Font << /F3 54 0 R /F5 55 0 R /F1 56 0 R /F9 57 0 R /F2 58 0 R /F7 59 0 R >> 
+/Font << /F3 65 0 R /F5 66 0 R /F1 67 0 R /F9 68 0 R /F2 69 0 R /F7 70 0 R >> 
 /ProcSet [ /PDF /ImageC /Text ] >> 
 endobj
 9 0 obj
 <<
 /S /GoTo
-/D [27 0 R /XYZ 85.0 659.0 null]
+/D [31 0 R /XYZ 85.0 659.0 null]
 >>
 endobj
 11 0 obj
 <<
 /S /GoTo
-/D [27 0 R /XYZ 85.0 627.866 null]
+/D [31 0 R /XYZ 85.0 627.866 null]
 >>
 endobj
 13 0 obj
 <<
 /S /GoTo
-/D [27 0 R /XYZ 85.0 563.413 null]
+/D [31 0 R /XYZ 85.0 563.413 null]
 >>
 endobj
 15 0 obj
 <<
 /S /GoTo
-/D [27 0 R /XYZ 85.0 498.96 null]
+/D [31 0 R /XYZ 85.0 512.16 null]
 >>
 endobj
 17 0 obj
 <<
 /S /GoTo
-/D [27 0 R /XYZ 85.0 434.507 null]
+/D [31 0 R /XYZ 85.0 447.707 null]
 >>
 endobj
 19 0 obj
 <<
 /S /GoTo
-/D [27 0 R /XYZ 85.0 356.854 null]
+/D [31 0 R /XYZ 85.0 383.254 null]
 >>
 endobj
 21 0 obj
 <<
 /S /GoTo
-/D [27 0 R /XYZ 85.0 305.601 null]
+/D [31 0 R /XYZ 85.0 318.801 null]
 >>
 endobj
 23 0 obj
 <<
 /S /GoTo
-/D [27 0 R /XYZ 85.0 227.948 null]
+/D [31 0 R /XYZ 85.0 241.148 null]
 >>
 endobj
 25 0 obj
 <<
 /S /GoTo
-/D [27 0 R /XYZ 85.0 163.495 null]
+/D [31 0 R /XYZ 85.0 189.895 null]
 >>
 endobj
-44 0 obj
+27 0 obj
+<<
+/S /GoTo
+/D [50 0 R /XYZ 85.0 641.8 null]
+>>
+endobj
+29 0 obj
+<<
+/S /GoTo
+/D [50 0 R /XYZ 85.0 577.347 null]
+>>
+endobj
+53 0 obj
 <<
- /First 45 0 R
- /Last 45 0 R
+ /First 54 0 R
+ /Last 54 0 R
 >> endobj
 xref
-0 60
+0 71
 0000000000 65535 f 
-0000011790 00000 n 
-0000011862 00000 n 
-0000011954 00000 n 
+0000014006 00000 n 
+0000014078 00000 n 
+0000014170 00000 n 
 0000000015 00000 n 
 0000000071 00000 n 
-0000000869 00000 n 
-0000000989 00000 n 
-0000001070 00000 n 
-0000012088 00000 n 
-0000001205 00000 n 
-0000012151 00000 n 
-0000001342 00000 n 
-0000012217 00000 n 
-0000001479 00000 n 
-0000012283 00000 n 
-0000001616 00000 n 
-0000012348 00000 n 
-0000001753 00000 n 
-0000012414 00000 n 
-0000001888 00000 n 
-0000012480 00000 n 
-0000002025 00000 n 
-0000012546 00000 n 
-0000002162 00000 n 
-0000012612 00000 n 
-0000002299 00000 n 
-0000004988 00000 n 
-0000005111 00000 n 
-0000005222 00000 n 
-0000005397 00000 n 
-0000005601 00000 n 
-0000005803 00000 n 
-0000005979 00000 n 
-0000006179 00000 n 
-0000006354 00000 n 
-0000006559 00000 n 
-0000006737 00000 n 
-0000006942 00000 n 
-0000007147 00000 n 
-0000007350 00000 n 
-0000007550 00000 n 
-0000007713 00000 n 
-0000008280 00000 n 
-0000012678 00000 n 
-0000008388 00000 n 
-0000008529 00000 n 
-0000008836 00000 n 
-0000009157 00000 n 
-0000009477 00000 n 
-0000009792 00000 n 
-0000010119 00000 n 
-0000010416 00000 n 
-0000010805 00000 n 
-0000011128 00000 n 
-0000011241 00000 n 
-0000011351 00000 n 
-0000011459 00000 n 
-0000011565 00000 n 
-0000011681 00000 n 
+0000000945 00000 n 
+0000001065 00000 n 
+0000001160 00000 n 
+0000014304 00000 n 
+0000001295 00000 n 
+0000014367 00000 n 
+0000001432 00000 n 
+0000014433 00000 n 
+0000001569 00000 n 
+0000014499 00000 n 
+0000001706 00000 n 
+0000014564 00000 n 
+0000001843 00000 n 
+0000014630 00000 n 
+0000001980 00000 n 
+0000014696 00000 n 
+0000002117 00000 n 
+0000014762 00000 n 
+0000002252 00000 n 
+0000014828 00000 n 
+0000002389 00000 n 
+0000014894 00000 n 
+0000002526 00000 n 
+0000014958 00000 n 
+0000002663 00000 n 
+0000005302 00000 n 
+0000005425 00000 n 
+0000005557 00000 n 
+0000005732 00000 n 
+0000005936 00000 n 
+0000006138 00000 n 
+0000006328 00000 n 
+0000006503 00000 n 
+0000006707 00000 n 
+0000006909 00000 n 
+0000007087 00000 n 
+0000007289 00000 n 
+0000007464 00000 n 
+0000007669 00000 n 
+0000007847 00000 n 
+0000008052 00000 n 
+0000008257 00000 n 
+0000008460 00000 n 
+0000008660 00000 n 
+0000009679 00000 n 
+0000009802 00000 n 
+0000009829 00000 n 
+0000015024 00000 n 
+0000009988 00000 n 
+0000010130 00000 n 
+0000010437 00000 n 
+0000010726 00000 n 
+0000011047 00000 n 
+0000011368 00000 n 
+0000011688 00000 n 
+0000012003 00000 n 
+0000012330 00000 n 
+0000012627 00000 n 
+0000013016 00000 n 
+0000013344 00000 n 
+0000013457 00000 n 
+0000013567 00000 n 
+0000013675 00000 n 
+0000013781 00000 n 
+0000013897 00000 n 
 trailer
 <<
-/Size 60
+/Size 71
 /Root 2 0 R
 /Info 4 0 R
 >>
 startxref
-12729
+15075
 %%EOF

Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/Environment.java Fri Jun  9 14:11:29 2006
@@ -17,8 +17,12 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
+import java.net.InetAddress;
 import java.util.*;
 
+/*
+ * If we move to Java 1.5, we can get rid of this class and just use System.getenv
+ */
 public class Environment extends Properties
 {
    public Environment()
@@ -26,13 +30,15 @@
    {
       // Extend this code to fit all operating
       // environments that you expect to run in
-
       String command = null;
       String OS = System.getProperty("os.name");
+      String lowerOs = OS.toLowerCase();
       if (OS.equals("Windows NT")) {
          command = "cmd /C set";
       } else if (OS.indexOf("ix") > -1 || OS.indexOf("inux") > -1) {
          command = "env";
+      } else if(lowerOs.startsWith("mac os x")) {
+         command = "env";
       } else {
          // Add others here
       }
@@ -83,4 +89,19 @@
      }     
      return arr;
    }
-} 
\ No newline at end of file
+   
+   public String getHost()
+   {
+     String host = getProperty("HOST");
+     if(host == null) {
+       // HOST isn't always in the environment
+       try {
+         host = InetAddress.getLocalHost().getHostName();
+       } catch(IOException io) {
+         io.printStackTrace();
+       }
+     }
+     return host;
+   }
+   
+} 
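For context, Environment exists because System.getenv was not usable before Java 1.5 (as the new class comment notes), so the class shells out to the platform's environment-dumping command and loads the result into a Properties table; this commit adds Mac OS X to the recognized platforms and a getHost() helper. A rough, hypothetical sketch of the approach for Unix-like systems follows; the class name EnvSnapshot is invented, and it ignores the multi-line values and Windows handling the real class has to care about:

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;
    import java.util.Properties;

    public class EnvSnapshot extends Properties {
      public EnvSnapshot() throws IOException {
        // On Linux/Mac OS X the command is simply "env"; Windows NT uses "cmd /C set".
        Process p = Runtime.getRuntime().exec("env");
        BufferedReader r = new BufferedReader(new InputStreamReader(p.getInputStream()));
        String line;
        while ((line = r.readLine()) != null) {
          int eq = line.indexOf('=');
          if (eq > 0) {                       // assumes single-line NAME=value entries
            setProperty(line.substring(0, eq), line.substring(eq + 1));
          }
        }
        r.close();
      }

      public static void main(String[] args) throws IOException {
        System.out.println(new EnvSnapshot().getProperty("HOME"));
      }
    }
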

Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapRed.java Fri Jun  9 14:11:29 2006
@@ -22,9 +22,12 @@
 import java.util.Date;
 import java.util.Map;
 import java.util.Arrays;
+import java.util.ArrayList;
 import java.util.Properties;
 import java.util.regex.*;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.Reducer;
@@ -43,6 +46,8 @@
  */
 public abstract class PipeMapRed {
 
+  protected static final Log LOG = LogFactory.getLog(PipeMapRed.class.getName());  
+  
   /** The command to be spawned as a subprocess.
    * Mapper/Reducer operations will delegate to it
    */
@@ -53,9 +58,9 @@
   
 
   /**
-   * @returns ow many TABS before the end of the key part 
+   * @returns how many TABS before the end of the key part 
    * usually: 1 or "ALL"
-   * used both for tool output of both Map and Reduce
+   * used for tool output of both Map and Reduce
    * configured via tool's argv: splitKeyVal=ALL or 1..
    * although it is interpreted here, not by tool
    */
@@ -91,20 +96,57 @@
     return cols;
   }
   
-  String[] splitArgs(String args)
-  {
-    String regex = "\\s(?=(?:[^\"]*\"[^\"]*\")*[^\"]*\\z)";
-    String[] split = args.split(regex);
-    // remove outer quotes
-    for(int i=0; i<split.length; i++) {
-        String si = split[i].trim();
-        if(si.charAt(0)=='"' && si.charAt(si.length()-1)=='"') {
-            si = si.substring(1, si.length()-1);
-            split[i] = si;
+  final static int OUTSIDE = 1;
+  final static int SINGLEQ = 2;
+  final static int DOUBLEQ = 3;
+  
+  static String[] splitArgs(String args)
+  {
+    ArrayList argList = new ArrayList();
+    char[] ch = args.toCharArray();
+    int clen = ch.length;
+    int state = OUTSIDE;
+    int argstart = 0;
+    for(int c=0; c<=clen; c++) {
+        boolean last = (c==clen);
+        int lastState = state;
+        boolean endToken = false;
+        if(!last) {
+          if(ch[c]=='\'') {
+            if(state == OUTSIDE) {
+              state = SINGLEQ;
+            } else if(state == SINGLEQ) {
+              state = OUTSIDE;  
+            }
+            endToken = (state != lastState);
+          } else if(ch[c]=='"') {
+            if(state == OUTSIDE) {
+              state = DOUBLEQ;
+            } else if(state == DOUBLEQ) {
+              state = OUTSIDE;  
+            }          
+            endToken = (state != lastState);
+          } else if(ch[c]==' ') {
+            if(state == OUTSIDE) {
+              endToken = true;
+            }            
+          }
+        }
+        if(last || endToken) {
+          if(c == argstart) {
+            // unquoted space
+          } else {
+            String a;
+            a = args.substring(argstart, c); 
+            argList.add(a);
+          }
+          argstart = c+1;
+          lastState = state;
         }
     }
-    return split;
+    return (String[])argList.toArray(new String[0]);
   }
+
   public void configure(JobConf job)
   {
 
@@ -132,7 +174,7 @@
 	  // A  relative path should match in the unjarred Job data
       // In this case, force an absolute path to make sure exec finds it.
       argvSplit[0] = new File(argvSplit[0]).getAbsolutePath();
-      log_.println("PipeMapRed exec " + Arrays.toString(argvSplit));
+      log_.println("PipeMapRed exec " + Arrays.asList(argvSplit));
             
       
       Environment childEnv = (Environment)StreamUtil.env().clone();
@@ -440,4 +482,5 @@
       }
     }    
   }
+  
 }
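The splitArgs() rewrite above swaps the regex splitter for a small OUTSIDE/SINGLEQ/DOUBLEQ state machine, so whitespace inside single or double quotes no longer breaks an argument and the surrounding quotes are stripped. A hedged illustration of the intended behavior (the method is package-private, so this assumes a caller in org.apache.hadoop.streaming; the expected output is read off the state machine above, not taken from a test in this commit):

    package org.apache.hadoop.streaming;

    public class SplitArgsDemo {
      public static void main(String[] args) {
        String cmd = "sort -k 'two words' \"a b\" plain";
        String[] toks = PipeMapRed.splitArgs(cmd);
        // Expected tokens: sort | -k | two words | a b | plain
        for (int i = 0; i < toks.length; i++) {
          System.out.println(toks[i]);
        }
      }
    }
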

Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeMapper.java Fri Jun  9 14:11:29 2006
@@ -25,7 +25,6 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.OutputCollector;
 
-import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableComparable;

Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/PipeReducer.java Fri Jun  9 14:11:29 2006
@@ -25,7 +25,6 @@
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.OutputCollector;
 
-import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.WritableComparator;
 import org.apache.hadoop.io.WritableComparable;

Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamBaseRecordReader.java Fri Jun  9 14:11:29 2006
@@ -20,14 +20,14 @@
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.UTF8;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.FileSplit;
 import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.util.LogFormatter;
+import org.apache.commons.logging.*;
+
 
 /** 
  * Shared functionality for hadoopStreaming formats.
@@ -40,7 +40,10 @@
 public abstract class StreamBaseRecordReader implements RecordReader
 {
     
-  protected static final Logger LOG = LogFormatter.getLogger(StreamBaseRecordReader.class.getName());
+  protected static final Log LOG = LogFactory.getLog(StreamBaseRecordReader.class.getName());
+  
+  // custom JobConf properties for this class are prefixed with this namespace
+  final String CONF_NS = "stream.recordreader.";
 
   public StreamBaseRecordReader(
     FSDataInputStream in, long start, long end, 
@@ -49,15 +52,45 @@
   {
     in_ = in;
     start_ = start;
-    splitName_ = splitName;
     end_ = end;
+    length_ = end_ - start_;
+    splitName_ = splitName;
     reporter_ = reporter;
     job_ = job;
+    
+    statusMaxRecordChars_ = job_.getInt(CONF_NS + "statuschars", 200);
+  }
+
+  /// RecordReader API
+  
+  /** Read a record. Implementation should call numRecStats at the end
+   */  
+  public abstract boolean next(Writable key, Writable value) throws IOException;
+
+  /** Returns the current position in the input. */
+  public synchronized long getPos() throws IOException 
+  { 
+    return in_.getPos(); 
+  }
+
+  /** Close this to future operations.*/
+  public synchronized void close() throws IOException 
+  { 
+    in_.close(); 
   }
+  
+  /// StreamBaseRecordReader API
 
-  /** Called once before the first call to next */
   public void init() throws IOException
   {
+    LOG.info("StreamBaseRecordReader.init: " +
+    " start_=" + start_ + " end_=" + end_ + " length_=" + length_ +
+    " start_ > in_.getPos() =" 
+        + (start_ > in_.getPos()) + " " + start_ 
+        + " > " + in_.getPos() );
+    if (start_ > in_.getPos()) {
+      in_.seek(start_);
+    }  
     seekNextRecordBoundary();
   }
   
@@ -66,17 +99,12 @@
    */
   public abstract void seekNextRecordBoundary() throws IOException;
   
-  
-  /** Read a record. Implementation should call numRecStats at the end
-   */  
-  public abstract boolean next(Writable key, Writable value) throws IOException;
-
-  
+    
   void numRecStats(CharSequence record) throws IOException
   {
     numRec_++;          
     if(numRec_ == nextStatusRec_) {
-      nextStatusRec_ +=100000;//*= 10;
+      nextStatusRec_ +=100;//*= 10;
       String status = getStatus(record);
       LOG.info(status);
       reporter_.setStatus(status);
@@ -91,10 +119,9 @@
       pos = getPos();
     } catch(IOException io) {
     }
-    final int M = 2000;
     String recStr;
-    if(record.length() > M) {
-    	recStr = record.subSequence(0, M) + "...";
+    if(record.length() > statusMaxRecordChars_) {
+        recStr = record.subSequence(0, statusMaxRecordChars_) + "...";
     } else {
     	recStr = record.toString();
     }
@@ -103,25 +130,15 @@
     return status;
   }
 
-  /** Returns the current position in the input. */
-  public synchronized long getPos() throws IOException 
-  { 
-    return in_.getPos(); 
-  }
-
-  /** Close this to future operations.*/
-  public synchronized void close() throws IOException 
-  { 
-    in_.close(); 
-  }
-
   FSDataInputStream in_;
   long start_;
   long end_;
+  long length_;
   String splitName_;
   Reporter reporter_;
   JobConf job_;
   int numRec_ = 0;
   int nextStatusRec_ = 1;
+  int statusMaxRecordChars_;
   
 }
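The hard-coded 2000-character status truncation above is replaced by a per-job setting under the new stream.recordreader. namespace, read with a default of 200. A small, hypothetical sketch of raising it from client code; the key name comes from the diff, while the standalone JobConf usage and class name StatusCharsDemo are assumptions (and presume the usual Hadoop config files are on the classpath):

    import org.apache.hadoop.mapred.JobConf;

    public class StatusCharsDemo {
      public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Same lookup StreamBaseRecordReader performs: CONF_NS + "statuschars", default 200.
        conf.set("stream.recordreader.statuschars", "500");
        System.out.println(conf.getInt("stream.recordreader.statuschars", 200)); // prints 500
      }
    }
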

Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamInputFormat.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamInputFormat.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamInputFormat.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamInputFormat.java Fri Jun  9 14:11:29 2006
@@ -23,6 +23,8 @@
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.PathFilter;
@@ -30,11 +32,8 @@
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
-import org.apache.hadoop.io.UTF8;
 
 import org.apache.hadoop.mapred.*;
-import org.apache.hadoop.util.LogFormatter;
-
 
 /** An input format that performs globbing on DFS paths and 
  * selects a RecordReader based on a JobConf property.
@@ -46,7 +45,8 @@
   // an InputFormat should be public with the synthetic public default constructor
   // JobTracker's JobInProgress will instantiate with clazz.newInstance() (and a custom ClassLoader)
   
-  protected static final Logger LOG = LogFormatter.getLogger(StreamInputFormat.class.getName());
+  protected static final Log LOG = LogFactory.getLog(StreamInputFormat.class.getName());
+  
   static {
     //LOG.setLevel(Level.FINE);
   }
@@ -59,7 +59,7 @@
     int dsup = globs.length;
     for(int d=0; d<dsup; d++) {
       String leafName = globs[d].getName();
-      LOG.fine("StreamInputFormat: globs[" + d + "] leafName = " + leafName);
+      LOG.info("StreamInputFormat: globs[" + d + "] leafName = " + leafName);
       Path[] paths; Path dir;
 	  PathFilter filter = new GlobFilter(fs, leafName);
 	  dir = new Path(globs[d].getParent().toString());
@@ -79,7 +79,13 @@
     }
     String globToRegexp(String glob)
 	{
-	  return glob.replaceAll("\\*", ".*");
+      String re = glob;
+      re = re.replaceAll("\\.", "\\\\.");
+      re = re.replaceAll("\\+", "\\\\+");
+	  re = re.replaceAll("\\*", ".*");
+      re = re.replaceAll("\\?", ".");
+      LOG.info("globToRegexp: |" + glob + "|  ->  |" + re + "|");
+      return re;
 	}
 
     public boolean accept(Path pathname)
@@ -88,7 +94,7 @@
       if(acc) {
       	acc = pat_.matcher(pathname.getName()).matches();
       }
-      LOG.finer("matches " + pat_ + ", " + pathname + " = " + acc);
+      LOG.info("matches " + pat_ + ", " + pathname + " = " + acc);
       return acc;
     }
 	
@@ -99,7 +105,7 @@
   public RecordReader getRecordReader(FileSystem fs, final FileSplit split,
                                       JobConf job, Reporter reporter)
     throws IOException {
-    LOG.finer("getRecordReader start.....");
+    LOG.info("getRecordReader start.....");
     reporter.setStatus(split.toString());
 
     final long start = split.getStart();
@@ -143,5 +149,5 @@
     
     return reader;
   }
-  
+
 }
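globToRegexp() above now escapes '.' and '+' before translating the glob wildcards, and also maps '?' to a single-character match, so a pattern such as part-*.txt no longer accidentally matches names where the dot is some other character. A hedged standalone sketch of the same translation (GlobDemo and globToRe are made-up names; the four replacements are copied from the committed method):

    import java.util.regex.Pattern;

    public class GlobDemo {
      static String globToRe(String glob) {
        String re = glob;
        re = re.replaceAll("\\.", "\\\\.");   // escape literal dots first
        re = re.replaceAll("\\+", "\\\\+");   // escape literal plus signs
        re = re.replaceAll("\\*", ".*");      // glob * -> regex .*
        re = re.replaceAll("\\?", ".");       // glob ? -> any single character
        return re;
      }

      public static void main(String[] args) {
        String re = globToRe("part-*.txt");                        // part-.*\.txt
        System.out.println(Pattern.matches(re, "part-00000.txt")); // true
        System.out.println(Pattern.matches(re, "part-00000Xtxt")); // false
      }
    }
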

Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Fri Jun  9 14:11:29 2006
@@ -23,6 +23,8 @@
 import java.util.Arrays;
 import java.util.Iterator;
 
+import org.apache.commons.logging.*;
+
 import org.apache.hadoop.conf.Configuration;
 
 import org.apache.hadoop.io.UTF8;
@@ -32,16 +34,14 @@
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.RunningJob;
 
-import org.apache.hadoop.util.LogFormatter;
-
 /** All the client-side work happens here. 
  * (Jar packaging, MapRed job submission and monitoring)
  * @author Michel Tourn
  */
 public class StreamJob
 {
-  protected static final Logger LOG = LogFormatter.getLogger(StreamJob.class.getName());
-    
+  protected static final Log LOG = LogFactory.getLog(StreamJob.class.getName());    
+  
   public StreamJob(String[] argv, boolean mayExit)
   {
     argv_ = argv;
@@ -72,9 +72,10 @@
   void preProcessArgs()
   {
     verbose_ = false;
+    addTaskEnvironment_ = "";
   }
   
-  void postProcessArgs()
+  void postProcessArgs() throws IOException
   {
     if(cluster_ == null) {
         // hadoop-default.xml is standard, hadoop-local.xml is not.
@@ -87,22 +88,35 @@
     if(output_ == null) {
         fail("Required argument: -output ");
     }
-    // careful with class names..
-    mapCmd_ = packageOrTrimNoShip(mapCmd_);
-    redCmd_ = packageOrTrimNoShip(redCmd_);
+    msg("addTaskEnvironment=" + addTaskEnvironment_);
+
+    Iterator it = packageFiles_.iterator();
+    while(it.hasNext()) {
+      File f = new File((String)it.next());    
+      if(f.isFile()) {
+        shippedCanonFiles_.add(f.getCanonicalPath());
+      }
+    }
+    msg("shippedCanonFiles_=" + shippedCanonFiles_);
     
-    // TBD -D format or sthg on cmdline. 
-    // Plus maybe a standard list originating on client or server    
-    addTaskEnvironment_ = ""; 
+    // careful with class names..
+    mapCmd_ = unqualifyIfLocalPath(mapCmd_);
+    redCmd_ = unqualifyIfLocalPath(redCmd_);    
+  }
+  
+  void validateNameEqValue(String neqv)
+  {
+    String[] nv = neqv.split("=", 2);
+    if(nv.length < 2) {
+        fail("Invalid name=value spec: " + neqv);
+    }
+    msg("Recording name=value: name=" + nv[0] + " value=" + nv[1]);
   }
   
-  String packageOrTrimNoShip(String cmd)
+  String unqualifyIfLocalPath(String cmd) throws IOException
   {
     if(cmd == null) {
       //    
-    } else if(cmd.startsWith(NOSHIP)) {
-      // don't package the file, but keep the abolute path
-      cmd = cmd.substring(NOSHIP.length());
     } else {
       String prog = cmd;
       String args = "";
@@ -111,18 +125,23 @@
         prog = cmd.substring(0, s);
         args = cmd.substring(s+1);
       }
-      packageFiles_.add(new File(prog).getAbsolutePath());
-      // Change path to simple filename. 
-      // That way when PipeMapRed calls Runtime.exec(), 
-      // it will look for the excutable in Task's working dir.
-      // And this is where TaskRunner unjars our job jar.
-      prog = new File(prog).getName();
-      if(args.length() > 0) {
-        cmd = prog + " " + args;
-      } else {
-        cmd = prog;
+      String progCanon = new File(prog).getCanonicalPath();
+      boolean shipped = shippedCanonFiles_.contains(progCanon);
+      msg("shipped: " + shipped + " " + progCanon);
+      if(shipped) {
+        // Change path to simple filename. 
+        // That way when PipeMapRed calls Runtime.exec(), 
+        // it will look for the executable in Task's working dir.
+        // And this is where TaskRunner unjars our job jar.
+        prog = new File(prog).getName();
+        if(args.length() > 0) {
+          cmd = prog + " " + args;
+        } else {
+          cmd = prog;
+        }
       }
     }
+    msg("cmd=" + cmd);
     return cmd;
   }
   
@@ -130,17 +149,20 @@
   {
     return new File(getHadoopClientHome() + "/conf", hadoopAliasConf_).getAbsolutePath();
   }
+   
   
   void parseArgv()
   {
     if(argv_.length==0) {
-      exitUsage();
+      exitUsage(false);
     }
     int i=0; 
     while(i < argv_.length) {
       String s;
       if(argv_[i].equals("-verbose")) {
         verbose_ = true;      
+      } else if(argv_[i].equals("-info")) {
+        detailedUsage_ = true;      
       } else if(argv_[i].equals("-debug")) {
         debug_++;
       } else if((s = optionArg(argv_, i, "-input", false)) != null) {
@@ -155,7 +177,7 @@
       } else if((s = optionArg(argv_, i, "-reducer", redCmd_ != null)) != null) {
         i++;
         redCmd_ = s;
-      } else if((s = optionArg(argv_, i, "-files", false)) != null) {
+      } else if((s = optionArg(argv_, i, "-file", false)) != null) {
         i++;
         packageFiles_.add(s);
       } else if((s = optionArg(argv_, i, "-cluster", cluster_ != null)) != null) {
@@ -164,15 +186,35 @@
       } else if((s = optionArg(argv_, i, "-config", false)) != null) {
         i++;
         configPath_.add(s);
+      } else if((s = optionArg(argv_, i, "-dfs", false)) != null) {
+        i++;
+        userJobConfProps_.add("fs.default.name="+s);
+      } else if((s = optionArg(argv_, i, "-jt", false)) != null) {
+        i++;
+        userJobConfProps_.add("mapred.job.tracker="+s);
+      } else if((s = optionArg(argv_, i, "-jobconf", false)) != null) {
+        i++;
+        validateNameEqValue(s);
+        userJobConfProps_.add(s);
+      } else if((s = optionArg(argv_, i, "-cmdenv", false)) != null) {
+        i++;
+        validateNameEqValue(s);
+        if(addTaskEnvironment_.length() > 0) {
+            addTaskEnvironment_ += " ";
+        }
+        addTaskEnvironment_ += s;
       } else if((s = optionArg(argv_, i, "-inputreader", inReaderSpec_ != null)) != null) {
         i++;
         inReaderSpec_ = s;
       } else {
         System.err.println("Unexpected argument: " + argv_[i]);
-        exitUsage();
+        exitUsage(false);
       }
       i++;
     }
+    if(detailedUsage_) {
+        exitUsage(true);
+    }
   }
   
   String optionArg(String[] args, int index, String arg, boolean argSet)
@@ -196,22 +238,32 @@
     }
   }
 
-  public void exitUsage()
+  public void exitUsage(boolean detailed)
   {
                       //         1         2         3         4         5         6         7         
                       //1234567890123456789012345678901234567890123456789012345678901234567890123456789
-    System.out.println("Usage: bin/hadoop jar build/hadoop-streaming.jar [options]");
+    System.out.println("Usage: $HADOOP_HOME/bin/hadoop jar build/hadoop-streaming.jar [options]");
     System.out.println("Options:");
-    System.out.println("  -input   <path>     DFS input file(s) for the Map step");
-    System.out.println("  -output  <path>     DFS output directory for the Reduce step");
-    System.out.println("  -mapper  <cmd>      The streaming command to run");
-    System.out.println("  -reducer <cmd>      The streaming command to run");
-    System.out.println("  -files   <file>     Additional files to be shipped in the Job jar file");
-    System.out.println("  -cluster <name>     Default uses hadoop-default.xml and hadoop-site.xml");
-    System.out.println("  -config  <file>     Optional. One or more paths to xml config files");
-    System.out.println("  -inputreader <spec> Optional. See below");
+    System.out.println("  -input    <path>     DFS input file(s) for the Map step");
+    System.out.println("  -output   <path>     DFS output directory for the Reduce step");
+    System.out.println("  -mapper   <cmd>      The streaming command to run");
+    System.out.println("  -combiner <cmd>      Not implemented. But you can pipe the mapper output");
+    System.out.println("  -reducer  <cmd>      The streaming command to run");
+    System.out.println("  -file     <file>     File/dir to be shipped in the Job jar file");
+    System.out.println("  -cluster  <name>     Default uses hadoop-default.xml and hadoop-site.xml");
+    System.out.println("  -config   <file>     Optional. One or more paths to xml config files");
+    System.out.println("  -dfs      <h:p>      Optional. Override DFS configuration");
+    System.out.println("  -jt       <h:p>      Optional. Override JobTracker configuration");
+    System.out.println("  -inputreader <spec>  Optional.");
+    System.out.println("  -jobconf  <n>=<v>    Optional.");
+    System.out.println("  -cmdenv   <n>=<v>    Optional. Pass env.var to streaming commands");
     System.out.println("  -verbose");
     System.out.println();
+    if(!detailed) {    
+    System.out.println("For more details about these options:");
+    System.out.println("Use $HADOOP_HOME/bin/hadoop jar build/hadoop-streaming.jar -info");
+        fail("");
+    }
     System.out.println("In -input: globbing on <path> is supported and can have multiple -input");
     System.out.println("Default Map input format: a line is a record in UTF-8");
     System.out.println("  the key part ends at first TAB, the rest of the line is the value");
@@ -220,21 +272,34 @@
     System.out.println("  Ex: -inputreader 'StreamXmlRecordReader,begin=<doc>,end=</doc>'");
     System.out.println("Map output format, reduce input/output format:");
     System.out.println("  Format defined by what mapper command outputs. Line-oriented");
-    System.out.println("Mapper and Reducer <cmd> syntax: ");
-    System.out.println("  If the mapper or reducer programs are prefixed with " + NOSHIP + " then ");
-    System.out.println("  the paths are assumed to be valid absolute paths on the task tracker machines");
-    System.out.println("  and are NOT packaged with the Job jar file.");
+    System.out.println();
     System.out.println("Use -cluster <name> to switch between \"local\" Hadoop and one or more remote ");
     System.out.println("  Hadoop clusters. ");
     System.out.println("  The default is to use the normal hadoop-default.xml and hadoop-site.xml");
     System.out.println("  Else configuration will use $HADOOP_HOME/conf/hadoop-<name>.xml");
     System.out.println();
-    System.out.println("Example: hadoopStreaming -mapper \"noship:/usr/local/bin/perl5 filter.pl\"");
-    System.out.println("           -files /local/filter.pl -input \"/logs/0604*/*\" [...]");
+    System.out.println("To set the number of reduce tasks (num. of output files):");
+    System.out.println("  -jobconf mapred.reduce.tasks=10");
+    System.out.println("To change the local temp directory:");
+    System.out.println("  -jobconf dfs.data.dir=/tmp");
+    System.out.println("Additional local temp directories with -cluster local:");
+    System.out.println("  -jobconf mapred.local.dir=/tmp/local");
+    System.out.println("  -jobconf mapred.system.dir=/tmp/system");
+    System.out.println("  -jobconf mapred.temp.dir=/tmp/temp");
+    System.out.println("For more details about jobconf parameters see:");
+    System.out.println("  http://wiki.apache.org/lucene-hadoop/JobConfFile");
+    System.out.println("To set an environement variable in a streaming command:");
+    System.out.println("   -cmdenv EXAMPLE_DIR=/home/example/dictionaries/");
+    System.out.println();
+    System.out.println("Shortcut to run from any directory:");
+    System.out.println("   setenv HSTREAMING \"$HADOOP_HOME/bin/hadoop jar $HADOOP_HOME/build/hadoop-streaming.jar\"");
+    System.out.println();
+    System.out.println("Example: $HSTREAMING -mapper \"/usr/local/bin/perl5 filter.pl\"");
+    System.out.println("           -file /local/filter.pl -input \"/logs/0604*/*\" [...]");
     System.out.println("  Ships a script, invokes the non-shipped perl interpreter");
     System.out.println("  Shipped files go to the working directory so filter.pl is found by perl");
     System.out.println("  Input files are all the daily logs for days in month 2006-04");
-    fail("");    
+    fail("");
   }
   
   public void fail(String message)
@@ -291,7 +356,7 @@
         msg("Found runtime classes in: " + runtimeClasses);
     }
     if(isLocalHadoop()) {
-      // don't package class files (they might get unpackaged in . and then 
+      // don't package class files (they might get unpackaged in "." and then 
       //  hide the intended CLASSPATH entry)
       // we still package everything else (so that scripts and executable are found in 
       //  Task workdir like distributed Hadoop)
@@ -393,7 +458,17 @@
     if(jar_ != null) {
         jobConf_.setJar(jar_);
     }
-    //jobConf_.mtdump();System.exit(1);
+
+    // last, allow user to override anything 
+    // (although typically used with properties we didn't touch)
+    it = userJobConfProps_.iterator();
+    while(it.hasNext()) {
+        String prop = (String)it.next();
+        String[] nv = prop.split("=", 2);
+        msg("JobConf: set(" + nv[0] + ", " + nv[1]+")");
+        jobConf_.set(nv[0], nv[1]);
+    }   
+    
   }
   
   protected String getJobTrackerHostPort()
@@ -432,7 +507,7 @@
       running_ = jc_.submitJob(jobConf_);
       jobId_ = running_.getJobID();
 
-      LOG.info("getLocalDirs(): " + Arrays.toString(jobConf_.getLocalDirs()));     
+      LOG.info("getLocalDirs(): " + Arrays.asList(jobConf_.getLocalDirs()));     
       LOG.info("Running job: " + jobId_);      
       jobInfo();
 
@@ -467,11 +542,10 @@
   }
   
 
-  public final static String NOSHIP = "noship:";
-  
   protected boolean mayExit_;
   protected String[] argv_;
   protected boolean verbose_;
+  protected boolean detailedUsage_;
   protected int debug_;
 
   protected Environment env_;
@@ -483,8 +557,10 @@
   protected JobClient jc_;
 
   // command-line arguments
-  protected ArrayList inputGlobs_   = new ArrayList(); // <String>
-  protected ArrayList packageFiles_ = new ArrayList(); // <String>
+  protected ArrayList inputGlobs_       = new ArrayList(); // <String>
+  protected ArrayList packageFiles_     = new ArrayList(); // <String>
+  protected ArrayList shippedCanonFiles_= new ArrayList(); // <String>  
+  protected ArrayList userJobConfProps_ = new ArrayList(); // <String>
   protected String output_;
   protected String mapCmd_;
   protected String redCmd_;

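Several of the new StreamJob options (-jobconf, -dfs, -jt) reduce to the same pattern: collect name=value strings in userJobConfProps_, check that each one really contains an '=', and set them on the JobConf last so they override anything the tool configured earlier. A minimal sketch of that split-and-set pattern, using java.util.Properties as a stand-in for the real JobConf:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Properties;

    public class JobConfOverrideSketch {
      public static void main(String[] args) {
        // Name=value pairs as collected from -jobconf / -dfs / -jt arguments.
        List<String> userJobConfProps = new ArrayList<String>();
        userJobConfProps.add("fs.default.name=localhost:9000");
        userJobConfProps.add("mapred.reduce.tasks=10");

        Properties jobConf = new Properties();  // stand-in for org.apache.hadoop.mapred.JobConf
        for (String prop : userJobConfProps) {
          String[] nv = prop.split("=", 2);     // split on the first '=' only
          if (nv.length < 2) {
            throw new IllegalArgumentException("Invalid name=value spec: " + prop);
          }
          jobConf.setProperty(nv[0], nv[1]);    // applied last, so it overrides earlier settings
        }
        System.out.println(jobConf.getProperty("mapred.reduce.tasks")); // prints 10
      }
    }
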
Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamLineRecordReader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamLineRecordReader.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamLineRecordReader.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamLineRecordReader.java Fri Jun  9 14:11:29 2006
@@ -69,7 +69,7 @@
       return false;
 
     //((LongWritable)key).set(pos);           // key is position
-    //((UTF8)value).set(readLine(in));        // value is line
+    //((UTF8)value).set(readLine(in));   // value is line
     String line = readLine(in_);
 
     // key is line up to TAB, value is rest

Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamUtil.java Fri Jun  9 14:11:29 2006
@@ -198,7 +198,7 @@
   static {
     try {
       env = new Environment();
-      HOST = env.get("HOST").toString();
+      HOST = env.getHost();
     } catch(IOException io) {
       io.printStackTrace();
     }
@@ -275,6 +275,22 @@
     }
   }
   
+  static final String regexpSpecials = "[]()?*+|.!^-\\~@";
+  
+  public static String regexpEscape(String plain)
+  {
+    StringBuffer buf = new StringBuffer();
+    char[] ch = plain.toCharArray();
+    int csup = ch.length;
+    for(int c=0; c<csup; c++) {
+      if(regexpSpecials.indexOf(ch[c]) != -1) {
+        buf.append("\\");    
+      }
+      buf.append(ch[c]);
+    }
+    return buf.toString();
+  }
+  
   static String slurp(File f) throws IOException
   {
     FileInputStream in = new FileInputStream(f);
@@ -298,5 +314,5 @@
     }
     return env_;
   }
-
+  
 }

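The new regexpEscape() helper simply backslash-escapes every character found in regexpSpecials, so that literal markers such as "CDATA[" or "]]>" can be embedded safely in a java.util.regex.Pattern. A standalone sketch of the same loop (the class name below is illustrative):

    import java.util.regex.Pattern;

    public class RegexpEscapeSketch {
      static final String SPECIALS = "[]()?*+|.!^-\\~@";

      static String regexpEscape(String plain) {
        StringBuffer buf = new StringBuffer();
        for (char c : plain.toCharArray()) {
          if (SPECIALS.indexOf(c) != -1) {
            buf.append('\\');   // escape anything the regexp engine could treat specially
          }
          buf.append(c);
        }
        return buf.toString();
      }

      public static void main(String[] args) {
        String escaped = regexpEscape("]]>");
        System.out.println(escaped);                         // \]\]>
        System.out.println(Pattern.matches(escaped, "]]>")); // true
      }
    }
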
Modified: lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamXmlRecordReader.java Fri Jun  9 14:11:29 2006
@@ -17,10 +17,12 @@
 package org.apache.hadoop.streaming;
 
 import java.io.*;
+import java.util.regex.*;
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.UTF8;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.mapred.Reporter;
@@ -32,6 +34,14 @@
  *  Values are XML subtrees delimited by configurable tags.
  *  Keys could be the value of a certain attribute in the XML subtree, 
  *  but this is left to the stream processor application.
+ *
+ *  The name-value properties that StreamXmlRecordReader understands are:
+ *    String begin (chars marking beginning of record)
+ *    String end   (chars marking end of record)
+ *    int maxrec   (maximum record size)
+ *    int lookahead(maximum lookahead to sync CDATA)
+ *    boolean slowmatch
+ *
  *  @author Michel Tourn
  */
 public class StreamXmlRecordReader extends StreamBaseRecordReader 
@@ -42,67 +52,278 @@
     throws IOException
   {
     super(in, start, end, splitName, reporter, job);
-    beginMark_ = checkJobGet("stream.recordreader.begin");
-    endMark_   = checkJobGet("stream.recordreader.end");
-  }
+    
+    beginMark_ = checkJobGet(CONF_NS + "begin");
+    endMark_   = checkJobGet(CONF_NS + "end");
 
-  String checkJobGet(String prop) throws IOException
-  {
-  	String val = job_.get(prop);
-  	if(val == null) {
-  		throw new IOException("JobConf: missing required property: " + prop);
-  	}
-  	return val;
+    maxRecSize_= job_.getInt(CONF_NS + "maxrec", 50*1000);
+    lookAhead_ = job_.getInt(CONF_NS + "lookahead", 2*maxRecSize_);
+    synched_ = false;
+    
+    slowMatch_ = job_.getBoolean(CONF_NS + "slowmatch", false);
+    if(slowMatch_) {
+      beginPat_  = makePatternCDataOrMark(beginMark_);
+      endPat_    = makePatternCDataOrMark(endMark_);
+    }
   }
   
-  public void seekNextRecordBoundary() throws IOException
-  {
-  System.out.println("@@@start seekNext " + in_.getPos());
-    readUntilMatch(beginMark_, null);      
-  System.out.println("@@@end   seekNext " + in_.getPos());
-  }
-    
+  int numNext = 0;
   public synchronized boolean next(Writable key, Writable value)
    throws IOException
   {
     long pos = in_.getPos();
-    if (pos >= end_)
+    numNext++;
+    if (pos >= end_) {
       return false;
+    }
     
     StringBuffer buf = new StringBuffer();
-    readUntilMatch(endMark_, buf);
+    if(!readUntilMatchBegin()) {
+        return false;
+    }
+    if(!readUntilMatchEnd(buf)) {
+        return false;
+    }
     numRecStats(buf);
+    
+    // There is only one elem..key/value splitting is not done here.
+    ((UTF8)key).set(buf.toString());
+    ((UTF8)value).set("");
+    
+    /*if(numNext < 5) {
+        System.out.println("@@@ " + numNext + ". true next k=|" + key.toString().replaceAll("[\\r\\n]", " ")
+        + "|, len=" + buf.length() + " v=|" + value.toString().replaceAll("[\\r\\n]", " ") + "|");
+    }*/
+
     return true;
   }
+  
+  public void seekNextRecordBoundary() throws IOException
+  {
+    readUntilMatchBegin();
+  }
+  
+  boolean readUntilMatchBegin() throws IOException
+  {
+    if(slowMatch_) {
+        return slowReadUntilMatch(beginPat_, false, null);
+    } else {
+        return fastReadUntilMatch(beginMark_, false, null);
+    }
+  }
+  
+  boolean readUntilMatchEnd(StringBuffer buf) throws IOException
+  {
+    if(slowMatch_) {
+      return slowReadUntilMatch(endPat_, true, buf);
+    } else {
+      return fastReadUntilMatch(endMark_, true, buf);
+    }
+  }
+  
+  
+  boolean slowReadUntilMatch(Pattern markPattern, boolean includePat, StringBuffer outBufOrNull) 
+    throws IOException   
+  {
+    try {
+      long inStart = in_.getPos();
+      byte[] buf = new byte[Math.max(lookAhead_, maxRecSize_)];
+      int read = 0;
+      boolean success = true;
+      in_.mark(lookAhead_ + 2);
+      read = in_.read(buf);
+      String sbuf = new String(buf);        
+      Matcher match = markPattern.matcher(sbuf);
 
-  void readUntilMatch(String pat, StringBuffer outBuf) throws IOException 
+      firstMatchStart_ = NA;
+      firstMatchEnd_ = NA;
+      int bufPos = 0;
+      int state = synched_ ? CDATA_OUT : CDATA_UNK;
+      int s=0;
+      int matchLen = 0;
+      while(match.find(bufPos)) {
+        int input;
+        matchLen = match.group(0).length();
+        if(match.group(1) != null) {
+          input = CDATA_BEGIN;
+        } else if(match.group(2) != null) {
+          input = CDATA_END;
+          firstMatchStart_ = NA; // |<DOC CDATA[ </DOC> ]]> should keep it
+        } else {
+          input = RECORD_MAYBE;
+        }
+        if(input == RECORD_MAYBE) {
+            if(firstMatchStart_ == NA) {
+              firstMatchStart_ = match.start();
+              firstMatchEnd_   = match.end();
+            }
+        }
+        state = nextState(state, input, match.start());
+        /*System.out.println("@@@" +
+         s + ". Match " + match.start() + " " + match.groupCount() +
+         " state=" + state + " input=" + input + 
+         " firstMatchStart_=" + firstMatchStart_ + " startinstream=" + (inStart+firstMatchStart_) + 
+         " match=" + match.group(0) + " in=" + in_.getPos());*/
+        if(state == RECORD_ACCEPT) {
+          break;
+        }
+        bufPos = match.end();
+        s++;
+      }
+      if(state != CDATA_UNK) {
+        synched_ = true;
+      }
+      boolean matched = (firstMatchStart_ != NA) && (state == RECORD_ACCEPT || state == CDATA_UNK); 
+      if(matched) {
+        int endPos = includePat ? firstMatchEnd_ : firstMatchStart_;
+        //System.out.println("firstMatchStart_=" + firstMatchStart_ + " firstMatchEnd_=" + firstMatchEnd_);
+        String snip = sbuf.substring(firstMatchStart_, firstMatchEnd_);
+        //System.out.println(" match snip=|" + snip + "| markPattern=" + markPattern);
+        if(outBufOrNull != null) {
+          buf = new byte[endPos];
+          in_.reset();      
+          read = in_.read(buf);
+          if(read != endPos) {
+              //System.out.println("@@@ BAD re-read less: " + read + " < " + endPos);
+          }          
+          outBufOrNull.append(new String(buf));
+        } else {
+          //System.out.println("Skip to " + (inStart + endPos));
+          in_.seek(inStart + endPos);
+        }
+      }
+      return matched;
+    } catch(Exception e) {
+      e.printStackTrace();
+    } finally {
+      // in_ ?
+    }
+    return false;
+  }  
+  
+  // states
+  final static int CDATA_IN  = 10;
+  final static int CDATA_OUT = 11;
+  final static int CDATA_UNK = 12;
+  final static int RECORD_ACCEPT = 13;
+  // inputs
+  final static int CDATA_BEGIN = 20;
+  final static int CDATA_END   = 21;
+  final static int RECORD_MAYBE= 22;
+  
+  /* also updates firstMatchStart_;*/
+  int nextState(int state, int input, int bufPos)
   {
+    switch(state) {
+      case CDATA_UNK:
+      case CDATA_OUT:
+        switch(input) {
+          case CDATA_BEGIN:
+            return CDATA_IN;
+          case CDATA_END:
+            if(state==CDATA_OUT) {
+              //System.out.println("buggy XML " + bufPos);
+            }
+            return CDATA_OUT;
+          case RECORD_MAYBE:
+            return (state==CDATA_UNK) ? CDATA_UNK : RECORD_ACCEPT;
+        }
+      break;
+      case CDATA_IN:
+       return (input==CDATA_END) ? CDATA_OUT : CDATA_IN;
+    }
+    throw new IllegalStateException(state + " " + input + " " + bufPos + " " + splitName_);
+  }
+  
     
-    char[] cpat = pat.toCharArray();
+  Pattern makePatternCDataOrMark(String escapedMark)
+  {
+    StringBuffer pat = new StringBuffer();
+    addGroup(pat, StreamUtil.regexpEscape("CDATA["));   // CDATA_BEGIN
+    addGroup(pat, StreamUtil.regexpEscape("]]>"));      // CDATA_END
+    addGroup(pat, escapedMark);                         // RECORD_MAYBE
+    return Pattern.compile(pat.toString());
+  }
+  void addGroup(StringBuffer pat, String escapedGroup)
+  {
+    if(pat.length() > 0) {
+        pat.append("|");
+    }
+    pat.append("(");
+    pat.append(escapedGroup);
+    pat.append(")");
+  }
+  
+  
+  
+  boolean fastReadUntilMatch(String textPat, boolean includePat, StringBuffer outBufOrNull) throws IOException 
+  {
+    //System.out.println("@@@BEGIN readUntilMatch inPos=" + in_.getPos());  
+    char[] cpat = textPat.toCharArray();
     int m = 0;
+    boolean match = false;
+    long markPos = -1;
     int msup = cpat.length;
+    if(!includePat) {
+      int LL = 120000 * 10;
+      markPos = in_.getPos();
+      in_.mark(LL); // lookAhead_
+    }
     while (true) {
       int b = in_.read();
       if (b == -1)
         break;
 
       char c = (char)b; // this assumes eight-bit matching. OK with UTF-8
+      if(outBufOrNull != null) {
+        outBufOrNull.append(c);
+      }
       if (c == cpat[m]) {
         m++;
-        if(m==msup-1) {
+        if(m==msup) {
+          match = true;
           break;
         }
       } else {
         m = 0;
       }
-      if(outBuf != null) {
-        outBuf.append(c);
+    }
+    if(!includePat && match) {
+      if(outBufOrNull != null) {
+        outBufOrNull.setLength(outBufOrNull.length() - textPat.length());
       }
+      long pos = in_.getPos() - textPat.length();
+      in_.reset();
+      in_.seek(pos);
+    }
+    //System.out.println("@@@DONE  readUntilMatch inPos=" + in_.getPos() + " includePat=" + includePat + " pat=" + textPat + ", buf=|" + outBufOrNull + "|");
+    return match;
+  }
+  
+  String checkJobGet(String prop) throws IOException
+  {
+    String val = job_.get(prop);
+    if(val == null) {
+        throw new IOException("JobConf: missing required property: " + prop);
     }
-System.out.println("@@@START readUntilMatch(" + pat + ", " + outBuf + "\n@@@END readUntilMatch");
+    return val;
   }
   
   
   String beginMark_;
   String endMark_;
+  
+  Pattern beginPat_;
+  Pattern endPat_;
+
+  boolean slowMatch_;  
+  int lookAhead_; // bytes to read to try to synch CDATA/non-CDATA. Should be more than max record size
+  int maxRecSize_;
+
+  final static int NA = -1;  
+  int firstMatchStart_ = 0; // candidate record boundary. Might just be CDATA.
+  int firstMatchEnd_ = 0;
+  
+  boolean isRecordMatch_;
+  boolean synched_;
 }

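In slow-match mode the reader compiles a single alternation with one capture group each for "CDATA[", "]]>" and the record delimiter, then decides from which group matched whether it is inside a CDATA section or looking at a real record boundary. A stripped-down sketch of that group-dispatch idea (the <doc> delimiter and variable names are illustrative only):

    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class CDataOrMarkSketch {
      public static void main(String[] args) {
        // group 1 = CDATA begin, group 2 = CDATA end, group 3 = record mark
        Pattern pat = Pattern.compile("(\\QCDATA[\\E)|(\\Q]]>\\E)|(<doc>)");

        String buf = "<![CDATA[ <doc> inside cdata ]]> <doc> real record";
        Matcher m = pat.matcher(buf);
        boolean inCData = false;
        while (m.find()) {
          if (m.group(1) != null) {
            inCData = true;                     // entered a CDATA section
          } else if (m.group(2) != null) {
            inCData = false;                    // left the CDATA section
          } else if (!inCData) {
            System.out.println("record boundary at offset " + m.start());
            break;                              // only delimiters outside CDATA count
          }
        }
      }
    }
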
Modified: lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DFSClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DFSClient.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DFSClient.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DFSClient.java Fri Jun  9 14:11:29 2006
@@ -808,7 +808,7 @@
                     localName, overwrite, replication, blockSize);
               } catch (RemoteException e) {
                 if (--retries == 0 || 
-                    AlreadyBeingCreatedException.class.getName().
+                    !AlreadyBeingCreatedException.class.getName().
                         equals(e.getClassName())) {
                   throw e;
                 } else {
@@ -838,7 +838,7 @@
                                          clientName.toString());
               } catch (RemoteException e) {
                 if (--retries == 0 || 
-                    NotReplicatedYetException.class.getName().
+                    !NotReplicatedYetException.class.getName().
                         equals(e.getClassName())) {
                   throw e;
                 } else {

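Both DFSClient hunks fix an inverted retry condition: the exception should be rethrown when no retries remain, or when the remote error is not the one known to be transient (AlreadyBeingCreatedException, NotReplicatedYetException); otherwise the call is retried. A minimal sketch of the corrected shape, with simplified stand-ins for the remote call and for RemoteException:

    public class RetryOnRemoteErrorSketch {
      // Simplified stand-in for org.apache.hadoop.ipc.RemoteException.
      static class RemoteException extends java.io.IOException {
        private final String className;
        RemoteException(String className) { super(className); this.className = className; }
        String getClassName() { return className; }
      }

      static final String RETRIABLE = "org.apache.hadoop.dfs.AlreadyBeingCreatedException";
      static int calls = 0;

      static void attemptCreate() throws RemoteException {
        if (++calls < 3) {
          throw new RemoteException(RETRIABLE);  // transient failure on the first two calls
        }
      }

      static void createWithRetry() throws java.io.IOException {
        int retries = 5;
        while (true) {
          try {
            attemptCreate();
            return;
          } catch (RemoteException e) {
            // Rethrow when out of retries OR when the remote error is not the
            // known transient one; otherwise sleep briefly and try again.
            if (--retries == 0 || !RETRIABLE.equals(e.getClassName())) {
              throw e;
            }
            try { Thread.sleep(400); } catch (InterruptedException ie) { }
          }
        }
      }

      public static void main(String[] args) throws Exception {
        createWithRetry();
        System.out.println("created after " + calls + " attempts"); // prints 3
      }
    }
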
Modified: lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DataNode.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DataNode.java?rev=413169&r1=413168&r2=413169&view=diff
==============================================================================
--- lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DataNode.java (original)
+++ lucene/hadoop/branches/branch-0.3/src/java/org/apache/hadoop/dfs/DataNode.java Fri Jun  9 14:11:29 2006
@@ -24,7 +24,6 @@
 
 import java.io.*;
 import java.net.*;
-import java.nio.channels.FileLock;
 import java.util.*;
 
 /**********************************************************
@@ -173,7 +172,20 @@
      * @throws IOException
      */
     private void register() throws IOException {
-      dnRegistration = namenode.register( dnRegistration );
+      while (shouldRun) {
+        try {
+          dnRegistration = namenode.register( dnRegistration );
+          break;
+        } catch( ConnectException se ) {  // namenode has not been started
+          LOG.info("Namenode not available yet, Zzzzz...");
+        } catch( SocketTimeoutException te ) {  // namenode is busy
+          LOG.info("Problem connecting to Namenode: " + 
+                   StringUtils.stringifyException(te));
+        }
+        try {
+          Thread.sleep(10 * 1000);
+        } catch (InterruptedException ie) {}
+      }
       if( storage.getStorageID().equals("") ) {
         storage.setStorageID( dnRegistration.getStorageID());
         storage.write();
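The register() change above turns a single registration attempt into a wait-and-retry loop, so a DataNode started before its NameNode keeps sleeping and retrying instead of failing. A simplified sketch of that shape (the Namenode interface below is a stand-in, and the real code sleeps 10 seconds between attempts):

    import java.net.ConnectException;

    public class RegisterRetrySketch {
      interface Namenode { void register() throws ConnectException; }

      static void register(Namenode namenode) {
        boolean shouldRun = true;
        while (shouldRun) {
          try {
            namenode.register();
            break;                     // registered successfully, stop retrying
          } catch (ConnectException e) {
            System.out.println("Namenode not available yet, retrying...");
          }
          try {
            Thread.sleep(1000);        // the real code waits 10 * 1000 ms between attempts
          } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            return;
          }
        }
      }

      public static void main(String[] args) {
        final int[] calls = { 0 };
        // Fails on the first call, succeeds on the second, to exercise the retry path.
        register(new Namenode() {
          public void register() throws ConnectException {
            if (++calls[0] < 2) throw new ConnectException("Connection refused");
          }
        });
        System.out.println("registered after " + calls[0] + " attempts"); // prints 2
      }
    }
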
@@ -194,7 +206,7 @@
     }
 
     void handleDiskError( String errMsgr ) {
-        LOG.warn( "Shuting down DataNode because "+errMsgr );
+        LOG.warn( "DataNode is shutting down.\n" + errMsgr );
         try {
             namenode.errorReport(
                     dnRegistration, DatanodeProtocol.DISK_ERROR, errMsgr);
@@ -208,9 +220,7 @@
      * forever calling remote NameNode functions.
      */
     public void offerService() throws Exception {
-      // start dataXceiveServer  
-      dataXceiveServer.start();
-      
+     
       long lastHeartbeat = 0, lastBlockReport = 0;
       LOG.info("using BLOCKREPORT_INTERVAL of " + blockReportInterval + "msec");
 
@@ -325,13 +335,16 @@
           } // synchronized
         } // while (shouldRun)
       } catch(DiskErrorException e) {
-        handleDiskError(e.getMessage());
-      }
-      
-      // wait for dataXceiveServer to terminate
-      try {
-          this.dataXceiveServer.join();
-      } catch (InterruptedException ie) {
+        handleDiskError(e.getLocalizedMessage());
+      } catch( RemoteException re ) {
+        String reClass = re.getClassName();
+        if( UnregisteredDatanodeException.class.getName().equals( reClass )) {
+          LOG.warn( "DataNode is shutting down: " + 
+                    StringUtils.stringifyException(re));
+          shutdown();
+          return;
+        }
+        throw re;
       }
     } // offerService
 
@@ -818,6 +831,10 @@
      */
     public void run() {
         LOG.info("Starting DataNode in: "+data.data);
+        
+        // start dataXceiveServer
+        dataXceiveServer.start();
+        
         while (shouldRun) {
             try {
                 offerService();
@@ -832,7 +849,14 @@
               }
             }
         }
-      LOG.info("Finishing DataNode in: "+data.data);
+        
+        // wait for dataXceiveServer to terminate
+        try {
+            this.dataXceiveServer.join();
+        } catch (InterruptedException ie) {
+        }
+        
+        LOG.info("Finishing DataNode in: "+data.data);
     }
 
     /** Start datanode daemons.