You are viewing a plain-text version of this content; the canonical hyperlink was not preserved in this text extraction.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/02/03 21:34:55 UTC
svn commit: r374738 [3/3] - in /lucene/hadoop/trunk: bin/ lib/
src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/
src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/io/
src/java/org/apache/hadoop/ipc/ src/java/org/apache/hadoop/map...
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSequenceFile.java Fri Feb 3 12:34:32 2006
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
import java.io.*;
import java.util.*;
@@ -42,35 +42,35 @@
int seed = new Random().nextInt();
- NutchFileSystem nfs = new LocalFileSystem(new Configuration());
+ FileSystem fs = new LocalFileSystem(new Configuration());
try {
//LOG.setLevel(Level.FINE);
- writeTest(nfs, count, seed, file, false);
- readTest(nfs, count, seed, file);
+ writeTest(fs, count, seed, file, false);
+ readTest(fs, count, seed, file);
- sortTest(nfs, count, megabytes, factor, false, file);
- checkSort(nfs, count, seed, file);
+ sortTest(fs, count, megabytes, factor, false, file);
+ checkSort(fs, count, seed, file);
- sortTest(nfs, count, megabytes, factor, true, file);
- checkSort(nfs, count, seed, file);
+ sortTest(fs, count, megabytes, factor, true, file);
+ checkSort(fs, count, seed, file);
- mergeTest(nfs, count, seed, file, false, factor, megabytes);
- checkSort(nfs, count, seed, file);
+ mergeTest(fs, count, seed, file, false, factor, megabytes);
+ checkSort(fs, count, seed, file);
- mergeTest(nfs, count, seed, file, true, factor, megabytes);
- checkSort(nfs, count, seed, file);
+ mergeTest(fs, count, seed, file, true, factor, megabytes);
+ checkSort(fs, count, seed, file);
} finally {
- nfs.close();
+ fs.close();
}
}
- private static void writeTest(NutchFileSystem nfs, int count, int seed,
+ private static void writeTest(FileSystem fs, int count, int seed,
String file, boolean compress)
throws IOException {
new File(file).delete();
LOG.fine("creating with " + count + " records");
SequenceFile.Writer writer =
- new SequenceFile.Writer(nfs, file, RandomDatum.class, RandomDatum.class,
+ new SequenceFile.Writer(fs, file, RandomDatum.class, RandomDatum.class,
compress);
RandomDatum.Generator generator = new RandomDatum.Generator(seed);
for (int i = 0; i < count; i++) {
@@ -83,12 +83,12 @@
writer.close();
}
- private static void readTest(NutchFileSystem nfs, int count, int seed, String file)
+ private static void readTest(FileSystem fs, int count, int seed, String file)
throws IOException {
RandomDatum k = new RandomDatum();
RandomDatum v = new RandomDatum();
LOG.fine("reading " + count + " records");
- SequenceFile.Reader reader = new SequenceFile.Reader(nfs, file, conf);
+ SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
RandomDatum.Generator generator = new RandomDatum.Generator(seed);
for (int i = 0; i < count; i++) {
generator.next();
@@ -106,17 +106,17 @@
}
- private static void sortTest(NutchFileSystem nfs, int count, int megabytes,
+ private static void sortTest(FileSystem fs, int count, int megabytes,
int factor, boolean fast, String file)
throws IOException {
new File(file+".sorted").delete();
- SequenceFile.Sorter sorter = newSorter(nfs, fast, megabytes, factor);
+ SequenceFile.Sorter sorter = newSorter(fs, fast, megabytes, factor);
LOG.fine("sorting " + count + " records");
sorter.sort(file, file+".sorted");
LOG.fine("done sorting " + count + " records");
}
- private static void checkSort(NutchFileSystem nfs, int count, int seed, String file)
+ private static void checkSort(FileSystem fs, int count, int seed, String file)
throws IOException {
LOG.fine("sorting " + count + " records in memory for check");
RandomDatum.Generator generator = new RandomDatum.Generator(seed);
@@ -132,7 +132,7 @@
RandomDatum k = new RandomDatum();
RandomDatum v = new RandomDatum();
Iterator iterator = map.entrySet().iterator();
- SequenceFile.Reader reader = new SequenceFile.Reader(nfs, file + ".sorted", conf);
+ SequenceFile.Reader reader = new SequenceFile.Reader(fs, file + ".sorted", conf);
for (int i = 0; i < count; i++) {
Map.Entry entry = (Map.Entry)iterator.next();
RandomDatum key = (RandomDatum)entry.getKey();
@@ -150,7 +150,7 @@
LOG.fine("sucessfully checked " + count + " records");
}
- private static void mergeTest(NutchFileSystem nfs, int count, int seed,
+ private static void mergeTest(FileSystem fs, int count, int seed,
String file, boolean fast, int factor,
int megabytes)
throws IOException {
@@ -164,10 +164,10 @@
for (int i = 0; i < factor; i++) {
names[i] = file+"."+i;
sortedNames[i] = names[i] + ".sorted";
- nfs.delete(new File(names[i]));
- nfs.delete(new File(sortedNames[i]));
+ fs.delete(new File(names[i]));
+ fs.delete(new File(sortedNames[i]));
writers[i] =
- new SequenceFile.Writer(nfs, names[i], RandomDatum.class,RandomDatum.class);
+ new SequenceFile.Writer(fs, names[i], RandomDatum.class,RandomDatum.class);
}
RandomDatum.Generator generator = new RandomDatum.Generator(seed);
@@ -185,21 +185,21 @@
for (int i = 0; i < factor; i++) {
LOG.fine("sorting file " + i + " with " + count/factor + " records");
- newSorter(nfs, fast, megabytes, factor).sort(names[i], sortedNames[i]);
+ newSorter(fs, fast, megabytes, factor).sort(names[i], sortedNames[i]);
}
LOG.fine("merging " + factor + " files with " + count/factor + " records");
- nfs.delete(new File(file+".sorted"));
- newSorter(nfs, fast, megabytes, factor).merge(sortedNames, file+".sorted");
+ fs.delete(new File(file+".sorted"));
+ newSorter(fs, fast, megabytes, factor).merge(sortedNames, file+".sorted");
}
- private static SequenceFile.Sorter newSorter(NutchFileSystem nfs,
+ private static SequenceFile.Sorter newSorter(FileSystem fs,
boolean fast,
int megabytes, int factor) {
SequenceFile.Sorter sorter =
fast
- ? new SequenceFile.Sorter(nfs, new RandomDatum.Comparator(),RandomDatum.class, conf)
- : new SequenceFile.Sorter(nfs, RandomDatum.class, RandomDatum.class, conf);
+ ? new SequenceFile.Sorter(fs, new RandomDatum.Comparator(),RandomDatum.class, conf)
+ : new SequenceFile.Sorter(fs, RandomDatum.class, RandomDatum.class, conf);
sorter.setMemory(megabytes * 1024*1024);
sorter.setFactor(factor);
return sorter;
@@ -224,7 +224,7 @@
System.exit(-1);
}
int i = 0;
- NutchFileSystem nfs = NutchFileSystem.parseArgs(args, i, conf);
+ FileSystem fs = FileSystem.parseArgs(args, i, conf);
try {
for (; i < args.length; i++) { // parse command line
if (args[i] == null) {
@@ -265,21 +265,21 @@
LOG.setLevel(Level.FINE);
if (create && !merge) {
- writeTest(nfs, count, seed, file, compress);
- readTest(nfs, count, seed, file);
+ writeTest(fs, count, seed, file, compress);
+ readTest(fs, count, seed, file);
}
if (merge) {
- mergeTest(nfs, count, seed, file, fast, factor, megabytes);
+ mergeTest(fs, count, seed, file, fast, factor, megabytes);
} else {
- sortTest(nfs, count, megabytes, factor, fast, file);
+ sortTest(fs, count, megabytes, factor, fast, file);
}
if (check) {
- checkSort(nfs, count, seed, file);
+ checkSort(fs, count, seed, file);
}
} finally {
- nfs.close();
+ fs.close();
}
}
}
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestSetFile.java Fri Feb 3 12:34:32 2006
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
import java.io.*;
import java.util.*;
@@ -22,6 +22,7 @@
import java.util.logging.*;
import org.apache.hadoop.fs.*;
+import org.apache.hadoop.conf.*;
import org.apache.hadoop.util.LogFormatter;
/** Support for flat files of binary key/value pairs. */
@@ -35,13 +36,13 @@
public TestSetFile(String name) { super(name); }
public void testSetFile() throws Exception {
- NutchFileSystem nfs = new LocalFileSystem(conf);
+ FileSystem fs = new LocalFileSystem(conf);
try {
RandomDatum[] data = generate(10000);
- writeTest(nfs, data, FILE);
- readTest(nfs, data, FILE);
+ writeTest(fs, data, FILE);
+ readTest(fs, data, FILE);
} finally {
- nfs.close();
+ fs.close();
}
}
@@ -58,21 +59,21 @@
return data;
}
- private static void writeTest(NutchFileSystem nfs, RandomDatum[] data, String file)
+ private static void writeTest(FileSystem fs, RandomDatum[] data, String file)
throws IOException {
- MapFile.delete(nfs, file);
+ MapFile.delete(fs, file);
LOG.fine("creating with " + data.length + " records");
- SetFile.Writer writer = new SetFile.Writer(nfs, file, RandomDatum.class);
+ SetFile.Writer writer = new SetFile.Writer(fs, file, RandomDatum.class);
for (int i = 0; i < data.length; i++)
writer.append(data[i]);
writer.close();
}
- private static void readTest(NutchFileSystem nfs, RandomDatum[] data, String file)
+ private static void readTest(FileSystem fs, RandomDatum[] data, String file)
throws IOException {
RandomDatum v = new RandomDatum();
LOG.fine("reading " + data.length + " records");
- SetFile.Reader reader = new SetFile.Reader(nfs, file, conf);
+ SetFile.Reader reader = new SetFile.Reader(fs, file, conf);
for (int i = 0; i < data.length; i++) {
if (!reader.seek(data[i]))
throw new RuntimeException("wrong value at " + i);
@@ -96,7 +97,7 @@
}
int i = 0;
- NutchFileSystem nfs = NutchFileSystem.parseArgs(args, i, conf);
+ FileSystem fs = FileSystem.parseArgs(args, i, conf);
try {
for (; i < args.length; i++) { // parse command line
if (args[i] == null) {
@@ -122,15 +123,15 @@
RandomDatum[] data = generate(count);
if (create) {
- writeTest(nfs, data, file);
+ writeTest(fs, data, file);
}
if (check) {
- readTest(nfs, data, file);
+ readTest(fs, data, file);
}
}
} finally {
- nfs.close();
+ fs.close();
}
}
}
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestUTF8.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestUTF8.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestUTF8.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestUTF8.java Fri Feb 3 12:34:32 2006
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
import junit.framework.TestCase;
import java.util.Random;
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestVersionedWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestVersionedWritable.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestVersionedWritable.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestVersionedWritable.java Fri Feb 3 12:34:32 2006
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
import java.io.*;
import java.util.Random;
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestWritable.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestWritable.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestWritable.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/io/TestWritable.java Fri Feb 3 12:34:32 2006
@@ -14,13 +14,12 @@
* limitations under the License.
*/
-package org.apache.hadoop.io.
+package org.apache.hadoop.io;
import java.io.*;
import java.util.Random;
import junit.framework.TestCase;
import org.apache.hadoop.io.*;
-import org.apache.nutch.parse.ParseData;
import org.apache.hadoop.conf.Configuration;
/** Unit tests for Writable. */
@@ -70,10 +69,6 @@
dib.reset(dob.getData(), dob.getLength());
Writable after = (Writable)before.getClass().newInstance();
- if(after instanceof ParseData) {
- ParseData parseData = (ParseData) after;
- parseData.setConf(new Configuration());
- }
after.readFields(dib);
assertEquals(before, after);
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestIPC.java Fri Feb 3 12:34:32 2006
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ipc.
+package org.apache.hadoop.ipc;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.LongWritable;
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/ipc/TestRPC.java Fri Feb 3 12:34:32 2006
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.ipc.
+package org.apache.hadoop.ipc;
import java.io.IOException;
import java.net.InetSocketAddress;
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MapredLoadTest.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MapredLoadTest.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MapredLoadTest.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/MapredLoadTest.java Fri Feb 3 12:34:32 2006
@@ -13,7 +13,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
@@ -145,7 +145,7 @@
//
// Write the answer key to a file.
//
- NutchFileSystem fs = NutchFileSystem.get(conf);
+ FileSystem fs = FileSystem.get(conf);
File testdir = new File("mapred.loadtest");
fs.mkdirs(testdir);
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestSequenceFileInputFormat.java Fri Feb 3 12:34:32 2006
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
import java.io.*;
import java.util.*;
@@ -33,7 +33,7 @@
public void testFormat() throws Exception {
JobConf job = new JobConf(conf);
- NutchFileSystem fs = NutchFileSystem.getNamed("local", conf);
+ FileSystem fs = FileSystem.getNamed("local", conf);
File dir = new File(System.getProperty("test.build.data",".") + "/mapred");
File file = new File(dir, "test.seq");
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java?rev=374738&r1=374737&r2=374738&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/TestTextInputFormat.java Fri Feb 3 12:34:32 2006
@@ -14,7 +14,7 @@
* limitations under the License.
*/
-package org.apache.hadoop.mapred.
+package org.apache.hadoop.mapred;
import java.io.*;
import java.util.*;
@@ -33,7 +33,7 @@
public void testFormat() throws Exception {
JobConf job = new JobConf(conf);
- NutchFileSystem fs = NutchFileSystem.getNamed("local", conf);
+ FileSystem fs = FileSystem.getNamed("local", conf);
File dir = new File(System.getProperty("test.build.data",".") + "/mapred");
File file = new File(dir, "test.txt");