Posted to commits@lucene.apache.org by ma...@apache.org on 2013/07/01 23:37:57 UTC

svn commit: r1498711 - in /lucene/dev/branches/branch_4x: ./ solr/ solr/core/ solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java

Author: markrmiller
Date: Mon Jul  1 21:37:56 2013
New Revision: 1498711

URL: http://svn.apache.org/r1498711
Log:
SOLR-4916: Do not run HDFS tests on Windows, as they require Cygwin
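
The fix guards the HDFS test setup with a test assumption rather than a hard failure, so on Windows the tests are reported as skipped instead of erroring out without Cygwin. A minimal standalone sketch of the same pattern (the class and test method names here are illustrative, not from this commit):

    import org.apache.lucene.util.Constants;
    import org.apache.lucene.util.LuceneTestCase;
    import org.junit.Test;

    public class WindowsSkipExample extends LuceneTestCase {
      @Test
      public void testSomethingNeedingCygwin() {
        // assumeFalse marks the test as skipped (not failed) when the
        // condition is true -- here, when running on Windows.
        assumeFalse("HDFS tests on Windows require Cygwin", Constants.WINDOWS);
        // ... test body that relies on Cygwin-backed shell utilities ...
      }
    }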

Modified:
    lucene/dev/branches/branch_4x/   (props changed)
    lucene/dev/branches/branch_4x/solr/   (props changed)
    lucene/dev/branches/branch_4x/solr/core/   (props changed)
    lucene/dev/branches/branch_4x/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java

Modified: lucene/dev/branches/branch_4x/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java
URL: http://svn.apache.org/viewvc/lucene/dev/branches/branch_4x/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java?rev=1498711&r1=1498710&r2=1498711&view=diff
==============================================================================
--- lucene/dev/branches/branch_4x/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java (original)
+++ lucene/dev/branches/branch_4x/solr/core/src/test/org/apache/solr/cloud/hdfs/HdfsTestUtil.java Mon Jul  1 21:37:56 2013
@@ -7,7 +7,10 @@ import java.util.Locale;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.lucene.util.Constants;
+import org.apache.lucene.util.LuceneTestCase;
 import org.apache.solr.SolrTestCaseJ4;
+import org.junit.Assert;
 
 /*
  * Licensed to the Apache Software Foundation (ASF) under one or more
@@ -31,6 +34,10 @@ public class HdfsTestUtil {
   private static Locale savedLocale;
 
   public static MiniDFSCluster setupClass(String dataDir) throws Exception {
+    LuceneTestCase.assumeFalse("HDFS tests on Windows require Cygwin", Constants.WINDOWS);
+    File dir = new File(dataDir);
+    dir.mkdirs();
+
     savedLocale = Locale.getDefault();
     // TODO: we HACK around HADOOP-9643
     Locale.setDefault(Locale.ENGLISH);
@@ -41,12 +48,12 @@ public class HdfsTestUtil {
     conf.set("dfs.block.access.token.enable", "false");
     conf.set("dfs.permissions.enabled", "false");
     conf.set("hadoop.security.authentication", "simple");
-    conf.set("hdfs.minidfs.basedir", dataDir + File.separator + "hdfsBaseDir");
-    conf.set("dfs.namenode.name.dir", dataDir + File.separator + "nameNodeNameDir");
+    conf.set("hdfs.minidfs.basedir", dir.getAbsolutePath() + File.separator + "hdfsBaseDir");
+    conf.set("dfs.namenode.name.dir", dir.getAbsolutePath() + File.separator + "nameNodeNameDir");
     
     
-    System.setProperty("test.build.data", dataDir + File.separator + "hdfs" + File.separator + "build");
-    System.setProperty("test.cache.data", dataDir + File.separator + "hdfs" + File.separator + "cache");
+    System.setProperty("test.build.data", dir.getAbsolutePath() + File.separator + "hdfs" + File.separator + "build");
+    System.setProperty("test.cache.data", dir.getAbsolutePath() + File.separator + "hdfs" + File.separator + "cache");
     System.setProperty("solr.lock.type", "hdfs");
     
     MiniDFSCluster dfsCluster = new MiniDFSCluster(conf, dataNodes, true, null);
@@ -66,7 +73,9 @@ public class HdfsTestUtil {
     }
     
     // TODO: we HACK around HADOOP-9643
-    Locale.setDefault(savedLocale);
+    if (savedLocale != null) {
+      Locale.setDefault(savedLocale);
+    }
   }
   
   public static String getDataDir(MiniDFSCluster dfsCluster, String dataDir)
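
For context, a hedged sketch of how a Solr test class might drive HdfsTestUtil after this change; the teardownClass counterpart and the temp-directory choice are assumptions, not shown in this diff:

    import java.io.File;

    import org.apache.hadoop.hdfs.MiniDFSCluster;
    import org.apache.solr.SolrTestCaseJ4;
    import org.apache.solr.cloud.hdfs.HdfsTestUtil;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    public class ExampleHdfsBackedTest extends SolrTestCaseJ4 {

      private static MiniDFSCluster dfsCluster;

      @BeforeClass
      public static void setupHdfs() throws Exception {
        // On Windows, setupClass() now trips the new assume, so the whole
        // class is skipped instead of failing for lack of Cygwin.
        String dataDir = new File(System.getProperty("java.io.tmpdir"), "hdfs-example").getPath();
        dfsCluster = HdfsTestUtil.setupClass(dataDir);
      }

      @AfterClass
      public static void teardownHdfs() throws Exception {
        // Assumed counterpart to setupClass; the locale-restore hunk above
        // appears to belong to it. Calling dfsCluster.shutdown() directly
        // would also release the mini cluster.
        if (dfsCluster != null) {
          HdfsTestUtil.teardownClass(dfsCluster);
          dfsCluster = null;
        }
      }
    }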