Posted to commits@lucene.apache.org by ma...@apache.org on 2013/07/17 21:14:16 UTC

svn commit: r1504236 - in /lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs: BasicHdfsTest.java StressHdfsTest.java

Author: markrmiller
Date: Wed Jul 17 19:14:16 2013
New Revision: 1504236

URL: http://svn.apache.org/r1504236
Log:
SOLR-4990: Beef up BasicHdfsTest and rename it to StressHdfsTest

Added:
    lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java
      - copied, changed from r1499019, lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/BasicHdfsTest.java
Removed:
    lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/BasicHdfsTest.java

Copied: lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java (from r1499019, lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/BasicHdfsTest.java)
URL: http://svn.apache.org/viewvc/lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java?p2=lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java&p1=lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/BasicHdfsTest.java&r1=1499019&r2=1504236&rev=1504236&view=diff
==============================================================================
--- lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/BasicHdfsTest.java (original)
+++ lucene/dev/trunk/solr/core/src/test/org/apache/solr/cloud/hdfs/StressHdfsTest.java Wed Jul 17 19:14:16 2013
@@ -20,6 +20,9 @@ package org.apache.solr.cloud.hdfs;
 import java.io.File;
 import java.io.IOException;
 import java.net.URI;
+import java.net.URISyntaxException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
@@ -27,11 +30,15 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.lucene.util.LuceneTestCase.Slow;
 import org.apache.solr.client.solrj.SolrQuery;
+import org.apache.solr.client.solrj.SolrServer;
+import org.apache.solr.client.solrj.SolrServerException;
+import org.apache.solr.client.solrj.impl.HttpSolrServer;
 import org.apache.solr.client.solrj.request.QueryRequest;
 import org.apache.solr.cloud.BasicDistributedZkTest;
 import org.apache.solr.common.params.CollectionParams.CollectionAction;
 import org.apache.solr.common.params.ModifiableSolrParams;
 import org.apache.solr.common.util.NamedList;
+import org.apache.zookeeper.KeeperException;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 
@@ -40,8 +47,9 @@ import com.carrotsearch.randomizedtestin
 
 @Slow
 @ThreadLeakScope(Scope.NONE) // hdfs client currently leaks thread(s)
-public class BasicHdfsTest extends BasicDistributedZkTest {
+public class StressHdfsTest extends BasicDistributedZkTest {
 
+  private static final String DELETE_DATA_DIR_COLLECTION = "delete_data_dir";
   private static MiniDFSCluster dfsCluster;
   
   @BeforeClass
@@ -66,10 +74,10 @@ public class BasicHdfsTest extends Basic
     return HdfsTestUtil.getDataDir(dfsCluster, dataDir);
   }
   
-  public BasicHdfsTest() {
+  public StressHdfsTest() {
     super();
     sliceCount = 1;
-    shardCount = 1;
+    shardCount = TEST_NIGHTLY ? 13 : random().nextInt(3) + 1;
   }
   
   protected String getSolrXml() {
@@ -78,29 +86,79 @@ public class BasicHdfsTest extends Basic
   
   @Override
   public void doTest() throws Exception {
-    createCollection("delete_data_dir", 1, 1, 1);
-    waitForRecoveriesToFinish("delete_data_dir", false);
-    cloudClient.setDefaultCollection("delete_data_dir");
-    cloudClient.getZkStateReader().updateClusterState(true);
-    NamedList<Object> response = cloudClient.query(
-        new SolrQuery().setRequestHandler("/admin/system")).getResponse();
-    NamedList<Object> coreInfo = (NamedList<Object>) response.get("core");
-    String dataDir = (String) ((NamedList<Object>) coreInfo.get("directory"))
-        .get("data");
+    int cnt = random().nextInt(2) + 1;
+    for (int i = 0; i < cnt; i++) {
+      createAndDeleteCollection();
+    }
+  }
 
+  private void createAndDeleteCollection() throws SolrServerException,
+      IOException, Exception, KeeperException, InterruptedException,
+      URISyntaxException {
+    
+    boolean overshard = random().nextBoolean();
+    if (overshard) {
+      createCollection(DELETE_DATA_DIR_COLLECTION, shardCount * 2, 1, 2);
+    } else {
+      int rep = shardCount / 2;
+      if (rep == 0) rep = 1;
+      createCollection(DELETE_DATA_DIR_COLLECTION, rep, 2, 1);
+    }
+
+    waitForRecoveriesToFinish(DELETE_DATA_DIR_COLLECTION, false);
+    cloudClient.setDefaultCollection(DELETE_DATA_DIR_COLLECTION);
+    cloudClient.getZkStateReader().updateClusterState(true);
+    
+    
+    // collect the data dirs
+    List<String> dataDirs = new ArrayList<String>();
+    
+    int i = 0;
+    for (SolrServer client : clients) {
+      HttpSolrServer c = new HttpSolrServer(getBaseUrl(client) + "/delete_data_dir");
+      c.add(getDoc("id", i++));
+      if (random().nextBoolean()) c.add(getDoc("id", i++));
+      if (random().nextBoolean()) c.add(getDoc("id", i++));
+      if (random().nextBoolean()) {
+        c.commit();
+      } else {
+        c.commit(true, true, true);
+      }
+      
+      c.query(new SolrQuery("id:" + i));
+      c.setSoTimeout(30000);
+      c.setConnectionTimeout(30000);
+      NamedList<Object> response = c.query(
+          new SolrQuery().setRequestHandler("/admin/system")).getResponse();
+      NamedList<Object> coreInfo = (NamedList<Object>) response.get("core");
+      String dataDir = (String) ((NamedList<Object>) coreInfo.get("directory"))
+          .get("data");
+      dataDirs.add(dataDir);
+      c.shutdown();
+    }
+    
+    if (random().nextBoolean()) {
+      cloudClient.deleteByQuery("*:*");
+      cloudClient.commit();
+      
+      assertEquals(0, cloudClient.query(new SolrQuery("*:*")).getResults().getNumFound());
+    }
+    
     ModifiableSolrParams params = new ModifiableSolrParams();
     params.set("action", CollectionAction.DELETE.toString());
-    params.set("name", "delete_data_dir");
+    params.set("name", DELETE_DATA_DIR_COLLECTION);
     QueryRequest request = new QueryRequest(params);
     request.setPath("/admin/collections");
     cloudClient.request(request);
     
-    Configuration conf = new Configuration();
-    conf.setBoolean("fs.hdfs.impl.disable.cache", true);
-    FileSystem fs = FileSystem.newInstance(new URI(dataDir), conf);
-    assertFalse(
-        "Data directory exists after collection removal : "
-            + dataDir, fs.exists(new Path(dataDir)));
-    fs.close();
+    // check that all dirs are gone
+    for (String dataDir : dataDirs) {
+      Configuration conf = new Configuration();
+      FileSystem fs = FileSystem.newInstance(new URI(dataDir), conf);
+      assertFalse(
+          "Data directory exists after collection removal : " + dataDir,
+          fs.exists(new Path(dataDir)));
+      fs.close();
+    }
   }
 }
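
The new assertions at the end of the test verify HDFS cleanup through Hadoop's FileSystem API. As a standalone illustration of that check, here is a minimal sketch; the class and method names are illustrative only and not part of this commit, and the data-dir URI would come from each core's /admin/system response, as the test above shows.

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Hypothetical helper, not part of the commit.
    public class HdfsCleanupCheck {
      // Returns true once the given HDFS data directory no longer exists.
      static boolean isDataDirGone(String hdfsDataDir) throws Exception {
        Configuration conf = new Configuration();
        // newInstance bypasses the JVM-wide FileSystem cache, so a handle
        // opened before the collection delete cannot serve a stale view.
        FileSystem fs = FileSystem.newInstance(new URI(hdfsDataDir), conf);
        try {
          return !fs.exists(new Path(hdfsDataDir));
        } finally {
          fs.close();
        }
      }
    }

Note that the removed BasicHdfsTest set fs.hdfs.impl.disable.cache on the Configuration for the same purpose; since FileSystem.newInstance already returns an uncached instance, that flag is redundant, which may be why the committed version drops it.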