You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@chukwa.apache.org by as...@apache.org on 2009/06/24 02:42:42 UTC
svn commit: r787877 - in
/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor:
TestExecAdaptor.java TestFileAdaptor.java
filetailer/TestFileTailingAdaptors.java
Author: asrabkin
Date: Wed Jun 24 00:42:41 2009
New Revision: 787877
URL: http://svn.apache.org/viewvc?rev=787877&view=rev
Log:
CHUKWA-323. Add some test code to look for leaks.
Added:
hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestFileAdaptor.java
Modified:
hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestExecAdaptor.java
hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestExecAdaptor.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestExecAdaptor.java?rev=787877&r1=787876&r2=787877&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestExecAdaptor.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestExecAdaptor.java Wed Jun 24 00:42:41 2009
@@ -19,30 +19,46 @@
import junit.framework.TestCase;
+import org.apache.hadoop.conf.*;
import org.apache.hadoop.chukwa.Chunk;
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
-import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
+import org.apache.hadoop.chukwa.datacollection.connector.*;
+import org.apache.hadoop.chukwa.datacollection.test.*;
public class TestExecAdaptor extends TestCase {
- ChunkCatcherConnector chunks;
+ Connector chunks;
- public TestExecAdaptor() {
- chunks = new ChunkCatcherConnector();
+
+ public void testWithPs() throws ChukwaAgent.AlreadyRunningException, InterruptedException {
+ Configuration conf = new Configuration();
+ conf.set("chukwaAgent.control.port", "0");
+ conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
+ ChukwaAgent agent = new ChukwaAgent(conf);
+ ChunkCatcherConnector chunks = new ChunkCatcherConnector();
chunks.start();
+ String psAgentID = agent.processAddCommand(
+ "add org.apache.hadoop.chukwa.datacollection.adaptor.ExecAdaptor ps ps aux 0");
+ assertNotNull(psAgentID);
+ Chunk c = chunks.waitForAChunk();
+ System.out.println(new String(c.getData()));
+ agent.shutdown();
}
+
+
+ public void testForLeaks() throws ChukwaAgent.AlreadyRunningException, InterruptedException {
+ Configuration conf = new Configuration();
+// conf.set("chukwaAgent.control.port", "0");
+ conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
+ ChukwaAgent agent = new ChukwaAgent(conf);
- public void testWithPs() throws ChukwaAgent.AlreadyRunningException {
- try {
- ChukwaAgent agent = new ChukwaAgent();
- String psAgentID = agent.processAddCommand(
- "add org.apache.hadoop.chukwa.datacollection.adaptor.ExecAdaptor ps ps aux 0");
- assertNotNull(psAgentID);
- Chunk c = chunks.waitForAChunk();
- System.out.println(new String(c.getData()));
- } catch (InterruptedException e) {
-
- }
+ chunks = new ConsoleOutConnector(agent, false);
+ chunks.start();
+ assertEquals(0, agent.adaptorCount());
+ String lsID = agent.processAddCommand(
+ "add exec= org.apache.hadoop.chukwa.datacollection.adaptor.ExecAdaptor Listing 100 /bin/sleep 1 0");
+ Thread.sleep( 60*1000);
+ System.out.println("stopped ok");
}
}
Added: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestFileAdaptor.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestFileAdaptor.java?rev=787877&view=auto
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestFileAdaptor.java (added)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/TestFileAdaptor.java Wed Jun 24 00:42:41 2009
@@ -0,0 +1,108 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.chukwa.datacollection.adaptor;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import java.io.PrintWriter;
+import org.apache.hadoop.chukwa.Chunk;
+import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
+import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.log4j.Level;
+import org.apache.log4j.Logger;
+import junit.framework.TestCase;
+
+// Exercises FileAdaptor end-to-end: a ChukwaAgent reads a small temp file and the
+// resulting chunk is checked via a ChunkCatcherConnector. testRepeatedly re-adds the
+// adaptor in a loop to surface resource leaks (see the lsof note inside it).
+public class TestFileAdaptor extends TestCase {
+
+ // Agent configuration; checkpointing is disabled and timeouts shortened in the ctor.
+ Configuration conf = new Configuration();
+ // Scratch directory (test.build.data, defaulting to /tmp) holding the test file.
+ File baseDir;
+ // Temp file containing ten known lines; created once in the constructor.
+ File testFile;
+ // Connector that captures chunks emitted by the agent so tests can inspect them.
+ ChunkCatcherConnector chunks;
+
+ // One-time setup: configure the agent, create the input file, start the chunk
+ // catcher, and quiet log4j down to WARN so loop output stays readable.
+ public TestFileAdaptor() throws IOException {
+ baseDir = new File(System.getProperty("test.build.data", "/tmp"));
+ conf.set("chukwaAgent.checkpoint.dir", baseDir.getCanonicalPath());
+ conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
+ // Short timeout/context-switch periods (100 ms) keep each iteration fast.
+ conf.setInt("chukwaAgent.adaptor.fileadaptor.timeoutperiod", 100);
+ conf.setInt("chukwaAgent.adaptor.context.switch.time", 100);
+ testFile = makeTestFile();
+
+ chunks = new ChunkCatcherConnector();
+ chunks.start();
+ Logger.getRootLogger().setLevel(Level.WARN);
+ }
+
+ // Writes ten lines of the form "<i> abcdefghijklmnopqrstuvwxyz" (i = 0..9) to a
+ // fresh temp file in baseDir and returns it. Note: despite the name, inDir is a
+ // regular file created by File.createTempFile, not a directory.
+ public File makeTestFile() throws IOException {
+ File inDir = File.createTempFile("atemp", "file", baseDir);
+ inDir.deleteOnExit();
+ FileOutputStream fos = new FileOutputStream(inDir);
+
+ PrintWriter pw = new PrintWriter(fos);
+ for (int i = 0; i < 10; ++i) {
+ pw.print(i + " ");
+ pw.println("abcdefghijklmnopqrstuvwxyz");
+ }
+ pw.flush();
+ pw.close();
+ return inDir;
+ }
+
+ // Single pass: add one FileAdaptor, expect the whole file to arrive as one chunk
+ // (asserted by checking both the first and last of the ten known lines), with
+ // datatype "raw" as given in the add command.
+ public void testOnce() throws IOException,
+ ChukwaAgent.AlreadyRunningException, InterruptedException {
+
+ ChukwaAgent agent = new ChukwaAgent(conf);
+
+ assertEquals(0, agent.adaptorCount());
+
+ agent.processAddCommand("add test = FileAdaptor raw " +testFile.getCanonicalPath() + " 0");
+ assertEquals(1, agent.adaptorCount());
+ Chunk c = chunks.waitForAChunk();
+ String dat = new String(c.getData());
+ assertTrue(dat.startsWith("0 abcdefghijklmnopqrstuvwxyz"));
+ assertTrue(dat.endsWith("9 abcdefghijklmnopqrstuvwxyz\n"));
+ assertTrue(c.getDataType().equals("raw"));
+ // NOTE(review): shutdown is not in a finally block, so a failed assert above
+ // leaves the agent running for later tests in the same JVM.
+ agent.shutdown();
+ }
+
+ // Leak hunt: add/consume/stop the same adaptor 300 times against one agent.
+ // Each iteration expects the adaptor count to return to 0, proving the previous
+ // adaptor was fully torn down before the next is added.
+ public void testRepeatedly() throws IOException,
+ ChukwaAgent.AlreadyRunningException, InterruptedException {
+ int tests = 300; //SHOULD SET HIGHER AND WATCH WITH lsof to find leaks
+
+ ChukwaAgent agent = new ChukwaAgent(conf);
+ for(int i=0; i < tests; ++i) {
+ // Progress heartbeat every 100 iterations.
+ if(i % 100 == 0)
+ System.out.println("buzzed " + i + " times");
+
+ assertEquals(0, agent.adaptorCount());
+ agent.processAddCommand("add test = FileAdaptor raw " +testFile.getCanonicalPath() + " 0");
+ assertEquals(1, agent.adaptorCount());
+ Chunk c = chunks.waitForAChunk();
+ String dat = new String(c.getData());
+ assertTrue(dat.startsWith("0 abcdefghijklmnopqrstuvwxyz"));
+ assertTrue(dat.endsWith("9 abcdefghijklmnopqrstuvwxyz\n"));
+ assertTrue(c.getDataType().equals("raw"));
+ // The adaptor may have already deregistered itself after delivering the file;
+ // stop it explicitly only if it is still registered.
+ if(agent.adaptorCount() > 0)
+ agent.stopAdaptor("test", false);
+ }
+ agent.shutdown();
+ }
+
+}
Modified: hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java
URL: http://svn.apache.org/viewvc/hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java?rev=787877&r1=787876&r2=787877&view=diff
==============================================================================
--- hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java (original)
+++ hadoop/chukwa/trunk/src/test/org/apache/hadoop/chukwa/datacollection/adaptor/filetailer/TestFileTailingAdaptors.java Wed Jun 24 00:42:41 2009
@@ -28,30 +28,39 @@
import org.apache.hadoop.chukwa.datacollection.agent.ChukwaAgent;
import org.apache.hadoop.chukwa.datacollection.controller.ChukwaAgentController;
import org.apache.hadoop.chukwa.datacollection.connector.ChunkCatcherConnector;
+import org.apache.hadoop.conf.Configuration;
public class TestFileTailingAdaptors extends TestCase {
ChunkCatcherConnector chunks;
-
- public TestFileTailingAdaptors() {
+ Configuration conf = new Configuration();
+ File baseDir, testFile;
+
+ public TestFileTailingAdaptors() throws IOException {
chunks = new ChunkCatcherConnector();
chunks.start();
+ baseDir = new File(System.getProperty("test.build.data", "/tmp"));
+ conf.set("chukwaAgent.checkpoint.dir", baseDir.getCanonicalPath());
+ conf.setBoolean("chukwaAgent.checkpoint.enabled", false);
+ conf.setInt("chukwaAgent.adaptor.context.switch.time", 100);
+
+ testFile = makeTestFile("chukwaCrSepTest", 80);
+
}
public void testCrSepAdaptor() throws IOException, InterruptedException,
ChukwaAgent.AlreadyRunningException {
ChukwaAgent agent = new ChukwaAgent();
// Remove any adaptor left over from previous run
- ChukwaConfiguration cc = new ChukwaConfiguration();
- int portno = cc.getInt("chukwaAgent.control.port", 9093);
- ChukwaAgentController cli = new ChukwaAgentController("localhost", portno);
- cli.removeAll();
+
// sleep for some time to make sure we don't get chunk from existing streams
Thread.sleep(5000);
- File testFile = makeTestFile("chukwaCrSepTest", 80);
+ assertEquals(0, agent.adaptorCount());
String adaptorId = agent
.processAddCommand("add org.apache.hadoop.chukwa.datacollection.adaptor.filetailer.CharFileTailingAdaptorUTF8"
+ " lines " + testFile + " 0");
assertNotNull(adaptorId);
+ assertEquals(1, agent.adaptorCount());
+
System.out.println("getting a chunk...");
Chunk c = chunks.waitForAChunk();
System.out.println("got chunk");
@@ -64,7 +73,6 @@
for (int rec = 0; rec < c.getRecordOffsets().length; ++rec) {
String record = new String(c.getData(), recStart,
c.getRecordOffsets()[rec] - recStart + 1);
- System.out.println("record " + rec + " was: " + record);
assertTrue(record.equals(rec + " abcdefghijklmnopqrstuvwxyz\n"));
recStart = c.getRecordOffsets()[rec] + 1;
}
@@ -73,10 +81,33 @@
agent.shutdown();
Thread.sleep(2000);
}
+
+ public void testRepeatedlyOnBigFile() throws IOException,
+ ChukwaAgent.AlreadyRunningException, InterruptedException {
+ int tests = 1000; //SHOULD SET HIGHER AND WATCH WITH lsof to find leaks
+
+ ChukwaAgent agent = new ChukwaAgent(conf);
+ for(int i=0; i < tests; ++i) {
+ if(i % 100 == 0)
+ System.out.println("buzzed " + i + " times");
+
+ assertEquals(0, agent.adaptorCount());
+ agent.processAddCommand("add test = filetailer.FileTailingAdaptor raw " +testFile.getCanonicalPath() + " 0");
+ assertEquals(1, agent.adaptorCount());
+ Chunk c = chunks.waitForAChunk();
+ String dat = new String(c.getData());
+ assertTrue(dat.startsWith("0 abcdefghijklmnopqrstuvwxyz"));
+ assertTrue(dat.endsWith("9 abcdefghijklmnopqrstuvwxyz\n"));
+ assertTrue(c.getDataType().equals("raw"));
+ if(agent.adaptorCount() > 0)
+ agent.stopAdaptor("test", false);
+ }
+ agent.shutdown();
+ }
private File makeTestFile(String name, int size) throws IOException {
- File tmpOutput = new File(System.getProperty("test.build.data", "/tmp"),
- name);
+ File tmpOutput = new File(baseDir, name);
+ tmpOutput.deleteOnExit();
FileOutputStream fos = new FileOutputStream(tmpOutput);
PrintWriter pw = new PrintWriter(fos);