Posted to commits@sqoop.apache.org by ab...@apache.org on 2014/10/10 05:06:58 UTC
[06/50] [abbrv] SQOOP-1497: Sqoop2: Entity Nomenclature Revisited
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/spi/src/main/java/org/apache/sqoop/job/etl/Extractor.java
----------------------------------------------------------------------
diff --git a/spi/src/main/java/org/apache/sqoop/job/etl/Extractor.java b/spi/src/main/java/org/apache/sqoop/job/etl/Extractor.java
index 93b3643..34f02c9 100644
--- a/spi/src/main/java/org/apache/sqoop/job/etl/Extractor.java
+++ b/spi/src/main/java/org/apache/sqoop/job/etl/Extractor.java
@@ -21,18 +21,18 @@ package org.apache.sqoop.job.etl;
* This allows a connector to extract data from a source system
* based on each partition.
*/
-public abstract class Extractor<ConnectionConfiguration, JobConfiguration, Partition> {
+public abstract class Extractor<LinkConfiguration, JobConfiguration, Partition> {
/**
- * Extract data from source and pass them into the framework.
+ * Extract data from the source and pass it into Sqoop.
*
* @param context Extractor context object
- * @param connectionConfiguration Connection configuration
+ * @param linkConfiguration Link configuration
* @param jobConfiguration Job configuration
* @param partition Partition that this extract should work on
*/
public abstract void extract(ExtractorContext context,
- ConnectionConfiguration connectionConfiguration,
+ LinkConfiguration linkConfiguration,
JobConfiguration jobConfiguration,
Partition partition);
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/spi/src/main/java/org/apache/sqoop/job/etl/From.java
----------------------------------------------------------------------
diff --git a/spi/src/main/java/org/apache/sqoop/job/etl/From.java b/spi/src/main/java/org/apache/sqoop/job/etl/From.java
index 80f4f29..3dd8fb9 100644
--- a/spi/src/main/java/org/apache/sqoop/job/etl/From.java
+++ b/spi/src/main/java/org/apache/sqoop/job/etl/From.java
@@ -23,7 +23,7 @@ package org.apache.sqoop.job.etl;
* Initializer
* -> Partitioner
* -> Extractor
- * -> (framework-defined steps)
+ * -> (Sqoop-defined steps)
* -> Destroyer
*/
public class From extends Transferable {
@@ -50,7 +50,7 @@ public class From extends Transferable {
@Override
public String toString() {
- return "Importer{" + super.toString() +
+ return "From{" + super.toString() +
", partitioner=" + partitioner.getName() +
", extractor=" + extractor.getName() +
'}';
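
To see the renamed pieces in context: this is roughly how a connector wires
the FROM direction together. The concrete classes are hypothetical, and the
constructor argument order (initializer, partitioner, extractor, destroyer)
is an assumption matching the workflow listed in the javadoc above.

  import org.apache.sqoop.job.etl.From;

  public class MyConnectorFrom {
    // Initializer -> Partitioner -> Extractor -> (Sqoop-defined steps) -> Destroyer
    static final From FROM = new From(
        MyFromInitializer.class,  // one-time setup before partitioning
        MyPartitioner.class,      // splits the input into partitions
        MyExtractor.class,        // one instance per partition
        MyFromDestroyer.class);   // one-time cleanup after extraction
  }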
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/spi/src/main/java/org/apache/sqoop/job/etl/Initializer.java
----------------------------------------------------------------------
diff --git a/spi/src/main/java/org/apache/sqoop/job/etl/Initializer.java b/spi/src/main/java/org/apache/sqoop/job/etl/Initializer.java
index 88744ea..c9ae540 100644
--- a/spi/src/main/java/org/apache/sqoop/job/etl/Initializer.java
+++ b/spi/src/main/java/org/apache/sqoop/job/etl/Initializer.java
@@ -17,16 +17,16 @@
*/
package org.apache.sqoop.job.etl;
-import org.apache.sqoop.schema.Schema;
-
import java.util.LinkedList;
import java.util.List;
+import org.apache.sqoop.schema.Schema;
+
/**
* This allows a connector to define initialization work for execution,
* for example, context configuration.
*/
-public abstract class Initializer<ConnectionConfiguration, JobConfiguration> {
+public abstract class Initializer<LinkConfiguration, JobConfiguration> {
/**
* Initialize new submission based on given configuration properties. Any
@@ -34,11 +34,11 @@ public abstract class Initializer<ConnectionConfiguration, JobConfiguration> {
* promoted to all other parts of the workflow automatically.
*
* @param context Initializer context object
- * @param connectionConfiguration Connector's connection configuration object
+ * @param linkConfiguration Connector's link configuration object
* @param jobConfiguration Connector's job configuration object
*/
public abstract void initialize(InitializerContext context,
- ConnectionConfiguration connectionConfiguration,
+ LinkConfiguration linkConfiguration,
JobConfiguration jobConfiguration);
/**
@@ -49,13 +49,13 @@ public abstract class Initializer<ConnectionConfiguration, JobConfiguration> {
* @return
*/
public List<String> getJars(InitializerContext context,
- ConnectionConfiguration connectionConfiguration,
+ LinkConfiguration linkConfiguration,
JobConfiguration jobConfiguration) {
return new LinkedList<String>();
}
public abstract Schema getSchema(InitializerContext context,
- ConnectionConfiguration connectionConfiguration,
+ LinkConfiguration linkConfiguration,
JobConfiguration jobConfiguration);
}
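
A minimal initializer under the new naming might look like this sketch; the
configuration classes are hypothetical and the Schema(String) constructor is
an assumption.

  import java.util.LinkedList;
  import java.util.List;

  import org.apache.sqoop.job.etl.Initializer;
  import org.apache.sqoop.job.etl.InitializerContext;
  import org.apache.sqoop.schema.Schema;

  public class MyInitializer
      extends Initializer<MyLinkConfiguration, MyJobConfiguration> {

    @Override
    public void initialize(InitializerContext context,
                           MyLinkConfiguration linkConfiguration,
                           MyJobConfiguration jobConfiguration) {
      // Verify connectivity and stash derived values into the context;
      // properties set here are promoted to the rest of the workflow.
    }

    @Override
    public List<String> getJars(InitializerContext context,
                                MyLinkConfiguration linkConfiguration,
                                MyJobConfiguration jobConfiguration) {
      // Extra jars that the execution engine must ship with the job.
      return new LinkedList<String>();
    }

    @Override
    public Schema getSchema(InitializerContext context,
                            MyLinkConfiguration linkConfiguration,
                            MyJobConfiguration jobConfiguration) {
      return new Schema("my-schema"); // constructor assumed
    }
  }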
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/spi/src/main/java/org/apache/sqoop/job/etl/Loader.java
----------------------------------------------------------------------
diff --git a/spi/src/main/java/org/apache/sqoop/job/etl/Loader.java b/spi/src/main/java/org/apache/sqoop/job/etl/Loader.java
index 0b32314..434dcf6 100644
--- a/spi/src/main/java/org/apache/sqoop/job/etl/Loader.java
+++ b/spi/src/main/java/org/apache/sqoop/job/etl/Loader.java
@@ -20,18 +20,18 @@ package org.apache.sqoop.job.etl;
/**
* This allows a connector to load data into a target system.
*/
-public abstract class Loader<ConnectionConfiguration, JobConfiguration> {
+public abstract class Loader<LinkConfiguration, JobConfiguration> {
/**
* Load data to target.
*
* @param context Loader context object
- * @param connectionConfiguration Connection configuration
+ * @param linkConfiguration Link configuration
* @param jobConfiguration Job configuration
* @throws Exception
*/
public abstract void load(LoaderContext context,
- ConnectionConfiguration connectionConfiguration,
+ LinkConfiguration linkConfiguration,
JobConfiguration jobConfiguration) throws Exception;
}
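
A minimal loader sketch with the renamed generic parameter; the configuration
classes are hypothetical, and the data reader is assumed to mirror the
extractor's data writer.

  import org.apache.sqoop.job.etl.Loader;
  import org.apache.sqoop.job.etl.LoaderContext;

  public class MyLoader
      extends Loader<MyLinkConfiguration, MyJobConfiguration> {

    @Override
    public void load(LoaderContext context,
                     MyLinkConfiguration linkConfiguration,
                     MyJobConfiguration jobConfiguration) throws Exception {
      Object[] record;
      // Pull records from Sqoop until the reader signals the end with null.
      while ((record = context.getDataReader().readArrayRecord()) != null) {
        writeToTarget(record, linkConfiguration);
      }
    }

    // Hypothetical helper: insert one record into the target system.
    private void writeToTarget(Object[] record, MyLinkConfiguration link) {
    }
  }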
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/spi/src/main/java/org/apache/sqoop/job/etl/Partitioner.java
----------------------------------------------------------------------
diff --git a/spi/src/main/java/org/apache/sqoop/job/etl/Partitioner.java b/spi/src/main/java/org/apache/sqoop/job/etl/Partitioner.java
index f977233..8156762 100644
--- a/spi/src/main/java/org/apache/sqoop/job/etl/Partitioner.java
+++ b/spi/src/main/java/org/apache/sqoop/job/etl/Partitioner.java
@@ -23,7 +23,7 @@ import java.util.List;
* This allows a connector to define how input data is to be partitioned.
* The number of data partitions also determines the degree of parallelism.
*/
-public abstract class Partitioner<ConnectionConfiguration, JobConfiguration> {
+public abstract class Partitioner<LinkConfiguration, JobConfiguration> {
/**
* Partition input data into partitions.
@@ -31,12 +31,12 @@ public abstract class Partitioner<ConnectionConfiguration, JobConfiguration> {
* Each partition will then be processed in a separate extractor.
*
* @param context Partitioner context object
- * @param connectionConfiguration Connection configuration
+ * @param linkConfiguration Link configuration
* @param jobConfiguration Job configuration
* @return
*/
public abstract List<Partition> getPartitions(PartitionerContext context,
- ConnectionConfiguration connectionConfiguration,
+ LinkConfiguration linkConfiguration,
JobConfiguration jobConfiguration);
}
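
A minimal partitioner sketch; MyPartition is a hypothetical Partition
subclass and getMaxPartitions() is an assumed accessor on the context.

  import java.util.LinkedList;
  import java.util.List;

  import org.apache.sqoop.job.etl.Partition;
  import org.apache.sqoop.job.etl.Partitioner;
  import org.apache.sqoop.job.etl.PartitionerContext;

  public class MyPartitioner
      extends Partitioner<MyLinkConfiguration, MyJobConfiguration> {

    @Override
    public List<Partition> getPartitions(PartitionerContext context,
                                         MyLinkConfiguration linkConfiguration,
                                         MyJobConfiguration jobConfiguration) {
      List<Partition> partitions = new LinkedList<Partition>();
      // The requested maximum also caps the degree of parallelism.
      long max = context.getMaxPartitions();
      for (long i = 0; i < max; i++) {
        partitions.add(new MyPartition(i, max));
      }
      return partitions;
    }
  }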
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/spi/src/main/java/org/apache/sqoop/job/etl/To.java
----------------------------------------------------------------------
diff --git a/spi/src/main/java/org/apache/sqoop/job/etl/To.java b/spi/src/main/java/org/apache/sqoop/job/etl/To.java
index b8717ae..64ba225 100644
--- a/spi/src/main/java/org/apache/sqoop/job/etl/To.java
+++ b/spi/src/main/java/org/apache/sqoop/job/etl/To.java
@@ -21,7 +21,7 @@ package org.apache.sqoop.job.etl;
* This specifies classes that perform connector-defined steps
* within export execution:
* Initializer
- * -> (framework-defined steps)
+ * -> (Sqoop-defined steps)
* -> Loader
* -> Destroyer
*/
@@ -44,7 +44,7 @@ public class To extends Transferable {
@Override
public String toString() {
- return "Exporter{" + super.toString() +
+ return "To {" + super.toString() +
", loader=" + loader +
'}';
}
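
And the TO-side counterpart of the From wiring above; the classes are again
hypothetical, and the (initializer, loader, destroyer) argument order is an
assumption matching the workflow in the javadoc.

  import org.apache.sqoop.job.etl.To;

  public class MyConnectorTo {
    // Initializer -> (Sqoop-defined steps) -> Loader -> Destroyer;
    // no partitioner is needed on the TO side.
    static final To TO = new To(
        MyToInitializer.class,
        MyLoader.class,
        MyToDestroyer.class);
  }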
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/spi/src/main/java/org/apache/sqoop/validation/Validator.java
----------------------------------------------------------------------
diff --git a/spi/src/main/java/org/apache/sqoop/validation/Validator.java b/spi/src/main/java/org/apache/sqoop/validation/Validator.java
index f31adb5..894f412 100644
--- a/spi/src/main/java/org/apache/sqoop/validation/Validator.java
+++ b/spi/src/main/java/org/apache/sqoop/validation/Validator.java
@@ -19,20 +19,20 @@ package org.apache.sqoop.validation;
/**
- * Connection and job metadata validator.
+ * Link and Job config validator.
*
* This class should be extended by a connector to provide configuration
- * validation for connection and job configuration objects.
+ * validation for link and job configuration objects.
*/
public class Validator {
/**
- * Validate connection configuration object.
+ * Validate link configuration object.
*
- * @param connectionConfiguration Connection object to be validated
+ * @param linkConfiguration Link configuration object to be validated
* @return Validation status
*/
- public Validation validateConnection(Object connectionConfiguration) {
+ public Validation validateLink(Object linkConfiguration) {
return new Validation(EmptyClass.class);
}
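
A connector overriding the renamed hook might look like the following;
MyLinkConfiguration is hypothetical, and the message-reporting calls on
Validation are left out since their exact signatures are not shown here.

  import org.apache.sqoop.validation.Validation;
  import org.apache.sqoop.validation.Validator;

  public class MyValidator extends Validator {

    @Override
    public Validation validateLink(Object linkConfiguration) {
      MyLinkConfiguration link = (MyLinkConfiguration) linkConfiguration;
      Validation validation = new Validation(MyLinkConfiguration.class);
      // Inspect link fields and record any failures on the Validation object.
      return validation;
    }
  }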
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/submission/mapreduce/src/main/java/org/apache/sqoop/submission/mapreduce/MapreduceSubmissionEngine.java
----------------------------------------------------------------------
diff --git a/submission/mapreduce/src/main/java/org/apache/sqoop/submission/mapreduce/MapreduceSubmissionEngine.java b/submission/mapreduce/src/main/java/org/apache/sqoop/submission/mapreduce/MapreduceSubmissionEngine.java
index 93741e6..fe92ac4 100644
--- a/submission/mapreduce/src/main/java/org/apache/sqoop/submission/mapreduce/MapreduceSubmissionEngine.java
+++ b/submission/mapreduce/src/main/java/org/apache/sqoop/submission/mapreduce/MapreduceSubmissionEngine.java
@@ -17,6 +17,12 @@
*/
package org.apache.sqoop.submission.mapreduce;
+import java.io.File;
+import java.io.FilenameFilter;
+import java.io.IOException;
+import java.net.MalformedURLException;
+import java.util.Map;
+
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobClient;
@@ -29,22 +35,16 @@ import org.apache.log4j.Logger;
import org.apache.sqoop.common.Direction;
import org.apache.sqoop.common.MapContext;
import org.apache.sqoop.common.SqoopException;
+import org.apache.sqoop.driver.SubmissionEngine;
import org.apache.sqoop.execution.mapreduce.MRJobRequest;
import org.apache.sqoop.execution.mapreduce.MapreduceExecutionEngine;
-import org.apache.sqoop.framework.JobRequest;
-import org.apache.sqoop.framework.SubmissionEngine;
+import org.apache.sqoop.driver.JobRequest;
import org.apache.sqoop.job.JobConstants;
import org.apache.sqoop.job.mr.ConfigurationUtils;
+import org.apache.sqoop.submission.SubmissionStatus;
import org.apache.sqoop.submission.counter.Counter;
import org.apache.sqoop.submission.counter.CounterGroup;
import org.apache.sqoop.submission.counter.Counters;
-import org.apache.sqoop.submission.SubmissionStatus;
-
-import java.io.File;
-import java.io.FilenameFilter;
-import java.io.IOException;
-import java.net.MalformedURLException;
-import java.util.Map;
/**
@@ -156,10 +156,10 @@ public class MapreduceSubmissionEngine extends SubmissionEngine {
// Clone global configuration
Configuration configuration = new Configuration(globalConfiguration);
- // Serialize framework context into job configuration
- for(Map.Entry<String, String> entry: request.getFrameworkContext()) {
+ // Serialize driver context into job configuration
+ for(Map.Entry<String, String> entry: request.getDriverContext()) {
if (entry.getValue() == null) {
- LOG.warn("Ignoring null framework context value for key " + entry.getKey());
+ LOG.warn("Ignoring null driver context value for key " + entry.getKey());
continue;
}
configuration.set(entry.getKey(), entry.getValue());
@@ -202,13 +202,13 @@ public class MapreduceSubmissionEngine extends SubmissionEngine {
Job job = new Job(configuration);
// And finally put all configuration objects to credentials cache
- ConfigurationUtils.setConnectorConnectionConfig(Direction.FROM, job, request.getConnectorConnectionConfig(Direction.FROM));
+ ConfigurationUtils.setConnectorConnectionConfig(Direction.FROM, job, request.getConnectorLinkConfig(Direction.FROM));
ConfigurationUtils.setConnectorJobConfig(Direction.FROM, job, request.getConnectorJobConfig(Direction.FROM));
- ConfigurationUtils.setConnectorConnectionConfig(Direction.TO, job, request.getConnectorConnectionConfig(Direction.TO));
+ ConfigurationUtils.setConnectorConnectionConfig(Direction.TO, job, request.getConnectorLinkConfig(Direction.TO));
ConfigurationUtils.setConnectorJobConfig(Direction.TO, job, request.getConnectorJobConfig(Direction.TO));
- ConfigurationUtils.setFrameworkConnectionConfig(Direction.FROM, job, request.getFrameworkConnectionConfig(Direction.FROM));
- ConfigurationUtils.setFrameworkConnectionConfig(Direction.TO, job, request.getFrameworkConnectionConfig(Direction.TO));
- ConfigurationUtils.setFrameworkJobConfig(job, request.getConfigFrameworkJob());
+ ConfigurationUtils.setFrameworkConnectionConfig(Direction.FROM, job, request.getFrameworkLinkConfig(Direction.FROM));
+ ConfigurationUtils.setFrameworkConnectionConfig(Direction.TO, job, request.getFrameworkLinkConfig(Direction.TO));
+ ConfigurationUtils.setFrameworkJobConfig(job, request.getFrameworkJobConfig());
// @TODO(Abe): Persist TO schema.
ConfigurationUtils.setConnectorSchema(Direction.FROM, job, request.getSummary().getFromSchema());
@@ -413,4 +413,4 @@ public class MapreduceSubmissionEngine extends SubmissionEngine {
return "local".equals(globalConfiguration.get("mapreduce.jobtracker.address"))
|| "local".equals(globalConfiguration.get("mapred.job.tracker"));
}
-}
+}
\ No newline at end of file
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/main/java/org/apache/sqoop/test/db/DatabaseProvider.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/db/DatabaseProvider.java b/test/src/main/java/org/apache/sqoop/test/db/DatabaseProvider.java
index e0cc7c9..3ebdd5e 100644
--- a/test/src/main/java/org/apache/sqoop/test/db/DatabaseProvider.java
+++ b/test/src/main/java/org/apache/sqoop/test/db/DatabaseProvider.java
@@ -43,7 +43,7 @@ abstract public class DatabaseProvider {
/**
* Internal connection to the database.
*/
- private Connection connection;
+ private Connection databaseConnection;
/**
* JDBC Url to the remote database system.
@@ -140,9 +140,9 @@ abstract public class DatabaseProvider {
*/
public void stop() {
// Close connection to the database server
- if(connection != null) {
+ if(databaseConnection != null) {
try {
- connection.close();
+ databaseConnection.close();
} catch (SQLException e) {
LOG.info("Ignored exception on closing connection", e);
}
@@ -155,7 +155,7 @@ abstract public class DatabaseProvider {
* @return
*/
public Connection getConnection() {
- return connection;
+ return databaseConnection;
}
/**
@@ -164,7 +164,7 @@ abstract public class DatabaseProvider {
* @param connection New connection object
*/
protected void setConnection(Connection connection) {
- this.connection = connection;
+ databaseConnection = connection;
}
/**
@@ -179,7 +179,7 @@ abstract public class DatabaseProvider {
Statement stmt = null;
try {
- stmt = connection.createStatement();
+ stmt = databaseConnection.createStatement();
stmt.executeUpdate(query);
} catch (SQLException e) {
LOG.error("Error in executing query", e);
@@ -208,7 +208,7 @@ abstract public class DatabaseProvider {
Statement stmt = null;
try {
- stmt = connection.createStatement();
+ stmt = databaseConnection.createStatement();
return stmt.executeQuery(query);
} catch (SQLException e) {
LOG.error("Error in executing query", e);
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/main/java/org/apache/sqoop/test/db/DatabaseProviderFactory.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/db/DatabaseProviderFactory.java b/test/src/main/java/org/apache/sqoop/test/db/DatabaseProviderFactory.java
index bc430f1..330863a 100644
--- a/test/src/main/java/org/apache/sqoop/test/db/DatabaseProviderFactory.java
+++ b/test/src/main/java/org/apache/sqoop/test/db/DatabaseProviderFactory.java
@@ -32,7 +32,7 @@ public class DatabaseProviderFactory {
return new DerbyProvider();
}
- Class klass = Class.forName(className);
+ Class<?> klass = Class.forName(className);
return (DatabaseProvider)klass.newInstance();
}
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java b/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
index 8a699f5..6074d36 100644
--- a/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
+++ b/test/src/main/java/org/apache/sqoop/test/testcases/ConnectorTestCase.java
@@ -22,8 +22,8 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.log4j.Logger;
import org.apache.sqoop.client.SubmissionCallback;
import org.apache.sqoop.common.Direction;
-import org.apache.sqoop.connector.hdfs.configuration.OutputFormat;
-import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.connector.hdfs.configuration.ToFormat;
+import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MFormList;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.model.MPersistableEntity;
@@ -121,39 +121,38 @@ abstract public class ConnectorTestCase extends TomcatTestCase {
}
/**
- * Fill connection form based on currently active provider.
+ * Fill link form based on currently active provider.
*
- * @param connection MConnection object to fill
+ * @param link MLink object to fill
*/
- protected void fillRdbmsConnectionForm(MConnection connection) {
- MFormList forms = connection.getConnectorPart();
- forms.getStringInput("connection.jdbcDriver").setValue(provider.getJdbcDriver());
- forms.getStringInput("connection.connectionString").setValue(provider.getConnectionUrl());
- forms.getStringInput("connection.username").setValue(provider.getConnectionUsername());
- forms.getStringInput("connection.password").setValue(provider.getConnectionPassword());
+ protected void fillRdbmsLinkForm(MLink link) {
+ MFormList forms = link.getConnectorPart();
+ forms.getStringInput("link.jdbcDriver").setValue(provider.getJdbcDriver());
+ forms.getStringInput("link.connectionString").setValue(provider.getConnectionUrl());
+ forms.getStringInput("link.username").setValue(provider.getConnectionUsername());
+ forms.getStringInput("link.password").setValue(provider.getConnectionPassword());
}
/**
- * Fill output form with specific storage and output type. Mapreduce output directory
- * will be set to default test value.
+ * Fill TO form with specific storage and output type.
*
- * @param job MJOb object to fill
+ * @param job MJob object to fill
* @param output Output type that should be set
*/
- protected void fillOutputForm(MJob job, OutputFormat output) {
- MFormList forms = job.getConnectorPart(Direction.TO);
- forms.getEnumInput("output.outputFormat").setValue(output);
- forms.getStringInput("output.outputDirectory").setValue(getMapreduceDirectory());
+ protected void fillToJobForm(MJob job, ToFormat output) {
+ MFormList toForms = job.getConnectorPart(Direction.TO);
+ toForms.getEnumInput("toJobConfig.outputFormat").setValue(output);
+ toForms.getStringInput("toJobConfig.outputDirectory").setValue(getMapreduceDirectory());
}
/**
- * Fill input form. Mapreduce input directory will be set to default test value.
+ * Fill FROM form.
*
- * @param job MJOb object to fill
+ * @param job MJob object to fill
*/
- protected void fillInputForm(MJob job) {
+ protected void fillFromJobForm(MJob job) {
MFormList forms = job.getConnectorPart(Direction.FROM);
- forms.getStringInput("input.inputDirectory").setValue(getMapreduceDirectory());
+ forms.getStringInput("fromJobConfig.inputDirectory").setValue(getMapreduceDirectory());
}
/**
@@ -204,15 +203,15 @@ abstract public class ConnectorTestCase extends TomcatTestCase {
}
/**
- * Create connection.
+ * Create link.
*
* With asserts to make sure that it was created correctly.
*
- * @param connection
+ * @param link
*/
- protected void createConnection(MConnection connection) {
- assertEquals(Status.FINE, getClient().createConnection(connection));
- assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT, connection.getPersistenceId());
+ protected void saveLink(MLink link) {
+ assertEquals(Status.FINE, getClient().saveLink(link));
+ assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT, link.getPersistenceId());
}
/**
@@ -222,8 +221,8 @@ abstract public class ConnectorTestCase extends TomcatTestCase {
*
* @param job
*/
- protected void createJob(MJob job) {
- assertEquals(Status.FINE, getClient().createJob(job));
+ protected void saveJob(MJob job) {
+ assertEquals(Status.FINE, getClient().saveJob(job));
assertNotSame(MPersistableEntity.PERSISTANCE_ID_DEFAULT, job.getPersistenceId());
}
@@ -233,7 +232,7 @@ abstract public class ConnectorTestCase extends TomcatTestCase {
* @param jid Job id
* @throws Exception
*/
- protected void runJob(long jid) throws Exception {
+ protected void executeJob(long jid) throws Exception {
getClient().startSubmission(jid, DEFAULT_SUBMISSION_CALLBACKS, 100);
}
@@ -243,7 +242,7 @@ abstract public class ConnectorTestCase extends TomcatTestCase {
* @param job Job object
* @throws Exception
*/
- protected void runJob(MJob job) throws Exception {
- runJob(job.getPersistenceId());
+ protected void executeJob(MJob job) throws Exception {
+ executeJob(job.getPersistenceId());
}
}
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
----------------------------------------------------------------------
diff --git a/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java b/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
index 7e2558f..63736ab 100644
--- a/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
+++ b/test/src/main/java/org/apache/sqoop/test/testcases/TomcatTestCase.java
@@ -156,7 +156,7 @@ abstract public class TomcatTestCase {
}
/**
- * Assert that mapreduce has generated following lines.
+ * Assert that execution has generated the following lines.
*
* As the lines can be spread across multiple files, the ordering does not make
* a difference.
@@ -164,27 +164,29 @@ abstract public class TomcatTestCase {
* @param lines
* @throws IOException
*/
- protected void assertMapreduceOutput(String... lines) throws IOException {
+ protected void assertTo(String... lines) throws IOException {
+ // TODO(VB): fix this so that it does not depend directly on mapreduce
HdfsAsserts.assertMapreduceOutput(hdfsClient, getMapreduceDirectory(), lines);
}
/**
- * Verify number of output mapreduce files.
+ * Verify number of TO files.
*
* @param expectedFiles Expected number of files
*/
- protected void assertMapreduceOutputFiles(int expectedFiles) throws IOException {
+ protected void assertToFiles(int expectedFiles) throws IOException {
+ // TODO(VB): fix this so that it does not depend directly on mapreduce
HdfsAsserts.assertMapreduceOutputFiles(hdfsClient, getMapreduceDirectory(), expectedFiles);
}
/**
- * Create mapreduce input file with specified content.
+ * Create FROM file with specified content.
*
* @param filename Input file name
* @param lines Individual lines that should be written into the file
* @throws IOException
*/
- protected void createInputMapreduceFile(String filename, String...lines) throws IOException {
+ protected void createFromFile(String filename, String...lines) throws IOException {
HdfsUtils.createFile(hdfsClient, HdfsUtils.joinPathFragments(getMapreduceDirectory(), filename), lines);
}
}
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
index c01aa80..5e1abc1 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromHDFSToRDBMSTest.java
@@ -17,10 +17,9 @@
*/
package org.apache.sqoop.integration.connector.jdbc.generic;
-import org.apache.log4j.Logger;
import org.apache.sqoop.common.Direction;
import org.apache.sqoop.test.testcases.ConnectorTestCase;
-import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MFormList;
import org.apache.sqoop.model.MJob;
import org.junit.Test;
@@ -32,38 +31,35 @@ import static org.junit.Assert.assertEquals;
*/
public class FromHDFSToRDBMSTest extends ConnectorTestCase {
- private static final Logger LOG = Logger.getLogger(FromHDFSToRDBMSTest.class);
-
@Test
public void testBasic() throws Exception {
createTableCities();
- createInputMapreduceFile("input-0001",
+ createFromFile("input-0001",
"1,'USA','San Francisco'",
"2,'USA','Sunnyvale'",
"3,'Czech Republic','Brno'",
"4,'USA','Palo Alto'"
);
- // RDBMS connection
- MConnection rdbmsConnection = getClient().newConnection("generic-jdbc-connector");
- fillRdbmsConnectionForm(rdbmsConnection);
- createConnection(rdbmsConnection);
+ // RDBMS link
+ MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
+ fillRdbmsLinkForm(rdbmsLink);
+ saveLink(rdbmsLink);
- // HDFS connection
- MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
- createConnection(hdfsConnection);
+ // HDFS link
+ MLink hdfsLink = getClient().createLink("hdfs-connector");
+ saveLink(hdfsLink);
// Job creation
- MJob job = getClient().newJob(hdfsConnection.getPersistenceId(), rdbmsConnection.getPersistenceId());
+ MJob job = getClient().createJob(hdfsLink.getPersistenceId(), rdbmsLink.getPersistenceId());
// Connector values
- MFormList fromForms = job.getConnectorPart(Direction.FROM);
MFormList toForms = job.getConnectorPart(Direction.TO);
- toForms.getStringInput("toTable.tableName").setValue(provider.escapeTableName(getTableName()));
- fillInputForm(job);
- createJob(job);
+ toForms.getStringInput("toJobConfig.tableName").setValue(provider.escapeTableName(getTableName()));
+ fillFromJobForm(job);
+ saveJob(job);
- runJob(job);
+ executeJob(job);
assertEquals(4L, rowCount());
assertRowInCities(1, "USA", "San Francisco");
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
index f976f29..2dc0613 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/FromRDBMSToHDFSTest.java
@@ -17,10 +17,9 @@
*/
package org.apache.sqoop.integration.connector.jdbc.generic;
-import org.apache.log4j.Logger;
import org.apache.sqoop.common.Direction;
-import org.apache.sqoop.connector.hdfs.configuration.OutputFormat;
-import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.connector.hdfs.configuration.ToFormat;
+import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MFormList;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.model.MSubmission;
@@ -34,35 +33,33 @@ import static org.junit.Assert.assertTrue;
*/
public class FromRDBMSToHDFSTest extends ConnectorTestCase {
- private static final Logger LOG = Logger.getLogger(FromRDBMSToHDFSTest.class);
-
@Test
public void testBasic() throws Exception {
createAndLoadTableCities();
- // RDBMS connection
- MConnection rdbmsConnection = getClient().newConnection("generic-jdbc-connector");
- fillRdbmsConnectionForm(rdbmsConnection);
- createConnection(rdbmsConnection);
+ // RDBMS link
+ MLink rdbmsConnection = getClient().createLink("generic-jdbc-connector");
+ fillRdbmsLinkForm(rdbmsConnection);
+ saveLink(rdbmsConnection);
- // HDFS connection
- MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
- createConnection(hdfsConnection);
+ // HDFS link
+ MLink hdfsConnection = getClient().createLink("hdfs-connector");
+ saveLink(hdfsConnection);
// Job creation
- MJob job = getClient().newJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+ MJob job = getClient().createJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
// Connector values
MFormList forms = job.getConnectorPart(Direction.FROM);
- forms.getStringInput("fromTable.tableName").setValue(provider.escapeTableName(getTableName()));
- forms.getStringInput("fromTable.partitionColumn").setValue(provider.escapeColumnName("id"));
- fillOutputForm(job, OutputFormat.TEXT_FILE);
- createJob(job);
+ forms.getStringInput("fromJobConfig.tableName").setValue(provider.escapeTableName(getTableName()));
+ forms.getStringInput("fromJobConfig.partitionColumn").setValue(provider.escapeColumnName("id"));
+ fillToJobForm(job, ToFormat.TEXT_FILE);
+ saveJob(job);
- runJob(job);
+ executeJob(job);
// Assert correct output
- assertMapreduceOutput(
+ assertTo(
"1,'USA','San Francisco'",
"2,'USA','Sunnyvale'",
"3,'Czech Republic','Brno'",
@@ -77,25 +74,25 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
public void testColumns() throws Exception {
createAndLoadTableCities();
- // RDBMS connection
- MConnection rdbmsConnection = getClient().newConnection("generic-jdbc-connector");
- fillRdbmsConnectionForm(rdbmsConnection);
- createConnection(rdbmsConnection);
+ // RDBMS link
+ MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
+ fillRdbmsLinkForm(rdbmsLink);
+ saveLink(rdbmsLink);
- // HDFS connection
- MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
- createConnection(hdfsConnection);
+ // HDFS link
+ MLink hdfsLink = getClient().createLink("hdfs-connector");
+ saveLink(hdfsLink);
// Job creation
- MJob job = getClient().newJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+ MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
// Connector values
MFormList forms = job.getConnectorPart(Direction.FROM);
- forms.getStringInput("fromTable.tableName").setValue(provider.escapeTableName(getTableName()));
- forms.getStringInput("fromTable.partitionColumn").setValue(provider.escapeColumnName("id"));
- forms.getStringInput("fromTable.columns").setValue(provider.escapeColumnName("id") + "," + provider.escapeColumnName("country"));
- fillOutputForm(job, OutputFormat.TEXT_FILE);
- createJob(job);
+ forms.getStringInput("fromJobConfig.tableName").setValue(provider.escapeTableName(getTableName()));
+ forms.getStringInput("fromJobConfig.partitionColumn").setValue(provider.escapeColumnName("id"));
+ forms.getStringInput("fromJobConfig.columns").setValue(provider.escapeColumnName("id") + "," + provider.escapeColumnName("country"));
+ fillToJobForm(job, ToFormat.TEXT_FILE);
+ saveJob(job);
MSubmission submission = getClient().startSubmission(job.getPersistenceId());
assertTrue(submission.getStatus().isRunning());
@@ -108,7 +105,7 @@ public class FromRDBMSToHDFSTest extends ConnectorTestCase {
} while(submission.getStatus().isRunning());
// Assert correct output
- assertMapreduceOutput(
+ assertTo(
"1,'USA'",
"2,'USA'",
"3,'Czech Republic'",
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
index fac7e8b..729f95e 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/PartitionerTest.java
@@ -17,10 +17,9 @@
*/
package org.apache.sqoop.integration.connector.jdbc.generic;
-import org.apache.log4j.Logger;
import org.apache.sqoop.common.Direction;
-import org.apache.sqoop.connector.hdfs.configuration.OutputFormat;
-import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.connector.hdfs.configuration.ToFormat;
+import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MFormList;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.test.testcases.ConnectorTestCase;
@@ -35,8 +34,6 @@ import org.junit.Test;
@RunWith(Parameterized.class)
public class PartitionerTest extends ConnectorTestCase {
- private static final Logger LOG = Logger.getLogger(PartitionerTest.class);
-
/**
* Columns that we will use as partition column with maximal number of
* partitions that can be created for such column.
@@ -75,32 +72,32 @@ public class PartitionerTest extends ConnectorTestCase {
public void testSplitter() throws Exception {
createAndLoadTableUbuntuReleases();
- // RDBMS connection
- MConnection rdbmsConnection = getClient().newConnection("generic-jdbc-connector");
- fillRdbmsConnectionForm(rdbmsConnection);
- createConnection(rdbmsConnection);
+ // RDBMS link
+ MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
+ fillRdbmsLinkForm(rdbmsLink);
+ saveLink(rdbmsLink);
- // HDFS connection
- MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
- createConnection(hdfsConnection);
+ // HDFS link
+ MLink hdfsLink = getClient().createLink("hdfs-connector");
+ saveLink(hdfsLink);
// Job creation
- MJob job = getClient().newJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+ MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
// Connector values
MFormList forms = job.getConnectorPart(Direction.FROM);
- forms.getStringInput("fromTable.tableName").setValue(provider.escapeTableName(getTableName()));
- forms.getStringInput("fromTable.partitionColumn").setValue(provider.escapeColumnName(partitionColumn));
- fillOutputForm(job, OutputFormat.TEXT_FILE);
+ forms.getStringInput("fromJobConfig.tableName").setValue(provider.escapeTableName(getTableName()));
+ forms.getStringInput("fromJobConfig.partitionColumn").setValue(provider.escapeColumnName(partitionColumn));
+ fillToJobForm(job, ToFormat.TEXT_FILE);
forms = job.getFrameworkPart();
forms.getIntegerInput("throttling.extractors").setValue(extractors);
- createJob(job);
+ saveJob(job);
- runJob(job);
+ executeJob(job);
// Assert correct output
- assertMapreduceOutputFiles((extractors > maxOutputFiles) ? maxOutputFiles : extractors);
- assertMapreduceOutput(
+ assertToFiles((extractors > maxOutputFiles) ? maxOutputFiles : extractors);
+ assertTo(
"1,'Warty Warthog',4.10,2004-10-20,false",
"2,'Hoary Hedgehog',5.04,2005-04-08,false",
"3,'Breezy Badger',5.10,2005-10-13,false",
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
index 1af0cdc..562a6a6 100644
--- a/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/connector/jdbc/generic/TableStagedRDBMSTest.java
@@ -18,7 +18,7 @@
package org.apache.sqoop.integration.connector.jdbc.generic;
import org.apache.sqoop.common.Direction;
-import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MFormList;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.test.data.Cities;
@@ -36,7 +36,7 @@ public class TableStagedRDBMSTest extends ConnectorTestCase {
public void testStagedTransfer() throws Exception {
final String stageTableName = "STAGE_" + getTableName();
createTableCities();
- createInputMapreduceFile("input-0001",
+ createFromFile("input-0001",
"1,'USA','San Francisco'",
"2,'USA','Sunnyvale'",
"3,'Czech Republic','Brno'",
@@ -44,29 +44,29 @@ public class TableStagedRDBMSTest extends ConnectorTestCase {
);
new Cities(provider, stageTableName).createTables();
- // RDBMS connection
- MConnection rdbmsConnection = getClient().newConnection("generic-jdbc-connector");
- fillRdbmsConnectionForm(rdbmsConnection);
- createConnection(rdbmsConnection);
+ // RDBMS link
+ MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
+ fillRdbmsLinkForm(rdbmsLink);
+ saveLink(rdbmsLink);
- // HDFS connection
- MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
- createConnection(hdfsConnection);
+ // HDFS link
+ MLink hdfsLink = getClient().createLink("hdfs-connector");
+ saveLink(hdfsLink);
// Job creation
- MJob job = getClient().newJob(hdfsConnection.getPersistenceId(),
- rdbmsConnection.getPersistenceId());
+ MJob job = getClient().createJob(hdfsLink.getPersistenceId(),
+ rdbmsLink.getPersistenceId());
// Connector values
MFormList forms = job.getConnectorPart(Direction.TO);
- forms.getStringInput("toTable.tableName").setValue(
+ forms.getStringInput("toJobConfig.tableName").setValue(
provider.escapeTableName(getTableName()));
- forms.getStringInput("toTable.stageTableName").setValue(
+ forms.getStringInput("toJobConfig.stageTableName").setValue(
provider.escapeTableName(stageTableName));
- fillInputForm(job);
- createJob(job);
+ fillFromJobForm(job);
+ saveJob(job);
- runJob(job);
+ executeJob(job);
assertEquals(0L, provider.rowCount(stageTableName));
assertEquals(4L, rowCount());
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java b/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
index 3c01cb0..507ac53 100644
--- a/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/server/SubmissionWithDisabledModelObjectsTest.java
@@ -20,9 +20,9 @@ package org.apache.sqoop.integration.server;
import org.apache.sqoop.client.ClientError;
import org.apache.sqoop.common.Direction;
import org.apache.sqoop.common.SqoopException;
-import org.apache.sqoop.connector.hdfs.configuration.OutputFormat;
-import org.apache.sqoop.framework.FrameworkError;
-import org.apache.sqoop.model.MConnection;
+import org.apache.sqoop.connector.hdfs.configuration.ToFormat;
+import org.apache.sqoop.driver.DriverError;
+import org.apache.sqoop.model.MLink;
import org.apache.sqoop.model.MFormList;
import org.apache.sqoop.model.MJob;
import org.apache.sqoop.test.testcases.ConnectorTestCase;
@@ -39,7 +39,7 @@ import static org.junit.Assert.fail;
/**
* Ensure that the server will reject starting a job when either the job itself
- * or corresponding connection is disabled.
+ * or the corresponding link is disabled.
*/
@RunWith(Parameterized.class)
public class SubmissionWithDisabledModelObjectsTest extends ConnectorTestCase {
@@ -53,11 +53,11 @@ public class SubmissionWithDisabledModelObjectsTest extends ConnectorTestCase {
});
}
- private boolean enabledConnection;
+ private boolean enabledLink;
private boolean enabledJob;
- public SubmissionWithDisabledModelObjectsTest(boolean enabledConnection, boolean enabledJob) {
- this.enabledConnection = enabledConnection;
+ public SubmissionWithDisabledModelObjectsTest(boolean enabledLink, boolean enabledJob) {
+ this.enabledLink = enabledLink;
this.enabledJob = enabledJob;
}
@@ -65,33 +65,33 @@ public class SubmissionWithDisabledModelObjectsTest extends ConnectorTestCase {
public void testWithDisabledObjects() throws Exception {
createAndLoadTableCities();
- // RDBMS connection
- MConnection rdbmsConnection = getClient().newConnection("generic-jdbc-connector");
- fillRdbmsConnectionForm(rdbmsConnection);
- createConnection(rdbmsConnection);
+ // RDBMS link
+ MLink rdbmsLink = getClient().createLink("generic-jdbc-connector");
+ fillRdbmsLinkForm(rdbmsLink);
+ saveLink(rdbmsLink);
- // HDFS connection
- MConnection hdfsConnection = getClient().newConnection("hdfs-connector");
- createConnection(hdfsConnection);
+ // HDFS link
+ MLink hdfsLink = getClient().createLink("hdfs-connector");
+ saveLink(hdfsLink);
// Job creation
- MJob job = getClient().newJob(rdbmsConnection.getPersistenceId(), hdfsConnection.getPersistenceId());
+ MJob job = getClient().createJob(rdbmsLink.getPersistenceId(), hdfsLink.getPersistenceId());
// Connector values
MFormList forms = job.getConnectorPart(Direction.FROM);
- forms.getStringInput("fromTable.tableName").setValue(provider.escapeTableName(getTableName()));
- forms.getStringInput("fromTable.partitionColumn").setValue(provider.escapeColumnName("id"));
+ forms.getStringInput("fromJobConfig.tableName").setValue(provider.escapeTableName(getTableName()));
+ forms.getStringInput("fromJobConfig.partitionColumn").setValue(provider.escapeColumnName("id"));
// Framework values
- fillOutputForm(job, OutputFormat.TEXT_FILE);
- createJob(job);
+ fillToJobForm(job, ToFormat.TEXT_FILE);
+ saveJob(job);
// Disable model entities as per parametrized run
- getClient().enableConnection(rdbmsConnection.getPersistenceId(), enabledConnection);
+ getClient().enableLink(rdbmsLink.getPersistenceId(), enabledLink);
getClient().enableJob(job.getPersistenceId(), enabledJob);
- // Try to run the job and verify that the it was not executed
+ // Try to execute the job and verify that it was not executed
try {
- runJob(job);
+ executeJob(job);
fail("Expected exception as the model classes are disabled.");
} catch(SqoopException ex) {
// Top level exception should be CLIENT_0001
@@ -104,9 +104,9 @@ public class SubmissionWithDisabledModelObjectsTest extends ConnectorTestCase {
assertNotNull(cause);
if(!enabledJob) {
- assertTrue(cause.getMessage().startsWith(FrameworkError.FRAMEWORK_0009.toString()));
- } else if(!enabledConnection) {
- assertTrue(cause.getMessage().startsWith(FrameworkError.FRAMEWORK_0010.toString()));
+ assertTrue(cause.getMessage().startsWith(DriverError.DRIVER_0009.toString()));
+ } else if(!enabledLink) {
+ assertTrue(cause.getMessage().startsWith(DriverError.DRIVER_0010.toString()));
} else {
fail("Unexpected expception retrieved from server " + cause);
}
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java
----------------------------------------------------------------------
diff --git a/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java b/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java
index cea24b9..beed82e 100644
--- a/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java
+++ b/test/src/test/java/org/apache/sqoop/integration/server/VersionTest.java
@@ -17,13 +17,13 @@
*/
package org.apache.sqoop.integration.server;
-import org.apache.sqoop.client.request.VersionRequest;
+import org.apache.sqoop.client.request.VersionResourceRequest;
import org.apache.sqoop.common.VersionInfo;
import org.apache.sqoop.test.testcases.TomcatTestCase;
import org.apache.sqoop.json.VersionBean;
import org.junit.Test;
-import static junit.framework.Assert.assertEquals;
+import static org.junit.Assert.assertEquals;
/**
* Basic test to check that server is working and returning correct version info.
@@ -32,8 +32,8 @@ public class VersionTest extends TomcatTestCase {
@Test
public void testVersion() {
- VersionRequest versionRequest = new VersionRequest();
- VersionBean versionBean = versionRequest.doGet(getServerUrl());
+ VersionResourceRequest versionRequest = new VersionResourceRequest();
+ VersionBean versionBean = versionRequest.read(getServerUrl());
assertEquals(versionBean.getVersion(), VersionInfo.getVersion());
assertEquals(versionBean.getDate(), VersionInfo.getDate());
http://git-wip-us.apache.org/repos/asf/sqoop/blob/049994a0/tools/src/main/java/org/apache/sqoop/tools/tool/UpgradeTool.java
----------------------------------------------------------------------
diff --git a/tools/src/main/java/org/apache/sqoop/tools/tool/UpgradeTool.java b/tools/src/main/java/org/apache/sqoop/tools/tool/UpgradeTool.java
index b8a15cb..f117411 100644
--- a/tools/src/main/java/org/apache/sqoop/tools/tool/UpgradeTool.java
+++ b/tools/src/main/java/org/apache/sqoop/tools/tool/UpgradeTool.java
@@ -18,16 +18,15 @@
package org.apache.sqoop.tools.tool;
import org.apache.sqoop.connector.ConnectorManager;
-import org.apache.sqoop.framework.FrameworkManager;
+import org.apache.sqoop.driver.Driver;
import org.apache.sqoop.repository.RepositoryManager;
import org.apache.sqoop.tools.ConfiguredTool;
-
import org.apache.log4j.Logger;
/**
* Upgrade all versionable components inside Sqoop2. This includes any
- * structural changes inside repository and the Connector and Framework
- * metadata. This tool is idempotent.
+ * structural changes inside the repository and the Connector and Driver entity data.
+ * This tool is idempotent.
*/
public class UpgradeTool extends ConfiguredTool {
@@ -39,24 +38,24 @@ public class UpgradeTool extends ConfiguredTool {
LOG.info("Initializing the RepositoryManager with immutable option turned off.");
RepositoryManager.getInstance().initialize(false);
- LOG.info("Initializing the FrameworkManager with upgrade option turned on.");
- FrameworkManager.getInstance().initialize(true);
+ LOG.info("Initializing the Driver with upgrade option turned on.");
+ Driver.getInstance().initialize(true);
- LOG.info("Initializing the FrameworkManager with upgrade option turned on.");
+ LOG.info("Initializing the Connection Manager with upgrade option turned on.");
ConnectorManager.getInstance().initialize(true);
LOG.info("Upgrade completed successfully.");
LOG.info("Tearing all managers down.");
ConnectorManager.getInstance().destroy();
- FrameworkManager.getInstance().destroy();
+ Driver.getInstance().destroy();
RepositoryManager.getInstance().destroy();
return true;
} catch (Exception ex) {
- LOG.error("Can't finish upgrading all components:", ex);
+ LOG.error("Can't finish upgrading RepositoryManager, Driver and ConnectionManager:", ex);
System.out.println("Upgrade has failed, please check Server logs for further details.");
return false;
}
}
-}
+}
\ No newline at end of file