Posted to mapreduce-commits@hadoop.apache.org by to...@apache.org on 2009/12/07 22:42:40 UTC

svn commit: r888144 [1/3] - in /hadoop/mapreduce/trunk: ./ src/contrib/sqoop/doc/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/ src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ s...

Author: tomwhite
Date: Mon Dec  7 21:42:38 2009
New Revision: 888144

URL: http://svn.apache.org/viewvc?rev=888144&view=rev
Log:
MAPREDUCE-1168. Export data to databases via Sqoop. Contributed by Aaron Kimball.

Added:
    hadoop/mapreduce/trunk/src/contrib/sqoop/doc/export.txt
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ExportJobContext.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/ExportJob.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/SequenceFileExportMapper.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/TextExportMapper.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ExportException.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportException.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestExport.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSqoopOptions.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/BaseSqoopTestCase.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ExportJobTestCase.java
Removed:
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ImportOptions.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/ImportError.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestImportOptions.java
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/sqoop/doc/Sqoop-manpage.txt
    hadoop/mapreduce/trunk/src/contrib/sqoop/doc/SqoopUserGuide.txt
    hadoop/mapreduce/trunk/src/contrib/sqoop/doc/api-reference.txt
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ConnFactory.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/HiveImport.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DefaultManagerFactory.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/GenericJdbcManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/HsqldbManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ImportJobContext.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/LocalMySQLManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ManagerFactory.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/SqlManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapred/ImportJob.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/mapreduce/DataDrivenImportJob.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/ClassWriter.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/CompilationManager.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/orm/TableClassName.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/util/DirectImportUtils.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/SmokeTests.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestConnFactory.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestMultiMaps.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestSplitBy.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/TestWhere.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/hive/TestHiveImport.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/LocalMySQLTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/MySQLAuthTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/OracleManagerTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/manager/PostgresqlTest.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestClassWriter.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/orm/TestParseMethods.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/HsqldbTestServer.java
    hadoop/mapreduce/trunk/src/contrib/sqoop/src/test/org/apache/hadoop/sqoop/testutil/ImportJobTestCase.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Mon Dec  7 21:42:38 2009
@@ -13,6 +13,9 @@
     transfers using a shared JobTracker generated key.
     (Boris Shkolnik via ddas)
 
+    MAPREDUCE-1168. Export data to databases via Sqoop. (Aaron Kimball via
+    tomwhite)
+
   IMPROVEMENTS
 
     MAPREDUCE-1198. Alternatively schedule different types of tasks in

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/doc/Sqoop-manpage.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/doc/Sqoop-manpage.txt?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/doc/Sqoop-manpage.txt (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/doc/Sqoop-manpage.txt Mon Dec  7 21:42:38 2009
@@ -113,6 +113,12 @@
   When using direct mode, write to multiple files of
   approximately _size_ bytes each.
 
+Export control options
+~~~~~~~~~~~~~~~~~~~~~~
+
+--export-dir (dir)::
+  Export from an HDFS path into a table (set with
+  --table)
 
 Output line formatting options
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
@@ -145,6 +151,14 @@
 --package-name (package)::
   Puts auto-generated classes in the named Java package
 
+Library loading options
+~~~~~~~~~~~~~~~~~~~~~~~
+--jar-file (file)::
+  Disable code generation; use specified jar
+
+--class-name (name)::
+  The class within the jar that represents the table to import/export
+
 Additional commands
 ~~~~~~~~~~~~~~~~~~~
 

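The new export and library-loading switches documented above map onto accessors of the SqoopOptions class added by this commit. A minimal sketch of parsing them programmatically (the connect string, jar name, and class name are illustrative values only):

----
import org.apache.hadoop.sqoop.SqoopOptions;

public class ParseExportArgs {
  public static void main(String[] args) throws Exception {
    SqoopOptions opts = new SqoopOptions();
    // --export-dir selects the Export action; --jar-file/--class-name bypass code generation.
    opts.parse(new String[] {
        "--connect", "jdbc:mysql://db.example.com/foo",
        "--table", "bar",
        "--export-dir", "/results/bar_data",
        "--jar-file", "bar.jar",
        "--class-name", "Bar"
    });
    opts.validate();
    System.out.println(opts.getAction());          // Export
    System.out.println(opts.getExportDir());       // /results/bar_data
    System.out.println(opts.getExistingJarName()); // bar.jar
  }
}
----
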
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/doc/SqoopUserGuide.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/doc/SqoopUserGuide.txt?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/doc/SqoopUserGuide.txt (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/doc/SqoopUserGuide.txt Mon Dec  7 21:42:38 2009
@@ -59,6 +59,8 @@
 
 include::hive.txt[]
 
+include::export.txt[]
+
 include::supported-dbs.txt[]
 
 include::api-reference.txt[]

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/doc/api-reference.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/doc/api-reference.txt?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/doc/api-reference.txt (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/doc/api-reference.txt Mon Dec  7 21:42:38 2009
@@ -81,14 +81,14 @@
 rewrite the majority of +getColNames()+.
 
 +ConnManager+ implementations receive a lot of their configuration data from a
-Sqoop-specific class, +ImportOptions+. While +ImportOptions+ does not currently
-contain many setter methods, clients should not assume +ImportOptions+ are
-immutable. More setter methods may be added in the future.  +ImportOptions+ does
+Sqoop-specific class, +SqoopOptions+. While +SqoopOptions+ does not currently
+contain many setter methods, clients should not assume +SqoopOptions+ are
+immutable. More setter methods may be added in the future.  +SqoopOptions+ does
 not directly store specific per-manager options. Instead, it contains a
 reference to the +Configuration+ returned by +Tool.getConf()+ after parsing
 command-line arguments with the +GenericOptionsParser+. This allows extension
 arguments via "+-D any.specific.param=any.value+" without requiring any layering
-of options parsing or modification of +ImportOptions+.
+of options parsing or modification of +SqoopOptions+.
 
 All existing +ConnManager+ implementations are stateless. Thus, the system which
 instantiates +ConnManagers+ may implement multiple instances of the same
@@ -102,7 +102,7 @@
 +ManagerFactory+ implementation should be provided with the new ConnManager.
 +ManagerFactory+ has a single method of note, named +accept()+. This method will
 determine whether it can instantiate a +ConnManager+ for the user's
-+ImportOptions+. If so, it returns the +ConnManager+ instance. Otherwise, it
++SqoopOptions+. If so, it returns the +ConnManager+ instance. Otherwise, it
 returns +null+.
 
 The +ManagerFactory+ implementations used are governed by the
@@ -110,7 +110,7 @@
 libraries can install the 3rd-party library containing a new +ManagerFactory+
 and +ConnManager+(s), and configure sqoop-site.xml to use the new
 +ManagerFactory+.  The +DefaultManagerFactory+ principally discriminates between
-databases by parsing the connect string stored in +ImportOptions+.
+databases by parsing the connect string stored in +SqoopOptions+.
 
 Extension authors may make use of classes in the +org.apache.hadoop.sqoop.io+,
 +mapred+, +mapreduce+, and +util+ packages to facilitate their implementations.
@@ -124,7 +124,7 @@
 This section describes the internal architecture of Sqoop.
 
 The Sqoop program is driven by the +org.apache.hadoop.sqoop.Sqoop+ main class.
-A limited number of additional classes are in the same package; +ImportOptions+
+A limited number of additional classes are in the same package; +SqoopOptions+
 (described earlier) and +ConnFactory+ (which manipulates +ManagerFactory+
 instances).
 
@@ -135,11 +135,11 @@
 
 +org.apache.hadoop.sqoop.Sqoop+ is the main class and implements _Tool_. A new
 instance is launched with +ToolRunner+. It parses its arguments using the
-+ImportOptions+ class.  Within the +ImportOptions+, an +ImportAction+ will be
++SqoopOptions+ class.  Within the +SqoopOptions+, an +ImportAction+ will be
 chosen by the user. This may be import all tables, import one specific table,
 execute a SQL statement, or others.
 
-A +ConnManager+ is then instantiated based on the data in the +ImportOptions+.
+A +ConnManager+ is then instantiated based on the data in the +SqoopOptions+.
 The +ConnFactory+ is used to get a +ConnManager+ from a +ManagerFactory+; the
 mechanics of this were described in an earlier section.
 
@@ -161,7 +161,7 @@
 extended with additional parameters in the future, which optionally further
 direct the import operation. Similarly, the +exportTable()+ method receives an
 argument of type +ExportJobContext+. These classes contain the name of the table
-to import/export, a reference to the +ImportOptions+ object, and other related
+to import/export, a reference to the +SqoopOptions+ object, and other related
 data.
 
 Subpackages
@@ -207,8 +207,8 @@
   importers.
 * +Executor+ launches external processes and connects these to stream handlers
   generated by an AsyncSink (see more detail below).
-* +ExportError+ is thrown by +ConnManagers+ when exports fail.
-* +ImportError+ is thrown by +ConnManagers+ when imports fail.
+* +ExportException+ is thrown by +ConnManagers+ when exports fail.
+* +ImportException+ is thrown by +ConnManagers+ when imports fail.
 * +JdbcUrl+ handles parsing of connect strings, which are URL-like but not
   specification-conforming. (In particular, JDBC connect strings may have
   +multi:part:scheme://+ components.)

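The accept() contract described above lends itself to a small extension sketch. The exact method signature and the GenericJdbcManager constructor are assumed here from the surrounding description rather than quoted from the patch, and the "acme" scheme and driver class are purely hypothetical:

----
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager;
import org.apache.hadoop.sqoop.manager.GenericJdbcManager;
import org.apache.hadoop.sqoop.manager.ManagerFactory;

public class AcmeManagerFactory extends ManagerFactory {
  public ConnManager accept(SqoopOptions options) {
    String connectStr = options.getConnectString();
    if (connectStr != null && connectStr.startsWith("jdbc:acme:")) {
      // Hypothetical vendor: reuse the generic JDBC manager with a specific driver class.
      return new GenericJdbcManager("com.acme.jdbc.Driver", options);
    }
    return null; // let some other registered factory handle this connect string
  }
}
----

A factory like this would then be registered through sqoop-site.xml, as described above.
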
Added: hadoop/mapreduce/trunk/src/contrib/sqoop/doc/export.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/doc/export.txt?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/doc/export.txt (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/doc/export.txt Mon Dec  7 21:42:38 2009
@@ -0,0 +1,58 @@
+
+////
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+////
+
+
+Exporting to a Database
+-----------------------
+
+In addition to importing database tables into HDFS, Sqoop can also
+work in "reverse," reading the contents of a file or directory in
+HDFS, interpreting the data as database rows, and inserting them
+into a specified database table.
+
+To run an export, invoke Sqoop with the +--export-dir+ and
++--table+ options. e.g.:
+
+----
+$ sqoop --connect jdbc:mysql://db.example.com/foo --table bar  \
+    --export-dir /results/bar_data
+----
+
+This will take the files in +/results/bar_data+ and inject their
+contents into the +bar+ table in the +foo+ database on +db.example.com+.
+The target table must already exist in the database. Sqoop will perform
+a set of +INSERT INTO+ operations, without regard for existing content. If
+Sqoop attempts to insert rows which violate constraints in the database
+(e.g., a particular primary key value already exists), then the export
+will fail.
+
+As in import mode, Sqoop will auto-generate an interoperability class
+to use with the particular table in question. This will be used to parse
+the records in HDFS files before loading their contents into the database.
+You must specify the same delimiters (e.g., with +--fields-terminated-by+,
+etc.) as are used in the files to export in order to parse the data
+correctly. If your data is stored in SequenceFiles (created with an import
+in the +--as-sequencefile+ format), then you do not need to specify
+delimiters.
+
+If you have an existing auto-generated jar and class that you intend to use
+with Sqoop, you can specify these with the +--jar-file+ and +--class-name+
+parameters. Providing these options will disable autogeneration of a new
+class based on the target table.
+
+

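The interoperability class mentioned above is generated per table, so it cannot be quoted here; the following is an illustrative stand-in showing the shape of the text-export parsing step. BarRecord and its parse() method are hypothetical, and the real generated class also knows how to write itself to the database:

----
import java.io.IOException;

import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class BarTextExportMapper
    extends Mapper<LongWritable, Text, Text, NullWritable> {

  /** Hypothetical stand-in for the class Sqoop would generate for the "bar" table. */
  static class BarRecord {
    String id;
    String msg;

    void parse(Text line, char fieldDelim) {
      // Split on the same delimiter the data was written with (--fields-terminated-by).
      String[] fields = line.toString().split(String.valueOf(fieldDelim), -1);
      this.id = fields[0];
      this.msg = fields.length > 1 ? fields[1] : null;
    }
  }

  private final BarRecord record = new BarRecord();

  @Override
  protected void map(LongWritable key, Text line, Context context)
      throws IOException, InterruptedException {
    record.parse(line, ',');
    // In the real export job the parsed record is handed to an output format that
    // issues INSERT INTO statements; re-emitting it here keeps the sketch self-contained.
    context.write(new Text(record.id + "," + record.msg), NullWritable.get());
  }
}
----
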
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ConnFactory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ConnFactory.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ConnFactory.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/ConnFactory.java Mon Dec  7 21:42:38 2009
@@ -87,7 +87,7 @@
    * @return a ConnManager instance for the appropriate database
    * @throws IOException if it cannot find a ConnManager for this schema
    */
-  public ConnManager getManager(ImportOptions opts) throws IOException {
+  public ConnManager getManager(SqoopOptions opts) throws IOException {
     // Try all the available manager factories.
     for (ManagerFactory factory : factories) {
       LOG.debug("Trying ManagerFactory: " + factory.getClass().getName());

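For orientation, a minimal caller-side sketch of the renamed entry point. The ConnFactory constructor taking a Configuration is an assumption (it is not shown in this hunk), while the SqoopOptions constructor and the getManager() signature come from this patch:

----
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.ConnFactory;
import org.apache.hadoop.sqoop.SqoopOptions;
import org.apache.hadoop.sqoop.manager.ConnManager;

public class GetManagerSketch {
  public static void main(String[] args) throws IOException {
    SqoopOptions opts = new SqoopOptions("jdbc:mysql://db.example.com/foo", "bar");
    ConnFactory connFactory = new ConnFactory(new Configuration()); // assumed constructor
    // Throws IOException if no registered ManagerFactory accepts these options.
    ConnManager manager = connFactory.getManager(opts);
    System.out.println("Selected manager: " + manager.getClass().getName());
  }
}
----
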
Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/Sqoop.java Mon Dec  7 21:42:38 2009
@@ -19,6 +19,8 @@
 package org.apache.hadoop.sqoop;
 
 import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -29,15 +31,17 @@
 
 import org.apache.hadoop.sqoop.hive.HiveImport;
 import org.apache.hadoop.sqoop.manager.ConnManager;
+import org.apache.hadoop.sqoop.manager.ExportJobContext;
 import org.apache.hadoop.sqoop.manager.ImportJobContext;
 import org.apache.hadoop.sqoop.orm.ClassWriter;
 import org.apache.hadoop.sqoop.orm.CompilationManager;
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.util.ExportException;
+import org.apache.hadoop.sqoop.util.ImportException;
 
 /**
  * Main entry-point for Sqoop
  * Usage: hadoop jar (this_jar_name) org.apache.hadoop.sqoop.Sqoop (options)
- * See the ImportOptions class for options.
+ * See the SqoopOptions class for options.
  */
 public class Sqoop extends Configured implements Tool {
 
@@ -53,41 +57,58 @@
     Configuration.addDefaultResource("sqoop-site.xml");
   }
 
-  private ImportOptions options;
+  private SqoopOptions options;
   private ConnManager manager;
   private HiveImport hiveImport;
+  private List<String> generatedJarFiles;
 
   public Sqoop() {
+    generatedJarFiles = new ArrayList<String>();
   }
 
-  public ImportOptions getOptions() {
+  public SqoopOptions getOptions() {
     return options;
   }
 
   /**
+   * @return a list of jar files generated as part of this import/export process
+   */
+  public List<String> getGeneratedJarFiles() {
+    ArrayList<String> out = new ArrayList<String>(generatedJarFiles);
+    return out;
+  }
+
+  /**
    * Generate the .class and .jar files
    * @return the filename of the emitted jar file.
    * @throws IOException
    */
   private String generateORM(String tableName) throws IOException {
+    String existingJar = options.getExistingJarName();
+    if (existingJar != null) {
+      // The user has pre-specified a jar and class to use. Don't generate.
+      LOG.info("Using existing jar: " + existingJar);
+      return existingJar;
+    }
+
     LOG.info("Beginning code generation");
     CompilationManager compileMgr = new CompilationManager(options);
     ClassWriter classWriter = new ClassWriter(options, manager, tableName, compileMgr);
     classWriter.generate();
     compileMgr.compile();
     compileMgr.jar();
-    return compileMgr.getJarFilename();
+    String jarFile = compileMgr.getJarFilename();
+    this.generatedJarFiles.add(jarFile);
+    return jarFile;
   }
 
-  private void importTable(String tableName) throws IOException, ImportError {
+  private void importTable(String tableName) throws IOException, ImportException {
     String jarFile = null;
 
     // Generate the ORM code for the tables.
-    // TODO(aaron): Allow this to be bypassed if the user has already generated code,
-    // or if they're using a non-MapReduce import method (e.g., mysqldump).
     jarFile = generateORM(tableName);
 
-    if (options.getAction() == ImportOptions.ControlAction.FullImport) {
+    if (options.getAction() == SqoopOptions.ControlAction.FullImport) {
       // Proceed onward to do the import.
       ImportJobContext context = new ImportJobContext(tableName, jarFile, options);
       manager.importTable(context);
@@ -99,17 +120,26 @@
     }
   }
 
+  private void exportTable(String tableName) throws ExportException, IOException {
+    String jarFile = null;
+
+    // Generate the ORM code for the tables.
+    jarFile = generateORM(tableName);
+
+    ExportJobContext context = new ExportJobContext(tableName, jarFile, options);
+    manager.exportTable(context);
+  }
 
   /**
    * Actual main entry-point for the program
    */
   public int run(String [] args) {
-    options = new ImportOptions();
+    options = new SqoopOptions();
     options.setConf(getConf());
     try {
       options.parse(args);
       options.validate();
-    } catch (ImportOptions.InvalidOptionsException e) {
+    } catch (SqoopOptions.InvalidOptionsException e) {
       // display the error msg
       System.err.println(e.getMessage());
       return 1; // exit on exception here
@@ -131,8 +161,8 @@
       hiveImport = new HiveImport(options, manager, getConf());
     }
 
-    ImportOptions.ControlAction action = options.getAction();
-    if (action == ImportOptions.ControlAction.ListTables) {
+    SqoopOptions.ControlAction action = options.getAction();
+    if (action == SqoopOptions.ControlAction.ListTables) {
       String [] tables = manager.listTables();
       if (null == tables) {
         System.err.println("Could not retrieve tables list from server");
@@ -143,7 +173,7 @@
           System.out.println(tbl);
         }
       }
-    } else if (action == ImportOptions.ControlAction.ListDatabases) {
+    } else if (action == SqoopOptions.ControlAction.ListDatabases) {
       String [] databases = manager.listDatabases();
       if (null == databases) {
         System.err.println("Could not retrieve database list from server");
@@ -154,10 +184,29 @@
           System.out.println(db);
         }
       }
-    } else if (action == ImportOptions.ControlAction.DebugExec) {
+    } else if (action == SqoopOptions.ControlAction.DebugExec) {
       // just run a SQL statement for debugging purposes.
       manager.execAndPrint(options.getDebugSqlCmd());
       return 0;
+    } else if (action == SqoopOptions.ControlAction.Export) {
+      // Export a table.
+      try {
+        exportTable(options.getTableName());
+      } catch (IOException ioe) {
+        LOG.error("Encountered IOException running export job: " + ioe.toString());
+        if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
+          throw new RuntimeException(ioe);
+        } else {
+          return 1;
+        }
+      } catch (ExportException ee) {
+        LOG.error("Error during export: " + ee.toString());
+        if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
+          throw new RuntimeException(ee);
+        } else {
+          return 1;
+        }
+      }
     } else {
       // This is either FullImport or GenerateOnly.
 
@@ -184,7 +233,7 @@
         } else {
           return 1;
         }
-      } catch (ImportError ie) {
+      } catch (ImportException ie) {
         LOG.error("Error during import: " + ie.toString());
         if (System.getProperty(SQOOP_RETHROW_PROPERTY) != null) {
           throw new RuntimeException(ie);

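Putting the new pieces together: since Sqoop is launched through ToolRunner (per the api-reference), the new export action can be driven programmatically the same way imports are. A minimal sketch mirroring the command-line example in export.txt:

----
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.sqoop.Sqoop;
import org.apache.hadoop.util.ToolRunner;

public class RunExport {
  public static void main(String[] args) throws Exception {
    // Equivalent to:
    //   sqoop --connect jdbc:mysql://db.example.com/foo --table bar \
    //         --export-dir /results/bar_data
    int ret = ToolRunner.run(new Configuration(), new Sqoop(), new String[] {
        "--connect", "jdbc:mysql://db.example.com/foo",
        "--table", "bar",
        "--export-dir", "/results/bar_data"
    });
    System.exit(ret);
  }
}
----
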
Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/SqoopOptions.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,955 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+package org.apache.hadoop.sqoop;
+
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.IOException;
+import java.io.InputStream;
+import java.util.Arrays;
+import java.util.ArrayList;
+import java.util.Properties;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.ToolRunner;
+
+/**
+ * Command-line arguments used by Sqoop
+ */
+public class SqoopOptions {
+
+  public static final Log LOG = LogFactory.getLog(SqoopOptions.class.getName());
+
+  /**
+   * Thrown when invalid cmdline options are given
+   */
+  @SuppressWarnings("serial")
+  public static class InvalidOptionsException extends Exception {
+
+    private String message;
+
+    public InvalidOptionsException(final String msg) {
+      this.message = msg;
+    }
+
+    public String getMessage() {
+      return message;
+    }
+
+    public String toString() {
+      return getMessage();
+    }
+  }
+
+  // control-flow selector based on command-line switches.
+  public enum ControlAction {
+    ListDatabases,  // list available databases and exit.
+    ListTables,     // list available tables and exit.
+    GenerateOnly,   // generate ORM code but do not import.
+    FullImport,     // generate code (as needed) and import.
+    DebugExec,      // just execute a single sql command and print its results.
+    Export          // export a table from HDFS to a database.
+  }
+
+  // selects in-HDFS destination file format
+  public enum FileLayout {
+    TextFile,
+    SequenceFile
+  }
+
+
+  // TODO(aaron): Adding something here? Add a getter, a cmdline switch, and a properties file
+  // entry in loadFromProperties(). Add a default value in initDefaults() if you need one.
+  // Make sure you add the stub to the testdata/sqoop.properties.template file.
+  private String connectString;
+  private String tableName;
+  private String [] columns;
+  private boolean allTables;
+  private String username;
+  private String password;
+  private String codeOutputDir;
+  private String jarOutputDir;
+  private ControlAction action;
+  private String hadoopHome;
+  private String splitByCol;
+  private String whereClause;
+  private String debugSqlCmd;
+  private String driverClassName;
+  private String warehouseDir;
+  private FileLayout layout;
+  private boolean direct; // if true and conn is mysql, use mysqldump.
+  private String tmpDir; // where temp data goes; usually /tmp
+  private String hiveHome;
+  private boolean hiveImport;
+  private String packageName; // package to prepend to auto-named classes.
+  private String className; // package+class to apply to individual table import.
+                            // also used as an *input* class with existingJarFile.
+  private String existingJarFile; // Name of a jar containing existing table definition
+                                  // class to use.
+  private int numMappers;
+  private boolean useCompression;
+  private long directSplitSize; // In direct mode, open a new stream every X bytes.
+
+  private String exportDir; // HDFS path to read from when performing an export
+
+  private char inputFieldDelim;
+  private char inputRecordDelim;
+  private char inputEnclosedBy;
+  private char inputEscapedBy;
+  private boolean inputMustBeEnclosed;
+
+  private char outputFieldDelim;
+  private char outputRecordDelim;
+  private char outputEnclosedBy;
+  private char outputEscapedBy;
+  private boolean outputMustBeEnclosed;
+
+  private boolean areDelimsManuallySet;
+
+  private Configuration conf;
+
+  public static final int DEFAULT_NUM_MAPPERS = 4;
+
+  private static final String DEFAULT_CONFIG_FILE = "sqoop.properties";
+
+  private String [] extraArgs;
+
+  public SqoopOptions() {
+    initDefaults();
+  }
+
+  /**
+   * Alternate SqoopOptions interface used mostly for unit testing
+   * @param connect JDBC connect string to use
+   * @param table Table to read
+   */
+  public SqoopOptions(final String connect, final String table) {
+    initDefaults();
+
+    this.connectString = connect;
+    this.tableName = table;
+  }
+
+  private boolean getBooleanProperty(Properties props, String propName, boolean defaultValue) {
+    String str = props.getProperty(propName,
+        Boolean.toString(defaultValue)).toLowerCase();
+    return "true".equals(str) || "yes".equals(str) || "1".equals(str);
+  }
+
+  private long getLongProperty(Properties props, String propName, long defaultValue) {
+    String str = props.getProperty(propName,
+        Long.toString(defaultValue)).toLowerCase();
+    try {
+      return Long.parseLong(str);
+    } catch (NumberFormatException nfe) {
+      LOG.warn("Could not parse integer value for config parameter " + propName);
+      return defaultValue;
+    }
+  }
+
+  private void loadFromProperties() {
+    File configFile = new File(DEFAULT_CONFIG_FILE);
+    if (!configFile.canRead()) {
+      return; //can't do this.
+    }
+
+    Properties props = new Properties();
+    InputStream istream = null;
+    try {
+      LOG.info("Loading properties from " + configFile.getAbsolutePath());
+      istream = new FileInputStream(configFile);
+      props.load(istream);
+
+      this.hadoopHome = props.getProperty("hadoop.home", this.hadoopHome);
+      this.codeOutputDir = props.getProperty("out.dir", this.codeOutputDir);
+      this.jarOutputDir = props.getProperty("bin.dir", this.jarOutputDir);
+      this.username = props.getProperty("db.username", this.username);
+      this.password = props.getProperty("db.password", this.password);
+      this.tableName = props.getProperty("db.table", this.tableName);
+      this.connectString = props.getProperty("db.connect.url", this.connectString);
+      this.splitByCol = props.getProperty("db.split.column", this.splitByCol);
+      this.whereClause = props.getProperty("db.where.clause", this.whereClause);
+      this.driverClassName = props.getProperty("jdbc.driver", this.driverClassName);
+      this.warehouseDir = props.getProperty("hdfs.warehouse.dir", this.warehouseDir);
+      this.hiveHome = props.getProperty("hive.home", this.hiveHome);
+      this.className = props.getProperty("java.classname", this.className);
+      this.packageName = props.getProperty("java.packagename", this.packageName);
+      this.existingJarFile = props.getProperty("java.jar.file", this.existingJarFile);
+      this.exportDir = props.getProperty("export.dir", this.exportDir);
+
+      this.direct = getBooleanProperty(props, "direct.import", this.direct);
+      this.hiveImport = getBooleanProperty(props, "hive.import", this.hiveImport);
+      this.useCompression = getBooleanProperty(props, "compression", this.useCompression);
+      this.directSplitSize = getLongProperty(props, "direct.split.size",
+          this.directSplitSize);
+    } catch (IOException ioe) {
+      LOG.error("Could not read properties file " + DEFAULT_CONFIG_FILE + ": " + ioe.toString());
+    } finally {
+      if (null != istream) {
+        try {
+          istream.close();
+        } catch (IOException ioe) {
+          // ignore this; we're closing.
+        }
+      }
+    }
+  }
+
+  /**
+   * @return the temp directory to use; this is guaranteed to end with
+   * the file separator character (e.g., '/')
+   */
+  public String getTempDir() {
+    return this.tmpDir;
+  }
+
+  private void initDefaults() {
+    // first, set the true defaults if nothing else happens.
+    // default action is to run the full pipeline.
+    this.action = ControlAction.FullImport;
+    this.hadoopHome = System.getenv("HADOOP_HOME");
+
+    // Set this with $HIVE_HOME, but -Dhive.home can override.
+    this.hiveHome = System.getenv("HIVE_HOME");
+    this.hiveHome = System.getProperty("hive.home", this.hiveHome);
+
+    // Set this to cwd, but -Dsqoop.src.dir can override.
+    this.codeOutputDir = System.getProperty("sqoop.src.dir", ".");
+
+    String myTmpDir = System.getProperty("test.build.data", "/tmp/");
+    if (!myTmpDir.endsWith(File.separator)) {
+      myTmpDir = myTmpDir + File.separator;
+    }
+
+    this.tmpDir = myTmpDir;
+    this.jarOutputDir = tmpDir + "sqoop/compile";
+    this.layout = FileLayout.TextFile;
+
+    this.inputFieldDelim = '\000';
+    this.inputRecordDelim = '\000';
+    this.inputEnclosedBy = '\000';
+    this.inputEscapedBy = '\000';
+    this.inputMustBeEnclosed = false;
+
+    this.outputFieldDelim = ',';
+    this.outputRecordDelim = '\n';
+    this.outputEnclosedBy = '\000';
+    this.outputEscapedBy = '\000';
+    this.outputMustBeEnclosed = false;
+
+    this.areDelimsManuallySet = false;
+
+    this.numMappers = DEFAULT_NUM_MAPPERS;
+    this.useCompression = false;
+    this.directSplitSize = 0;
+
+    this.conf = new Configuration();
+
+    this.extraArgs = null;
+
+    loadFromProperties();
+  }
+
+  /**
+   * Allow the user to enter his password on the console without printing characters.
+   * @return the password as a string
+   */
+  private String securePasswordEntry() {
+    return new String(System.console().readPassword("Enter password: "));
+  }
+
+  /**
+   * Print usage strings for the program's arguments.
+   */
+  public static void printUsage() {
+    System.out.println("Usage: hadoop sqoop.jar org.apache.hadoop.sqoop.Sqoop (options)");
+    System.out.println("");
+    System.out.println("Database connection options:");
+    System.out.println("--connect (jdbc-uri)         Specify JDBC connect string");
+    System.out.println("--driver (class-name)        Manually specify JDBC driver class to use");
+    System.out.println("--username (username)        Set authentication username");
+    System.out.println("--password (password)        Set authentication password");
+    System.out.println("-P                           Read password from console");
+    System.out.println("--direct                     Use direct import fast path (mysql only)");
+    System.out.println("");
+    System.out.println("Import control options:");
+    System.out.println("--table (tablename)          Table to read");
+    System.out.println("--columns (col,col,col...)   Columns to export from table");
+    System.out.println("--split-by (column-name)     Column of the table used to split work units");
+    System.out.println("--where (where clause)       Where clause to use during export");
+    System.out.println("--hadoop-home (dir)          Override $HADOOP_HOME");
+    System.out.println("--hive-home (dir)            Override $HIVE_HOME");
+    System.out.println("--warehouse-dir (dir)        HDFS path for table destination");
+    System.out.println("--as-sequencefile            Imports data to SequenceFiles");
+    System.out.println("--as-textfile                Imports data as plain text (default)");
+    System.out.println("--all-tables                 Import all tables in database");
+    System.out.println("                             (Ignores --table, --columns and --split-by)");
+    System.out.println("--hive-import                If set, then import the table into Hive.");
+    System.out.println("                    (Uses Hive's default delimiters if none are set.)");
+    System.out.println("-m, --num-mappers (n)        Use 'n' map tasks to import in parallel");
+    System.out.println("-z, --compress               Enable compression");
+    System.out.println("--direct-split-size (n)      Split the input stream every 'n' bytes");
+    System.out.println("                             when importing in direct mode.");
+    System.out.println("");
+    System.out.println("Export options:");
+    System.out.println("--export-dir (dir)           Export from an HDFS path into a table");
+    System.out.println("                             (set with --table)");
+    System.out.println("");
+    System.out.println("Output line formatting options:");
+    System.out.println("--fields-terminated-by (char)    Sets the field separator character");
+    System.out.println("--lines-terminated-by (char)     Sets the end-of-line character");
+    System.out.println("--optionally-enclosed-by (char)  Sets a field enclosing character");
+    System.out.println("--enclosed-by (char)             Sets a required field enclosing char");
+    System.out.println("--escaped-by (char)              Sets the escape character");
+    System.out.println("--mysql-delimiters               Uses MySQL's default delimiter set");
+    System.out.println("  fields: ,  lines: \\n  escaped-by: \\  optionally-enclosed-by: '");
+    System.out.println("");
+    System.out.println("Input parsing options:");
+    System.out.println("--input-fields-terminated-by (char)    Sets the input field separator");
+    System.out.println("--input-lines-terminated-by (char)     Sets the input end-of-line char");
+    System.out.println("--input-optionally-enclosed-by (char)  Sets a field enclosing character");
+    System.out.println("--input-enclosed-by (char)             Sets a required field encloser");
+    System.out.println("--input-escaped-by (char)              Sets the input escape character");
+    System.out.println("");
+    System.out.println("Code generation options:");
+    System.out.println("--outdir (dir)               Output directory for generated code");
+    System.out.println("--bindir (dir)               Output directory for compiled objects");
+    System.out.println("--generate-only              Stop after code generation; do not import");
+    System.out.println("--package-name (name)        Put auto-generated classes in this package");
+    System.out.println("--class-name (name)          When generating one class, use this name.");
+    System.out.println("                             This overrides --package-name.");
+    System.out.println("");
+    System.out.println("Library loading options:");
+    System.out.println("--jar-file (file)            Disable code generation; use specified jar");
+    System.out.println("--class-name (name)          The class within the jar that represents");
+    System.out.println("                             the table to import/export");
+    System.out.println("");
+    System.out.println("Additional commands:");
+    System.out.println("--list-tables                List tables in database and exit");
+    System.out.println("--list-databases             List all databases available and exit");
+    System.out.println("--debug-sql (statement)      Execute 'statement' in SQL and exit");
+    System.out.println("");
+    System.out.println("Database-specific options:");
+    System.out.println("Arguments may be passed to the database manager after a lone '-':");
+    System.out.println("  MySQL direct mode: arguments passed directly to mysqldump");
+    System.out.println("");
+    System.out.println("Generic Hadoop command-line options:");
+    ToolRunner.printGenericCommandUsage(System.out);
+    System.out.println("");
+    System.out.println("At minimum, you must specify --connect "
+        + "and either --table or --all-tables.");
+    System.out.println("Alternatively, you can specify --generate-only or one of the additional");
+    System.out.println("commands.");
+  }
+
+  /**
+   * Given a string containing a single character or an escape sequence representing
+   * a char, return that char itself.
+   *
+   * Normal literal characters return themselves: "x" -&gt; 'x', etc.
+   * Strings containing a '\' followed by one of t, r, n, or b escape to the usual
+   * character as seen in Java: "\n" -&gt; (newline), etc.
+   *
+   * Strings like "\0ooo" return the character specified by the octal sequence 'ooo'
+   * Strings like "\0xhhh" or "\0Xhhh" return the character specified by the hex sequence 'hhh'
+   */
+  static char toChar(String charish) throws InvalidOptionsException {
+    if (null == charish) {
+      throw new InvalidOptionsException("Character argument expected." 
+          + "\nTry --help for usage instructions.");
+    } else if (charish.startsWith("\\0x") || charish.startsWith("\\0X")) {
+      if (charish.length() == 3) {
+        throw new InvalidOptionsException("Base-16 value expected for character argument."
+          + "\nTry --help for usage instructions.");
+      } else {
+        String valStr = charish.substring(3);
+        int val = Integer.parseInt(valStr, 16);
+        return (char) val;
+      }
+    } else if (charish.startsWith("\\0")) {
+      if (charish.equals("\\0")) {
+        // it's just '\0', which we can take as shorthand for nul.
+        return '\000';
+      } else {
+        // it's an octal value.
+        String valStr = charish.substring(2);
+        int val = Integer.parseInt(valStr, 8);
+        return (char) val;
+      }
+    } else if (charish.startsWith("\\")) {
+      if (charish.length() == 1) {
+        // it's just a '\'. Keep it literal.
+        return '\\';
+      } else if (charish.length() > 2) {
+        // we don't have any 3+ char escape strings. 
+        throw new InvalidOptionsException("Cannot understand character argument: " + charish
+            + "\nTry --help for usage instructions.");
+      } else {
+        // this is some sort of normal 1-character escape sequence.
+        char escapeWhat = charish.charAt(1);
+        switch(escapeWhat) {
+        case 'b':
+          return '\b';
+        case 'n':
+          return '\n';
+        case 'r':
+          return '\r';
+        case 't':
+          return '\t';
+        case '\"':
+          return '\"';
+        case '\'':
+          return '\'';
+        case '\\':
+          return '\\';
+        default:
+          throw new InvalidOptionsException("Cannot understand character argument: " + charish
+              + "\nTry --help for usage instructions.");
+        }
+      }
+    } else if (charish.length() == 0) {
+      throw new InvalidOptionsException("Character argument expected." 
+          + "\nTry --help for usage instructions.");
+    } else {
+      // it's a normal character.
+      if (charish.length() > 1) {
+        LOG.warn("Character argument " + charish + " has multiple characters; "
+            + "only the first will be used.");
+      }
+
+      return charish.charAt(0);
+    }
+  }
+
+  /**
+   * Read args from the command-line into member fields.
+   * @throws Exception if there's a problem parsing arguments.
+   */
+  public void parse(String [] args) throws InvalidOptionsException {
+
+    if (LOG.isDebugEnabled()) {
+      LOG.debug("Parsing sqoop arguments:");
+      for (String arg : args) {
+        LOG.debug("  " + arg);
+      }
+    }
+
+    int i = 0;
+    try {
+      for (i = 0; i < args.length; i++) {
+        if (args[i].equals("--connect")) {
+          this.connectString = args[++i];
+        } else if (args[i].equals("--driver")) {
+          this.driverClassName = args[++i];
+        } else if (args[i].equals("--table")) {
+          this.tableName = args[++i];
+        } else if (args[i].equals("--columns")) {
+          String columnString = args[++i];
+          this.columns = columnString.split(",");
+        } else if (args[i].equals("--split-by")) {
+          this.splitByCol = args[++i];
+        } else if (args[i].equals("--where")) {
+          this.whereClause = args[++i];
+        } else if (args[i].equals("--list-tables")) {
+          this.action = ControlAction.ListTables;
+        } else if (args[i].equals("--all-tables")) {
+          this.allTables = true;
+        } else if (args[i].equals("--export-dir")) {
+          this.exportDir = args[++i];
+          this.action = ControlAction.Export;
+        } else if (args[i].equals("--local")) {
+          // TODO(aaron): Remove this after suitable deprecation time period.
+          LOG.warn("--local is deprecated; use --direct instead.");
+          this.direct = true;
+        } else if (args[i].equals("--direct")) {
+          this.direct = true;
+        } else if (args[i].equals("--username")) {
+          this.username = args[++i];
+          if (null == this.password) {
+            // Set password to empty if the username is set first,
+            // to ensure that they're either both null or neither.
+            this.password = "";
+          }
+        } else if (args[i].equals("--password")) {
+          LOG.warn("Setting your password on the command-line is insecure. "
+              + "Consider using -P instead.");
+          this.password = args[++i];
+        } else if (args[i].equals("-P")) {
+          this.password = securePasswordEntry();
+        } else if (args[i].equals("--hadoop-home")) {
+          this.hadoopHome = args[++i];
+        } else if (args[i].equals("--hive-home")) {
+          this.hiveHome = args[++i];
+        } else if (args[i].equals("--hive-import")) {
+          this.hiveImport = true;
+        } else if (args[i].equals("--num-mappers") || args[i].equals("-m")) {
+          String numMappersStr = args[++i];
+          this.numMappers = Integer.valueOf(numMappersStr);
+        } else if (args[i].equals("--fields-terminated-by")) {
+          this.outputFieldDelim = SqoopOptions.toChar(args[++i]);
+          this.areDelimsManuallySet = true;
+        } else if (args[i].equals("--lines-terminated-by")) {
+          this.outputRecordDelim = SqoopOptions.toChar(args[++i]);
+          this.areDelimsManuallySet = true;
+        } else if (args[i].equals("--optionally-enclosed-by")) {
+          this.outputEnclosedBy = SqoopOptions.toChar(args[++i]);
+          this.outputMustBeEnclosed = false;
+          this.areDelimsManuallySet = true;
+        } else if (args[i].equals("--enclosed-by")) {
+          this.outputEnclosedBy = SqoopOptions.toChar(args[++i]);
+          this.outputMustBeEnclosed = true;
+          this.areDelimsManuallySet = true;
+        } else if (args[i].equals("--escaped-by")) {
+          this.outputEscapedBy = SqoopOptions.toChar(args[++i]);
+          this.areDelimsManuallySet = true;
+        } else if (args[i].equals("--mysql-delimiters")) {
+          this.outputFieldDelim = ',';
+          this.outputRecordDelim = '\n';
+          this.outputEnclosedBy = '\'';
+          this.outputEscapedBy = '\\';
+          this.outputMustBeEnclosed = false;
+          this.areDelimsManuallySet = true;
+        } else if (args[i].equals("--input-fields-terminated-by")) {
+          this.inputFieldDelim = SqoopOptions.toChar(args[++i]);
+        } else if (args[i].equals("--input-lines-terminated-by")) {
+          this.inputRecordDelim = SqoopOptions.toChar(args[++i]);
+        } else if (args[i].equals("--input-optionally-enclosed-by")) {
+          this.inputEnclosedBy = SqoopOptions.toChar(args[++i]);
+          this.inputMustBeEnclosed = false;
+        } else if (args[i].equals("--input-enclosed-by")) {
+          this.inputEnclosedBy = SqoopOptions.toChar(args[++i]);
+          this.inputMustBeEnclosed = true;
+        } else if (args[i].equals("--input-escaped-by")) {
+          this.inputEscapedBy = SqoopOptions.toChar(args[++i]);
+        } else if (args[i].equals("--outdir")) {
+          this.codeOutputDir = args[++i];
+        } else if (args[i].equals("--as-sequencefile")) {
+          this.layout = FileLayout.SequenceFile;
+        } else if (args[i].equals("--as-textfile")) {
+          this.layout = FileLayout.TextFile;
+        } else if (args[i].equals("--bindir")) {
+          this.jarOutputDir = args[++i];
+        } else if (args[i].equals("--warehouse-dir")) {
+          this.warehouseDir = args[++i];
+        } else if (args[i].equals("--package-name")) {
+          this.packageName = args[++i];
+        } else if (args[i].equals("--class-name")) {
+          this.className = args[++i];
+        } else if (args[i].equals("-z") || args[i].equals("--compress")) {
+          this.useCompression = true;
+        } else if (args[i].equals("--direct-split-size")) {
+          this.directSplitSize = Long.parseLong(args[++i]);
+        } else if (args[i].equals("--jar-file")) {
+          this.existingJarFile = args[++i];
+        } else if (args[i].equals("--list-databases")) {
+          this.action = ControlAction.ListDatabases;
+        } else if (args[i].equals("--generate-only")) {
+          this.action = ControlAction.GenerateOnly;
+        } else if (args[i].equals("--debug-sql")) {
+          this.action = ControlAction.DebugExec;
+          // read the entire remainder of the commandline into the debug sql statement.
+          if (null == this.debugSqlCmd) {
+            this.debugSqlCmd = "";
+          }
+          for (i++; i < args.length; i++) {
+            this.debugSqlCmd = this.debugSqlCmd + args[i] + " ";
+          }
+        } else if (args[i].equals("--help")) {
+          printUsage();
+          throw new InvalidOptionsException("");
+        } else if (args[i].equals("-")) {
+          // Everything after a '--' goes into extraArgs.
+          ArrayList<String> extra = new ArrayList<String>();
+          for (i++; i < args.length; i++) {
+            extra.add(args[i]);
+          }
+          this.extraArgs = extra.toArray(new String[0]);
+        } else {
+          throw new InvalidOptionsException("Invalid argument: " + args[i] + ".\n"
+              + "Try --help for usage.");
+        }
+      }
+    } catch (ArrayIndexOutOfBoundsException oob) {
+      throw new InvalidOptionsException("Error: " + args[--i] + " expected argument.\n"
+          + "Try --help for usage.");
+    } catch (NumberFormatException nfe) {
+      throw new InvalidOptionsException("Error: " + args[--i] + " expected numeric argument.\n"
+          + "Try --help for usage.");
+    }
+  }
+
+  private static final String HELP_STR = "\nTry --help for usage instructions.";
+
+  /**
+   * Validates options and ensures that any required options are
+   * present and that any mutually-exclusive options are not selected.
+   * @throws Exception if there's a problem.
+   */
+  public void validate() throws InvalidOptionsException {
+    if (this.allTables && this.columns != null) {
+      // If we're reading all tables in a database, can't filter column names.
+      throw new InvalidOptionsException("--columns and --all-tables are incompatible options."
+          + HELP_STR);
+    } else if (this.allTables && this.splitByCol != null) {
+      // If we're reading all tables in a database, can't set pkey
+      throw new InvalidOptionsException("--split-by and --all-tables are incompatible options."
+          + HELP_STR);
+    } else if (this.allTables && this.className != null) {
+      // If we're reading all tables, can't set individual class name
+      throw new InvalidOptionsException("--class-name and --all-tables are incompatible options."
+          + HELP_STR);
+    } else if (this.connectString == null) {
+      throw new InvalidOptionsException("Error: Required argument --connect is missing."
+          + HELP_STR);
+    } else if (this.className != null && this.packageName != null) {
+      throw new InvalidOptionsException(
+          "--class-name overrides --package-name. You cannot use both." + HELP_STR);
+    } else if (this.action == ControlAction.FullImport && !this.allTables
+        && this.tableName == null) {
+      throw new InvalidOptionsException(
+          "One of --table or --all-tables is required for import." + HELP_STR);
+    } else if (this.action == ControlAction.Export && this.allTables) {
+      throw new InvalidOptionsException("You cannot export with --all-tables." + HELP_STR);
+    } else if (this.action == ControlAction.Export && this.tableName == null) {
+      throw new InvalidOptionsException("Export requires a --table argument." + HELP_STR);
+    } else if (this.existingJarFile != null && this.className == null) {
+      throw new InvalidOptionsException("Jar specified with --jar-file, but no "
+          + "class specified with --class-name." + HELP_STR);
+    } else if (this.existingJarFile != null && this.action == ControlAction.GenerateOnly) {
+      throw new InvalidOptionsException("Cannot generate code using existing jar." + HELP_STR);
+    }
+
+    if (this.hiveImport) {
+      if (!areDelimsManuallySet) {
+        // user hasn't manually specified delimiters, and wants to import straight to Hive.
+        // Use Hive-style delimiters.
+        LOG.info("Using Hive-specific delimiters for output. You can override");
+        LOG.info("delimiters with --fields-terminated-by, etc.");
+        this.outputFieldDelim = (char)0x1; // ^A
+        this.outputRecordDelim = '\n';
+        this.outputEnclosedBy = '\000'; // no enclosing in Hive.
+        this.outputEscapedBy = '\000'; // no escaping in Hive
+        this.outputMustBeEnclosed = false;
+      }
+
+      if (this.getOutputEscapedBy() != '\000') {
+        LOG.warn("Hive does not support escape characters in fields;");
+        LOG.warn("parse errors in Hive may result from using --escaped-by.");
+      }
+
+      if (this.getOutputEnclosedBy() != '\000') {
+        LOG.warn("Hive does not support quoted strings; parse errors");
+        LOG.warn("in Hive may result from using --enclosed-by.");
+      }
+    }
+  }
+
+  /** get the temporary directory; guaranteed to end in File.separator
+   * (e.g., '/')
+   */
+  public String getTmpDir() {
+    return tmpDir;
+  }
+
+  public String getConnectString() {
+    return connectString;
+  }
+
+  public String getTableName() {
+    return tableName;
+  }
+
+  public String getExportDir() {
+    return exportDir;
+  }
+
+  public String getExistingJarName() {
+    return existingJarFile;
+  }
+
+  public String[] getColumns() {
+    if (null == columns) {
+      return null;
+    } else {
+      return Arrays.copyOf(columns, columns.length);
+    }
+  }
+
+  public String getSplitByCol() {
+    return splitByCol;
+  }
+  
+  public String getWhereClause() {
+    return whereClause;
+  }
+
+  public ControlAction getAction() {
+    return action;
+  }
+
+  public boolean isAllTables() {
+    return allTables;
+  }
+
+  public String getUsername() {
+    return username;
+  }
+
+  public String getPassword() {
+    return password;
+  }
+
+  public boolean isDirect() {
+    return direct;
+  }
+
+  /**
+   * @return the number of map tasks to use for import
+   */
+  public int getNumMappers() {
+    return this.numMappers;
+  }
+
+  /**
+   * @return the user-specified absolute class name for the table
+   */
+  public String getClassName() {
+    return className;
+  }
+
+  /**
+   * @return the user-specified package to prepend to table names via --package-name.
+   */
+  public String getPackageName() {
+    return packageName;
+  }
+
+  public String getHiveHome() {
+    return hiveHome;
+  }
+
+  /** @return true if we should import the table into Hive */
+  public boolean doHiveImport() {
+    return hiveImport;
+  }
+
+  /**
+   * @return location where .java files go; guaranteed to end with '/'
+   */
+  public String getCodeOutputDir() {
+    if (codeOutputDir.endsWith(File.separator)) {
+      return codeOutputDir;
+    } else {
+      return codeOutputDir + File.separator;
+    }
+  }
+
+  /**
+   * @return location where .jar and .class files go; guaranteed to end with '/'
+   */
+  public String getJarOutputDir() {
+    if (jarOutputDir.endsWith(File.separator)) {
+      return jarOutputDir;
+    } else {
+      return jarOutputDir + File.separator;
+    }
+  }
+
+  /**
+   * Return the value of $HADOOP_HOME
+   * @return $HADOOP_HOME, or null if it's not set.
+   */
+  public String getHadoopHome() {
+    return hadoopHome;
+  }
+
+  /**
+   * @return a sql command to execute and exit with.
+   */
+  public String getDebugSqlCmd() {
+    return debugSqlCmd;
+  }
+
+  /**
+   * @return The JDBC driver class name specified with --driver
+   */
+  public String getDriverClassName() {
+    return driverClassName;
+  }
+
+  /**
+   * @return the base destination path for table uploads.
+   */
+  public String getWarehouseDir() {
+    return warehouseDir;
+  }
+
+  /**
+   * @return the destination file format
+   */
+  public FileLayout getFileLayout() {
+    return this.layout;
+  }
+
+  public void setUsername(String name) {
+    this.username = name;
+  }
+
+  public void setPassword(String pass) {
+    this.password = pass;
+  }
+
+  /**
+   * @return the field delimiter to use when parsing lines. Defaults to the field delim
+   * to use when printing lines
+   */
+  public char getInputFieldDelim() {
+    if (inputFieldDelim == '\000') {
+      return this.outputFieldDelim;
+    } else {
+      return this.inputFieldDelim;
+    }
+  }
+
+  /**
+   * @return the record delimiter to use when parsing lines. Defaults to the record delim
+   * to use when printing lines.
+   */
+  public char getInputRecordDelim() {
+    if (inputRecordDelim == '\000') {
+      return this.outputRecordDelim;
+    } else {
+      return this.inputRecordDelim;
+    }
+  }
+
+  /**
+   * @return the character that may enclose fields when parsing lines. Defaults to the
+   * enclosing-char to use when printing lines.
+   */
+  public char getInputEnclosedBy() {
+    if (inputEnclosedBy == '\000') {
+      return this.outputEnclosedBy;
+    } else {
+      return this.inputEnclosedBy;
+    }
+  }
+
+  /**
+   * @return the escape character to use when parsing lines. Defaults to the escape
+   * character used when printing lines.
+   */
+  public char getInputEscapedBy() {
+    if (inputEscapedBy == '\000') {
+      return this.outputEscapedBy;
+    } else {
+      return this.inputEscapedBy;
+    }
+  }
+
+  /**
+   * @return true if fields must be enclosed by the --enclosed-by character when parsing.
+   * Defaults to false. Set true when --input-enclosed-by is used.
+   */
+  public boolean isInputEncloseRequired() {
+    if (inputEnclosedBy == '\000') {
+      return this.outputMustBeEnclosed;
+    } else {
+      return this.inputMustBeEnclosed;
+    }
+  }
+
+  /**
+   * @return the character to print between fields when importing them to text.
+   */
+  public char getOutputFieldDelim() {
+    return this.outputFieldDelim;
+  }
+
+
+  /**
+   * @return the character to print between records when importing them to text.
+   */
+  public char getOutputRecordDelim() {
+    return this.outputRecordDelim;
+  }
+
+  /**
+   * @return a character which may enclose the contents of fields when imported to text.
+   */
+  public char getOutputEnclosedBy() {
+    return this.outputEnclosedBy;
+  }
+
+  /**
+   * @return a character which signifies an escape sequence when importing to text.
+   */
+  public char getOutputEscapedBy() {
+    return this.outputEscapedBy;
+  }
+
+  /**
+   * @return true if fields imported to text must be enclosed by the EnclosedBy char.
+   * default is false; set to true if --enclosed-by is used instead of --optionally-enclosed-by.
+   */
+  public boolean isOutputEncloseRequired() {
+    return this.outputMustBeEnclosed;
+  }
+
+  /**
+   * @return true if the user wants imported results to be compressed.
+   */
+  public boolean shouldUseCompression() {
+    return this.useCompression;
+  }
+
+  /**
+   * @return the file size to split by when using --direct mode.
+   */
+  public long getDirectSplitSize() {
+    return this.directSplitSize;
+  }
+
+  public Configuration getConf() {
+    return conf;
+  }
+
+  public void setConf(Configuration config) {
+    this.conf = config;
+  }
+
+  /**
+   * @return command-line arguments after a '-'
+   */
+  public String [] getExtraArgs() {
+    if (extraArgs == null) {
+      return null;
+    }
+
+    return Arrays.copyOf(extraArgs, extraArgs.length);
+  }
+}
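
The input-side getters above (getInputFieldDelim() and friends) fall back to the
corresponding output-side values whenever the input-side character is still '\000',
i.e. the user never passed an --input-* flag. A minimal sketch of how calling code
could read the effective parse-time delimiters through those getters; the
DelimiterSummary class and its describe() method are illustrative only and not part
of this patch:

    import org.apache.hadoop.sqoop.SqoopOptions;

    /** Illustrative helper: reports the delimiters a parser would actually use. */
    public class DelimiterSummary {

      public static String describe(SqoopOptions opts) {
        // Each getInput*() call returns the matching output-side value when
        // the input-side setting is '\000' (unset), so this reflects the
        // fallback behavior implemented above.
        char field = opts.getInputFieldDelim();
        char record = opts.getInputRecordDelim();
        char enclose = opts.getInputEnclosedBy();
        char escape = opts.getInputEscapedBy();
        boolean required = opts.isInputEncloseRequired();

        return "fields=0x" + Integer.toHexString(field)
            + " records=0x" + Integer.toHexString(record)
            + " enclosed-by=0x" + Integer.toHexString(enclose)
            + " escaped-by=0x" + Integer.toHexString(escape)
            + " enclose-required=" + required;
      }
    }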

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/HiveImport.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/HiveImport.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/HiveImport.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/HiveImport.java Mon Dec  7 21:42:38 2009
@@ -32,7 +32,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.util.Executor;
 import org.apache.hadoop.sqoop.util.LoggingAsyncSink;
@@ -46,11 +46,11 @@
 
   public static final Log LOG = LogFactory.getLog(HiveImport.class.getName());
 
-  private ImportOptions options;
+  private SqoopOptions options;
   private ConnManager connManager;
   private Configuration configuration;
 
-  public HiveImport(final ImportOptions opts, final ConnManager connMgr, final Configuration conf) {
+  public HiveImport(final SqoopOptions opts, final ConnManager connMgr, final Configuration conf) {
     this.options = opts;
     this.connManager = connMgr;
     this.configuration = conf;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/hive/TableDefWriter.java Mon Dec  7 21:42:38 2009
@@ -22,7 +22,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.manager.ConnManager;
 import org.apache.hadoop.sqoop.hive.HiveTypes;
 
@@ -47,7 +47,7 @@
 
   public static final Log LOG = LogFactory.getLog(TableDefWriter.class.getName());
 
-  private ImportOptions options;
+  private SqoopOptions options;
   private ConnManager connManager;
   private Configuration configuration;
   private String tableName;
@@ -62,7 +62,7 @@
    * @param withComments if true, then tables will be created with a
    *        timestamp comment.
    */
-  public TableDefWriter(final ImportOptions opts, final ConnManager connMgr,
+  public TableDefWriter(final SqoopOptions opts, final ConnManager connMgr,
       final String table, final Configuration config, final boolean withComments) {
     this.options = opts;
     this.connManager = connMgr;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ConnManager.java Mon Dec  7 21:42:38 2009
@@ -26,7 +26,8 @@
 
 import org.apache.hadoop.conf.Configuration;
 
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.util.ExportException;
+import org.apache.hadoop.sqoop.util.ImportException;
 
 /**
  * Abstract interface that manages connections to a database.
@@ -91,11 +92,19 @@
    * Perform an import of a table from the database into HDFS
    */
   public abstract void importTable(ImportJobContext context)
-      throws IOException, ImportError;
+      throws IOException, ImportException;
 
   /**
    * Perform any shutdown operations on the connection.
    */
   public abstract void close() throws SQLException;
+
+  /**
+   * Export data stored in HDFS into a table in a database
+   */
+  public void exportTable(ExportJobContext context)
+      throws IOException, ExportException {
+    throw new ExportException("This database does not support exports");
+  }
 }
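
The default exportTable() above simply refuses, so any ConnManager that can export
must override it. A rough sketch of that override pattern, assuming GenericJdbcManager
(modified later in this commit) remains a concrete base class; ExampleExportingManager
and runExportJob() are hypothetical names, and the actual job-launching step is elided:

    import java.io.IOException;

    import org.apache.hadoop.sqoop.SqoopOptions;
    import org.apache.hadoop.sqoop.manager.ExportJobContext;
    import org.apache.hadoop.sqoop.manager.GenericJdbcManager;
    import org.apache.hadoop.sqoop.util.ExportException;

    /** Hypothetical manager that opts in to exports by overriding the default. */
    public class ExampleExportingManager extends GenericJdbcManager {

      public ExampleExportingManager(final String driverClass, final SqoopOptions opts) {
        super(driverClass, opts);
      }

      @Override
      public void exportTable(ExportJobContext context)
          throws IOException, ExportException {
        // The context carries the target table, the generated ORM jar,
        // and the user's SqoopOptions.
        if (context.getTableName() == null) {
          throw new ExportException("No table specified for export");
        }
        runExportJob(context); // hypothetical: launch the actual export here
      }

      private void runExportJob(ExportJobContext context) throws IOException {
        // Elided: submit a job that reads the export directory and writes
        // rows into context.getTableName() using context.getJarFile().
      }
    }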
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DefaultManagerFactory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DefaultManagerFactory.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DefaultManagerFactory.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DefaultManagerFactory.java Mon Dec  7 21:42:38 2009
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.sqoop.manager;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -31,7 +31,7 @@
 
   public static final Log LOG = LogFactory.getLog(DefaultManagerFactory.class.getName());
 
-  public ConnManager accept(ImportOptions options) {
+  public ConnManager accept(SqoopOptions options) {
     String manualDriver = options.getDriverClassName();
     if (manualDriver != null) {
       // User has manually specified JDBC implementation with --driver.

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/DirectPostgresqlManager.java Mon Dec  7 21:42:38 2009
@@ -34,14 +34,14 @@
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.io.SplittableBufferedWriter;
 import org.apache.hadoop.sqoop.util.AsyncSink;
 import org.apache.hadoop.sqoop.util.DirectImportUtils;
 import org.apache.hadoop.sqoop.util.ErrorableAsyncSink;
 import org.apache.hadoop.sqoop.util.ErrorableThread;
 import org.apache.hadoop.sqoop.util.Executor;
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.util.ImportException;
 import org.apache.hadoop.sqoop.util.JdbcUrl;
 import org.apache.hadoop.sqoop.util.LoggingAsyncSink;
 import org.apache.hadoop.sqoop.util.PerfCounters;
@@ -53,7 +53,7 @@
 public class DirectPostgresqlManager extends PostgresqlManager {
   public static final Log LOG = LogFactory.getLog(DirectPostgresqlManager.class.getName());
 
-  public DirectPostgresqlManager(final ImportOptions opts) {
+  public DirectPostgresqlManager(final SqoopOptions opts) {
     // Inform superclass that we're overriding import method via alt. constructor.
     super(opts, true);
   }
@@ -66,9 +66,9 @@
   static class PostgresqlAsyncSink extends ErrorableAsyncSink {
     private final SplittableBufferedWriter writer;
     private final PerfCounters counters;
-    private final ImportOptions options;
+    private final SqoopOptions options;
 
-    PostgresqlAsyncSink(final SplittableBufferedWriter w, final ImportOptions opts,
+    PostgresqlAsyncSink(final SplittableBufferedWriter w, final SqoopOptions opts,
         final PerfCounters ctrs) {
       this.writer = w;
       this.options = opts;
@@ -85,11 +85,11 @@
 
       private final SplittableBufferedWriter writer;
       private final InputStream stream;
-      private final ImportOptions options;
+      private final SqoopOptions options;
       private final PerfCounters counters;
 
       PostgresqlStreamThread(final InputStream is, final SplittableBufferedWriter w,
-          final ImportOptions opts, final PerfCounters ctrs) {
+          final SqoopOptions opts, final PerfCounters ctrs) {
         this.stream = is;
         this.writer = w;
         this.options = opts;
@@ -278,15 +278,15 @@
    * via COPY FILE TO STDOUT.
    */
   public void importTable(ImportJobContext context)
-    throws IOException, ImportError {
+    throws IOException, ImportException {
 
     String tableName = context.getTableName();
     String jarFile = context.getJarFile();
-    ImportOptions options = context.getOptions();
+    SqoopOptions options = context.getOptions();
 
     LOG.info("Beginning psql fast path import");
 
-    if (options.getFileLayout() != ImportOptions.FileLayout.TextFile) {
+    if (options.getFileLayout() != SqoopOptions.FileLayout.TextFile) {
       // TODO(aaron): Support SequenceFile-based load-in
       LOG.warn("File import layout" + options.getFileLayout()
           + " is not supported by");
@@ -323,7 +323,7 @@
       int port = JdbcUrl.getPort(connectString);
 
       if (null == databaseName) {
-        throw new ImportError("Could not determine database name");
+        throw new ImportException("Could not determine database name");
       }
 
       LOG.info("Performing import of table " + tableName + " from database " + databaseName);

Added: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ExportJobContext.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ExportJobContext.java?rev=888144&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ExportJobContext.java (added)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ExportJobContext.java Mon Dec  7 21:42:38 2009
@@ -0,0 +1,56 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.sqoop.manager;
+
+import org.apache.hadoop.sqoop.SqoopOptions;
+
+/**
+ * A set of parameters describing an export operation; this is passed to
+ * ConnManager.exportTable() as its argument.
+ */
+public class ExportJobContext {
+
+  private String tableName;
+  private String jarFile;
+  private SqoopOptions options;
+
+  public ExportJobContext(final String table, final String jar, final SqoopOptions opts) {
+    this.tableName = table;
+    this.jarFile = jar;
+    this.options = opts;
+  }
+
+  /** @return the name of the table to export. */
+  public String getTableName() {
+    return tableName;
+  }
+
+  /** @return the name of the jar file containing the user's compiled
+   * ORM classes to use during the export.
+   */
+  public String getJarFile() {
+    return jarFile;
+  }
+
+  /** @return the SqoopOptions configured by the user */
+  public SqoopOptions getOptions() {
+    return options;
+  }
+}
+
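
On the driver side, the context is built from the parsed options and the generated jar,
then handed to the connection manager. A small usage sketch using only the accessors
defined here; the surrounding ExportInvocationSketch class, its runExport() method, and
the variable names are illustrative, not part of the patch:

    import java.io.IOException;

    import org.apache.hadoop.sqoop.SqoopOptions;
    import org.apache.hadoop.sqoop.manager.ConnManager;
    import org.apache.hadoop.sqoop.manager.ExportJobContext;
    import org.apache.hadoop.sqoop.util.ExportException;

    public class ExportInvocationSketch {

      /** Illustrative: run an export and report managers that cannot export. */
      public static void runExport(ConnManager manager, SqoopOptions options,
          String jarFile) throws IOException {
        ExportJobContext context = new ExportJobContext(
            options.getTableName(), jarFile, options);
        try {
          manager.exportTable(context);
        } catch (ExportException ee) {
          // Managers that keep the default exportTable() land here with
          // "This database does not support exports".
          System.err.println("Export failed: " + ee.getMessage());
        }
      }
    }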

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/GenericJdbcManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/GenericJdbcManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/GenericJdbcManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/GenericJdbcManager.java Mon Dec  7 21:42:38 2009
@@ -24,7 +24,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 
 /**
  * Database manager that connects to a generic JDBC-compliant
@@ -38,7 +38,7 @@
   private String jdbcDriverClass;
   private Connection connection;
 
-  public GenericJdbcManager(final String driverClass, final ImportOptions opts) {
+  public GenericJdbcManager(final String driverClass, final SqoopOptions opts) {
     super(opts);
 
     this.jdbcDriverClass = driverClass;

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/HsqldbManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/HsqldbManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/HsqldbManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/HsqldbManager.java Mon Dec  7 21:42:38 2009
@@ -21,7 +21,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 
 /**
  * Manages connections to hsqldb databases.
@@ -38,7 +38,7 @@
   // "PUBLIC";
   private static final String HSQL_SCHEMA_NAME = "PUBLIC";
 
-  public HsqldbManager(final ImportOptions opts) {
+  public HsqldbManager(final SqoopOptions opts) {
     super(DRIVER_CLASS, opts);
   }
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ImportJobContext.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ImportJobContext.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ImportJobContext.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ImportJobContext.java Mon Dec  7 21:42:38 2009
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.sqoop.manager;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 
 /**
  * A set of parameters describing an import operation; this is passed to
@@ -28,9 +28,9 @@
 
   private String tableName;
   private String jarFile;
-  private ImportOptions options;
+  private SqoopOptions options;
 
-  public ImportJobContext(final String table, final String jar, final ImportOptions opts) {
+  public ImportJobContext(final String table, final String jar, final SqoopOptions opts) {
     this.tableName = table;
     this.jarFile = jar;
     this.options = opts;
@@ -48,8 +48,8 @@
     return jarFile;
   }
 
-  /** @return the ImportOptions configured by the user */
-  public ImportOptions getOptions() {
+  /** @return the SqoopOptions configured by the user */
+  public SqoopOptions getOptions() {
     return options;
   }
 }

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/LocalMySQLManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/LocalMySQLManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/LocalMySQLManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/LocalMySQLManager.java Mon Dec  7 21:42:38 2009
@@ -34,7 +34,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.io.SplittableBufferedWriter;
 import org.apache.hadoop.sqoop.lib.FieldFormatter;
 import org.apache.hadoop.sqoop.lib.RecordParser;
@@ -42,7 +42,7 @@
 import org.apache.hadoop.sqoop.util.DirectImportUtils;
 import org.apache.hadoop.sqoop.util.ErrorableAsyncSink;
 import org.apache.hadoop.sqoop.util.ErrorableThread;
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.util.ImportException;
 import org.apache.hadoop.sqoop.util.JdbcUrl;
 import org.apache.hadoop.sqoop.util.LoggingAsyncSink;
 import org.apache.hadoop.sqoop.util.PerfCounters;
@@ -154,11 +154,11 @@
    */
   static class ReparsingAsyncSink extends ErrorableAsyncSink {
     private final SplittableBufferedWriter writer;
-    private final ImportOptions options;
+    private final SqoopOptions options;
     private final PerfCounters counters;
 
     ReparsingAsyncSink(final SplittableBufferedWriter w,
-        final ImportOptions opts, final PerfCounters ctrs) {
+        final SqoopOptions opts, final PerfCounters ctrs) {
       this.writer = w;
       this.options = opts;
       this.counters = ctrs;
@@ -174,12 +174,12 @@
           ReparsingStreamThread.class.getName());
 
       private final SplittableBufferedWriter writer;
-      private final ImportOptions options;
+      private final SqoopOptions options;
       private final InputStream stream;
       private final PerfCounters counters;
 
       ReparsingStreamThread(final InputStream is,
-          final SplittableBufferedWriter w, final ImportOptions opts,
+          final SplittableBufferedWriter w, final SqoopOptions opts,
           final PerfCounters ctrs) {
         this.writer = w;
         this.options = opts;
@@ -291,7 +291,7 @@
   }
 
 
-  public LocalMySQLManager(final ImportOptions options) {
+  public LocalMySQLManager(final SqoopOptions options) {
     super(options, false);
   }
 
@@ -343,15 +343,15 @@
    * the database and upload the files directly to HDFS.
    */
   public void importTable(ImportJobContext context)
-      throws IOException, ImportError {
+      throws IOException, ImportException {
 
     String tableName = context.getTableName();
     String jarFile = context.getJarFile();
-    ImportOptions options = context.getOptions();
+    SqoopOptions options = context.getOptions();
 
     LOG.info("Beginning mysqldump fast path import");
 
-    if (options.getFileLayout() != ImportOptions.FileLayout.TextFile) {
+    if (options.getFileLayout() != SqoopOptions.FileLayout.TextFile) {
       // TODO(aaron): Support SequenceFile-based load-in
       LOG.warn("File import layout " + options.getFileLayout()
           + " is not supported by");
@@ -370,7 +370,7 @@
     int port = JdbcUrl.getPort(connectString);
 
     if (null == databaseName) {
-      throw new ImportError("Could not determine database name");
+      throw new ImportException("Could not determine database name");
     }
 
     LOG.info("Performing import of table " + tableName + " from database " + databaseName);

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ManagerFactory.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ManagerFactory.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ManagerFactory.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/ManagerFactory.java Mon Dec  7 21:42:38 2009
@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.sqoop.manager;
 
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 
 /**
  * Interface for factory classes for ConnManager implementations.
@@ -28,6 +28,6 @@
  * one such call returns a non-null ConnManager instance.
  */
 public abstract class ManagerFactory {
-  public abstract ConnManager accept(ImportOptions options);
+  public abstract ConnManager accept(SqoopOptions options);
 }
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/MySQLManager.java Mon Dec  7 21:42:38 2009
@@ -28,8 +28,8 @@
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.sqoop.ImportOptions;
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.util.ImportException;
 
 /**
  * Manages connections to MySQL databases
@@ -44,11 +44,11 @@
   // set to true after we warn the user that we can use direct fastpath.
   private static boolean warningPrinted = false;
 
-  public MySQLManager(final ImportOptions opts) {
+  public MySQLManager(final SqoopOptions opts) {
     super(DRIVER_CLASS, opts);
   }
 
-  protected MySQLManager(final ImportOptions opts, boolean ignored) {
+  protected MySQLManager(final SqoopOptions opts, boolean ignored) {
     // constructor used by subclasses to avoid the --direct warning.
     super(DRIVER_CLASS, opts);
   }
@@ -93,7 +93,7 @@
 
   @Override
   public void importTable(ImportJobContext context)
-        throws IOException, ImportError {
+        throws IOException, ImportException {
 
     // Check that we're not doing a MapReduce from localhost. If we are, point
     // out that we could use mysqldump.

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/OracleManager.java Mon Dec  7 21:42:38 2009
@@ -29,9 +29,9 @@
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.sqoop.ImportOptions;
+import org.apache.hadoop.sqoop.SqoopOptions;
 import org.apache.hadoop.sqoop.mapred.ImportJob;
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.util.ImportException;
 
 /**
  * Manages connections to Oracle databases.
@@ -44,7 +44,7 @@
   // driver class to ensure is loaded when making db connection.
   private static final String DRIVER_CLASS = "oracle.jdbc.OracleDriver";
 
-  public OracleManager(final ImportOptions opts) {
+  public OracleManager(final SqoopOptions opts) {
     super(DRIVER_CLASS, opts);
   }
 
@@ -91,11 +91,11 @@
    * because DataDrivenDBInputFormat does not currently work with Oracle.
    */
   public void importTable(ImportJobContext context)
-      throws IOException, ImportError {
+      throws IOException, ImportException {
 
     String tableName = context.getTableName();
     String jarFile = context.getJarFile();
-    ImportOptions options = context.getOptions();
+    SqoopOptions options = context.getOptions();
     ImportJob importer = new ImportJob(options);
     String splitCol = options.getSplitByCol();
     if (null == splitCol) {
@@ -105,7 +105,7 @@
 
     if (null == splitCol) {
       // Can't infer a primary key.
-      throw new ImportError("No primary key could be found for table " + tableName
+      throw new ImportException("No primary key could be found for table " + tableName
           + ". Please specify one with --split-by.");
     }
 

Modified: hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java?rev=888144&r1=888143&r2=888144&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/sqoop/src/java/org/apache/hadoop/sqoop/manager/PostgresqlManager.java Mon Dec  7 21:42:38 2009
@@ -28,8 +28,8 @@
 import org.apache.commons.logging.LogFactory;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.sqoop.ImportOptions;
-import org.apache.hadoop.sqoop.util.ImportError;
+import org.apache.hadoop.sqoop.SqoopOptions;
+import org.apache.hadoop.sqoop.util.ImportException;
 
 /**
  * Manages connections to Postgresql databases
@@ -46,11 +46,11 @@
   // set to true after we warn the user that we can use direct fastpath.
   private static boolean warningPrinted = false;
 
-  public PostgresqlManager(final ImportOptions opts) {
+  public PostgresqlManager(final SqoopOptions opts) {
     super(DRIVER_CLASS, opts);
   }
 
-  protected PostgresqlManager(final ImportOptions opts, boolean ignored) {
+  protected PostgresqlManager(final SqoopOptions opts, boolean ignored) {
     // constructor used by subclasses to avoid the --direct warning.
     super(DRIVER_CLASS, opts);
   }
@@ -72,7 +72,7 @@
 
   @Override
   public void importTable(ImportJobContext context)
-        throws IOException, ImportError {
+        throws IOException, ImportException {
 
     // The user probably should have requested --direct to invoke pg_dump.
     // Display a warning informing them of this fact.