You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hbase.apache.org by st...@apache.org on 2015/05/11 19:03:53 UTC

hbase git commit: HBASE-13599 The Example Provided in Section 69: Examples of the Documentation Does Not Compile

Repository: hbase
Updated Branches:
  refs/heads/master e1628106a -> 671ae8f15


HBASE-13599 The Example Provided in Section 69: Examples of the
Documentation Does Not Compile

Reapply after fixing the commit message.

Signed-off-by: stack <st...@apache.org>


Project: http://git-wip-us.apache.org/repos/asf/hbase/repo
Commit: http://git-wip-us.apache.org/repos/asf/hbase/commit/671ae8f1
Tree: http://git-wip-us.apache.org/repos/asf/hbase/tree/671ae8f1
Diff: http://git-wip-us.apache.org/repos/asf/hbase/diff/671ae8f1

Branch: refs/heads/master
Commit: 671ae8f150ceeb24fcc99743c865fccfb772b130
Parents: e162810
Author: Lars Francke <la...@gmail.com>
Authored: Thu Apr 30 09:37:49 2015 +0200
Committer: stack <st...@apache.org>
Committed: Mon May 11 10:03:19 2015 -0700

----------------------------------------------------------------------
 src/main/asciidoc/_chapters/hbase_apis.adoc | 109 +++++++++++------------
 1 file changed, 53 insertions(+), 56 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hbase/blob/671ae8f1/src/main/asciidoc/_chapters/hbase_apis.adoc
----------------------------------------------------------------------
diff --git a/src/main/asciidoc/_chapters/hbase_apis.adoc b/src/main/asciidoc/_chapters/hbase_apis.adoc
index 85dbad1..6d2777b 100644
--- a/src/main/asciidoc/_chapters/hbase_apis.adoc
+++ b/src/main/asciidoc/_chapters/hbase_apis.adoc
@@ -36,102 +36,99 @@ See <<external_apis>> for more information.
 
 == Examples
 
-.Create a Table Using Java
+.Create, modify, and delete a Table Using Java
 ====
 
 [source,java]
 ----
 package com.example.hbase.admin;
 
 import java.io.IOException;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.io.compress.Compression.Algorithm;
-import org.apache.hadoop.conf.Configuration;
 
-import static com.example.hbase.Constants.*;
+public class Example {
 
-public class CreateSchema {
+  private static final String TABLE_NAME = "MY_TABLE_NAME_TOO";
+  private static final String CF_DEFAULT = "DEFAULT_COLUMN_FAMILY";
 
   public static void createOrOverwrite(Admin admin, HTableDescriptor table) throws IOException {
-    if (admin.tableExists(table.getName())) {
-      admin.disableTable(table.getName());
-      admin.deleteTable(table.getName());
+    if (admin.tableExists(table.getTableName())) {
+      admin.disableTable(table.getTableName());
+      admin.deleteTable(table.getTableName());
     }
     admin.createTable(table);
   }
 
-  public static void createSchemaTables (Configuration config) {
-    try {
-      final Admin admin = new Admin(config);
+  public static void createSchemaTables(Configuration config) throws IOException {
+    try (Connection connection = ConnectionFactory.createConnection(config);
+         Admin admin = connection.getAdmin()) {
+
       HTableDescriptor table = new HTableDescriptor(TableName.valueOf(TABLE_NAME));
       table.addFamily(new HColumnDescriptor(CF_DEFAULT).setCompressionType(Algorithm.SNAPPY));
 
       System.out.print("Creating table. ");
       createOrOverwrite(admin, table);
       System.out.println(" Done.");
-
-      admin.close();
-    } catch (Exception e) {
-      e.printStackTrace();
-      System.exit(-1);
     }
   }
 
-}
-----
-====
-
-.Add, Modify, and Delete a Table
-====
-
-[source,java]
-----
-public static void upgradeFrom0 (Configuration config) {
-
-  try {
-    final Admin admin = new Admin(config);
-    TableName tableName = TableName.valueOf(TABLE_ASSETMETA);
-    HTableDescriptor table_assetmeta = new HTableDescriptor(tableName);
-    table_assetmeta.addFamily(new HColumnDescriptor(CF_DEFAULT).setCompressionType(Algorithm.SNAPPY));
+  public static void modifySchema (Configuration config) throws IOException {
+    try (Connection connection = ConnectionFactory.createConnection(config);
+         Admin admin = connection.getAdmin()) {
 
-    // Create a new table.
+      TableName tableName = TableName.valueOf(TABLE_NAME);
+      if (!admin.tableExists(tableName)) {
+        System.out.println("Table does not exist.");
+        System.exit(-1);
+      }
 
-    System.out.print("Creating table_assetmeta. ");
-    admin.createTable(table_assetmeta);
-    System.out.println(" Done.");
+      HTableDescriptor table = new HTableDescriptor(tableName);
 
-    // Update existing table
-    HColumnDescriptor newColumn = new HColumnDescriptor("NEWCF");
-    newColumn.setCompactionCompressionType(Algorithm.GZ);
-    newColumn.setMaxVersions(HConstants.ALL_VERSIONS);
-    admin.addColumn(tableName, newColumn);
+      // Update existing table
+      HColumnDescriptor newColumn = new HColumnDescriptor("NEWCF");
+      newColumn.setCompactionCompressionType(Algorithm.GZ);
+      newColumn.setMaxVersions(HConstants.ALL_VERSIONS);
+      admin.addColumn(tableName, newColumn);
 
-    // Update existing column family
-    HColumnDescriptor existingColumn = new HColumnDescriptor(CF_DEFAULT);
-    existingColumn.setCompactionCompressionType(Algorithm.GZ);
-    existingColumn.setMaxVersions(HConstants.ALL_VERSIONS);
-    table_assetmeta.modifyFamily(existingColumn)
-    admin.modifyTable(tableName, table_assetmeta);
+      // Update existing column family
+      HColumnDescriptor existingColumn = new HColumnDescriptor(CF_DEFAULT);
+      existingColumn.setCompactionCompressionType(Algorithm.GZ);
+      existingColumn.setMaxVersions(HConstants.ALL_VERSIONS);
+      table.modifyFamily(existingColumn);
+      admin.modifyTable(tableName, table);
 
-    // Disable an existing table
-    admin.disableTable(tableName);
+      // Disable an existing table
+      admin.disableTable(tableName);
 
-    // Delete an existing column family
-    admin.deleteColumn(tableName, CF_DEFAULT);
+      // Delete an existing column family
+      admin.deleteColumn(tableName, CF_DEFAULT.getBytes("UTF-8"));
 
-    // Delete a table (Need to be disabled first)
-    admin.deleteTable(tableName);
+      // Delete a table (Need to be disabled first)
+      admin.deleteTable(tableName);
+    }
+  }
 
+  public static void main(String... args) throws IOException {
+    Configuration config = HBaseConfiguration.create();
 
-    admin.close();
-  } catch (Exception e) {
-    e.printStackTrace();
-    System.exit(-1);
+    //Add any necessary configuration files (hbase-site.xml, core-site.xml)
+    config.addResource(new Path(System.getenv("HBASE_CONF_DIR"), "hbase-site.xml"));
+    config.addResource(new Path(System.getenv("HADOOP_CONF_DIR"), "core-site.xml"));
+    createSchemaTables(config);
+    modifySchema(config);
   }
 }
 ----