You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by st...@apache.org on 2007/09/13 06:01:07 UTC
svn commit: r575156 [1/3] - in /lucene/hadoop/trunk/src/contrib/hbase: ./
src/java/org/apache/hadoop/hbase/ src/java/org/apache/hadoop/hbase/shell/
src/java/org/apache/hadoop/hbase/shell/generated/
src/test/org/apache/hadoop/hbase/ src/test/org/apache/...
Author: stack
Date: Wed Sep 12 21:01:05 2007
New Revision: 575156
URL: http://svn.apache.org/viewvc?rev=575156&view=rev
Log:
HADOOP-1720 Addition of HQL (Hbase Query Language) support in Hbase Shell
Added:
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/TestConsoleTable.java
Modified:
lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SelectCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ShowCommand.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/Parser.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserConstants.java
lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/generated/ParserTokenManager.java
lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/TestToString.java
lucene/hadoop/trunk/src/contrib/hbase/src/test/org/apache/hadoop/hbase/shell/TestHBaseShell.java
Modified: lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/CHANGES.txt Wed Sep 12 21:01:05 2007
@@ -13,6 +13,11 @@
older than passed timestamp. Fixed compaction so deleted cells
do not make it out into compacted output. Ensure also that
versions > column max are dropped compacting.
+ HADOOP-1720 Addition of HQL (Hbase Query Language) support in Hbase Shell.
+ The old shell syntax has been replaced by HQL, a small SQL-like
+ set of operators, for creating, altering, dropping, inserting,
+ deleting, and selecting, etc., data in hbase.
+ (Inchul Song and Edward Yoon via Stack)
OPTIMIZATIONS
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HColumnDescriptor.java Wed Sep 12 21:01:05 2007
@@ -214,10 +214,13 @@
/** {@inheritDoc} */
@Override
public String toString() {
- return "(" + name + ", max versions: " + maxVersions + ", compression: "
- + this.compressionType + ", in memory: " + inMemory +
- ", max value length: " + maxValueLength + ", bloom filter: "
- + (bloomFilterSpecified ? bloomFilter.toString() : "none") + ")";
+ // Output a name minus ':'.
+ String tmp = name.toString();
+ return "{name: " + tmp.substring(0, tmp.length() - 1) +
+ ", max versions: " + maxVersions +
+ ", compression: " + this.compressionType + ", in memory: " + inMemory +
+ ", max length: " + maxValueLength + ", bloom filter: " +
+ (bloomFilterSpecified ? bloomFilter.toString() : "none") + "}";
}
/** {@inheritDoc} */
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HRegiondirReader.java Wed Sep 12 21:01:05 2007
@@ -171,7 +171,7 @@
HRegion r = new HRegion(this.parentdir, null,
FileSystem.get(this.conf), conf, info, null);
Text [] families = info.tableDesc.families().keySet().toArray(
- new Text [info.tableDesc.families.size()]);
+ new Text [info.tableDesc.getFamilies().size()]);
HInternalScannerInterface scanner =
r.getScanner(families, new Text(), System.currentTimeMillis(), null);
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/HTableDescriptor.java Wed Sep 12 21:01:05 2007
@@ -22,8 +22,10 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
+import java.util.Collections;
import java.util.Iterator;
import java.util.Map;
+import java.util.SortedMap;
import java.util.TreeMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@@ -36,8 +38,9 @@
* column families.
*/
public class HTableDescriptor implements WritableComparable {
- Text name;
- TreeMap<Text, HColumnDescriptor> families;
+ private Text name;
+ // TODO: Does this need to be a treemap? Can it be a HashMap?
+ private final TreeMap<Text, HColumnDescriptor> families;
/*
* Legal table names can only contain 'word characters':
@@ -182,5 +185,12 @@
}
}
return result;
+ }
+
+ /**
+ * @return Immutable sorted map of families.
+ */
+ public SortedMap<Text, HColumnDescriptor> getFamilies() {
+ return Collections.unmodifiableSortedMap(this.families);
}
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/Shell.java Wed Sep 12 21:01:05 2007
@@ -34,18 +34,19 @@
/**
* An hbase shell.
*
- * @see <a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseShell">HBaseShell</a>
+ * @see <a href="http://wiki.apache.org/lucene-hadoop/Hbase/HbaseShell">HbaseShell</a>
*/
public class Shell {
/** audible keyboard bells */
public static final boolean DEFAULT_BELL_ENABLED = true;
- /** Main method
- *
+ /**
+ * Main method
* @param args not used
* @throws IOException
*/
- public static void main(@SuppressWarnings("unused") String args[]) throws IOException {
+ public static void main(@SuppressWarnings("unused") String args[])
+ throws IOException {
Configuration conf = new HBaseConfiguration();
ConsoleReader reader = new ConsoleReader();
reader.setBellEnabled(conf.getBoolean("hbaseshell.jline.bell.enabled",
@@ -73,7 +74,6 @@
}
long end = System.currentTimeMillis();
-
if (rs != null && rs.getType() > -1)
System.out.println(rs.getMsg()
+ executeTime((rs.getType() == 1), start, end));
@@ -92,7 +92,7 @@
/** Return the string of prompt start string */
private static String getPrompt(final StringBuilder queryStr) {
- return (queryStr.toString().equals("")) ? "HBase > " : " --> ";
+ return (queryStr.toString().equals("")) ? "Hbase > " : " --> ";
}
/**
@@ -105,4 +105,4 @@
"(" + String.format("%.2f", (end - start) * 0.001) + " sec)" :
"";
}
-}
+}
\ No newline at end of file
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java?rev=575156&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/AlterCommand.java Wed Sep 12 21:01:05 2007
@@ -0,0 +1,127 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.io.Text;
+
+/**
+ * Alters tables.
+ */
+public class AlterCommand extends SchemaModificationCommand {
+
+ public enum OperationType {ADD, DROP, CHANGE, NOOP}
+ private OperationType operationType = OperationType.NOOP;
+ private Map<String, Map<String, Object>> columnSpecMap =
+ new HashMap<String, Map<String, Object>>();
+ private String table;
+ private String column; // column to be dropped
+
+ public ReturnMsg execute(Configuration conf) {
+ try {
+ HBaseAdmin admin = new HBaseAdmin(conf);
+ Set<String> columns = null;
+ HColumnDescriptor columnDesc = null;
+ switch (operationType) {
+ case ADD:
+ disableTable(admin, table);
+ columns = columnSpecMap.keySet();
+ for (String c : columns) {
+ columnDesc = getColumnDescriptor(c, columnSpecMap.get(c));
+ System.out.println("Adding " + c + " to " + table +
+ "... Please wait.");
+ admin.addColumn(new Text(table), columnDesc);
+ }
+ enableTable(admin, table);
+ break;
+ case DROP:
+ disableTable(admin, table);
+ System.out.println("Dropping " + column + " from " + table +
+ "... Please wait.");
+ column = appendDelimiter(column);
+ admin.deleteColumn(new Text(table), new Text(column));
+ enableTable(admin, table);
+ break;
+ case CHANGE:
+ // Not yet supported
+ return new ReturnMsg(0, "" + operationType + " is not yet supported.");
+ case NOOP:
+ return new ReturnMsg(0, "Invalid operation type.");
+ }
+ return new ReturnMsg(0, "Table altered successfully.");
+ } catch (Exception e) {
+ return new ReturnMsg(0, extractErrMsg(e));
+ }
+ }
+
+ private void disableTable(HBaseAdmin admin, String t) throws IOException {
+ System.out.println("Disabling " + t + "... Please wait.");
+ admin.disableTable(new Text(t));
+ }
+
+ private void enableTable(HBaseAdmin admin, String t) throws IOException {
+ System.out.println("Enabling " + t + "... Please wait.");
+ admin.enableTable(new Text(t));
+ }
+
+ /**
+ * Sets the table to be altered.
+ *
+ * @param t Table to be altered.
+ */
+ public void setTable(String t) {
+ this.table = t;
+ }
+
+ /**
+ * Adds a column specification.
+ *
+ * @param columnSpec Column specification
+ */
+ public void addColumnSpec(String c, Map<String, Object> columnSpec) {
+ columnSpecMap.put(c, columnSpec);
+ }
+
+ /**
+ * Sets the column to be dropped. Only applicable to the DROP operation.
+ *
+ * @param c Column to be dropped.
+ */
+ public void setColumn(String c) {
+ this.column = c;
+ }
+
+ /**
+ * Sets the operation type of this alteration.
+ *
+ * @param operationType Operation type
+ * @see OperationType
+ */
+ public void setOperationType(OperationType operationType) {
+ this.operationType = operationType;
+ }
+}
\ No newline at end of file
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/BasicCommand.java Wed Sep 12 21:01:05 2007
@@ -33,4 +33,22 @@
return this;
}
+ protected String extractErrMsg(String msg) {
+ int index = msg.indexOf(":");
+ int eofIndex = msg.indexOf("\n");
+ return msg.substring(index + 1, eofIndex);
+ }
+
+ protected String extractErrMsg(Exception e) {
+ return extractErrMsg(e.getMessage());
+ }
+
+ /**
+ * Appends, if it does not exist, a delimiter (colon)
+ * at the end of the column name.
+ */
+ protected String appendDelimiter(String column) {
+ return (!column.endsWith(FAMILY_INDICATOR))?
+ column + FAMILY_INDICATOR: column;
+ }
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ClearCommand.java Wed Sep 12 21:01:05 2007
@@ -23,9 +23,11 @@
import org.apache.hadoop.conf.Configuration;
+/**
+ * Clears the console screen.
+ */
public class ClearCommand extends BasicCommand {
-
- public ReturnMsg execute(Configuration conf) {
+ public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
clear();
return null;
}
@@ -42,5 +44,4 @@
System.out.print("\033c");
}
}
-
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/Command.java Wed Sep 12 21:01:05 2007
@@ -22,11 +22,12 @@
import org.apache.hadoop.conf.Configuration;
public interface Command {
-
/** family indicator */
public static final String FAMILY_INDICATOR = ":";
- /** Execute a command */
+ /** Execute a command
+ * @param conf Configuration
+ * @return Result of command execution
+ */
public ReturnMsg execute(Configuration conf);
-
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CommandFactory.java Wed Sep 12 21:01:05 2007
@@ -23,7 +23,5 @@
* Parser uses command factories to create command.
*/
public interface CommandFactory {
-
Command getCommand();
-
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ConsoleTable.java Wed Sep 12 21:01:05 2007
@@ -26,7 +26,11 @@
* Manufactures console table, but stupid.
*/
public class ConsoleTable {
+
private static PrintStream out;
+ private static final String sBar = "+------+----------------------+";
+ private static final String lBar = "----------------------+----------------------+";
+
static {
try {
out = new PrintStream(System.out, true, "UTF-8");
@@ -36,29 +40,24 @@
}
public static void printHead(String name) {
- out.println("+------+----------------------+");
+ out.println(sBar);
out.print("| No. | ");
- out.printf("%-20s", name);
- out.println(" |");
+ printCell(name, " |", true);
}
public static void printFoot() {
- out.println("+------+----------------------+");
+ out.println(sBar);
out.println();
}
public static void printTable(int count, String name) {
- out.println("+------+----------------------+");
-
+ out.println(sBar);
if (name.length() > 20) {
int interval = 20;
-
out.print("| ");
- out.printf("%-4s", count + 1);
+ out.printf("%-4s", Integer.valueOf(count + 1));
out.print(" | ");
- out.printf("%-20s", name.substring(0, interval));
- out.println(" |");
-
+ printCell(name.substring(0, interval), " |", true);
for (int i = 0; i < name.length() / interval; i++) {
out.print("| ");
out.printf("%-4s", "");
@@ -66,64 +65,42 @@
int end = ((interval * i) + interval + interval);
if (end > name.length()) {
- out.printf("%-20s", name.substring(end - interval,
- name.length()));
+ printCell(name.substring(end - interval, name.length()), " |", true);
} else {
- out.printf("%-20s", name.substring(end - interval, end));
+ printCell(name.substring(end - interval, end), " |", true);
}
- out.println(" |");
}
-
} else {
out.print("| ");
- out.printf("%-4s", count + 1);
+ out.printf("%-4s", Integer.valueOf(count + 1));
out.print(" | ");
- out.printf("%-20s", name);
- out.println(" |");
+ printCell(name, " |", true);
}
}
public static void selectHead() {
- out.println("+------+----------------------+" +
- "----------------------+----------------------+");
+ out.println(sBar + lBar);
out.print("| No. | ");
- out.printf("%-20s", "Row");
- out.printf(" | ");
- out.printf("%-20s", "Column");
- out.printf(" | ");
- out.printf("%-20s", "Cell");
- out.println(" | ");
+ printCell("Row", " | ", false);
+ printCell("Column", " | ", false);
+ printCell("Cell", " | ", true);
}
public static void printLine(int count, String key, String column,
String cellData) {
- out.println("+------+----------------------+" +
- "----------------------+----------------------+");
-
+ out.println(sBar + lBar);
if (key.length() > 20 || column.length() > 20 || cellData.length() > 20) {
int interval = 20;
out.print("| ");
- out.printf("%-4s", count + 1);
- out.print(" | ");
- if (key.length() > 20)
- out.printf("%-20s", key.substring(0, interval));
- else
- out.printf("%-20s", key);
+ out.printf("%-4s", Integer.valueOf(count + 1));
out.print(" | ");
- if (column.length() > 20)
- out.printf("%-20s", column.substring(0, interval));
- else
- out.printf("%-20s", column);
- out.print(" | ");
- if (cellData.length() > 20)
- out.printf("%-20s", cellData.substring(0, interval));
- else
- out.printf("%-20s", cellData);
- out.println(" |");
- // out.println(getBiggerInt(new int[]{ 3, 1, 9}));
+ printLongCell(key, interval);
+ printLongCell(column, interval);
+ printLongCell(cellData, interval);
+
int biggerStrLength = getBiggerInt(new int[] { key.length(),
- column.length(), cellData.length() });
+ column.length(), cellData.length() });
for (int i = 0; i < (biggerStrLength / interval); i++) {
out.print("| ");
@@ -132,58 +109,50 @@
int end = ((interval * i) + interval + interval);
- if (end > key.length()) {
- if (key.length() > interval && end - interval < key.length()) {
- out.printf("%-20s", key.substring(end - interval,
- key.length()));
- } else {
- out.printf("%-20s", "");
- }
- } else {
- out.printf("%-20s", key.substring(end - interval, end));
- }
-
- out.print(" | ");
-
- if (end > column.length()) {
- if (column.length() > interval && end - interval < column.length()) {
- out.printf("%-20s", column.substring(end - interval,
- column.length()));
- } else {
- out.printf("%-20s", "");
- }
- } else {
- out.printf("%-20s", column.substring(end - interval, end));
- }
-
- out.print(" | ");
- if (end > cellData.length()) {
- if (cellData.length() > interval &&
- end - interval < cellData.length()) {
- out.printf("%-20s",
- cellData.substring(end - interval, cellData.length()));
- } else {
- out.printf("%-20s", "");
- }
- } else {
- out.printf("%-20s", cellData.substring(end - interval, end));
- }
- out.println(" |");
+ printLongCellData(key, end, interval, false);
+ printLongCellData(column, end, interval, false);
+ printLongCellData(cellData, end, interval, false);
}
-
} else {
out.print("| ");
- out.printf("%-4s", count + 1);
- out.print(" | ");
- out.printf("%-20s", key);
+ out.printf("%-4s", Integer.valueOf(count + 1));
out.print(" | ");
- out.printf("%-20s", column);
- out.print(" | ");
- out.printf("%-20s", cellData);
- out.println(" |");
+ printCell(key, " | ", false);
+ printCell(column, " | ", false);
+ printCell(cellData, " |", true);
}
}
+ private static void printLongCellData(String key, int end, int interval,
+ boolean newLine) {
+ if (end > key.length()) {
+ if (key.length() > interval && end - interval < key.length()) {
+ out.printf("%-20s", key.substring(end - interval, key.length()));
+ } else {
+ out.printf("%-20s", "");
+ }
+ } else {
+ out.printf("%-20s", key.substring(end - interval, end));
+ }
+ out.print(" | ");
+ if (newLine)
+ out.println();
+ }
+
+ private static void printLongCell(String iKey, int interval) {
+ if (iKey.length() > 20)
+ printCell(iKey.substring(0, interval), " | ", true);
+ else
+ printCell(iKey, " | ", true);
+ }
+
+ private static void printCell(String data, String end, boolean newLine) {
+ out.printf("%-20s", data);
+ out.printf(end);
+ if (newLine)
+ out.println();
+ }
+
public static int getBiggerInt(int[] integers) {
int result = -1;
for (int i = 0; i < integers.length; i++) {
@@ -195,9 +164,7 @@
}
public static void selectFoot() {
- out.println("+------+----------------------+" +
- "----------------------+----------------------+");
+ out.println(sBar + lBar);
out.println();
}
-
-}
+}
\ No newline at end of file
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/CreateCommand.java Wed Sep 12 21:01:05 2007
@@ -19,60 +19,58 @@
*/
package org.apache.hadoop.hbase.shell;
-import java.io.IOException;
-import java.util.List;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Set;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HColumnDescriptor;
-import org.apache.hadoop.hbase.HConnection;
-import org.apache.hadoop.hbase.HConnectionManager;
import org.apache.hadoop.hbase.HTableDescriptor;
-import org.apache.hadoop.io.Text;
-public class CreateCommand extends BasicCommand {
+/**
+ * Creates tables.
+ */
+public class CreateCommand extends SchemaModificationCommand {
- private Text table;
- private List<String> columnfamilies;
- @SuppressWarnings("unused")
- private int limit;
+ private String tableName;
+ private Map<String, Map<String, Object>> columnSpecMap =
+ new HashMap<String, Map<String, Object>>();
public ReturnMsg execute(Configuration conf) {
- if (this.table == null || this.columnfamilies == null)
- return new ReturnMsg(0, "Syntax error : Please check 'Create' syntax.");
-
try {
- HConnection conn = HConnectionManager.getConnection(conf);
HBaseAdmin admin = new HBaseAdmin(conf);
-
- if (conn.tableExists(this.table)) {
- return new ReturnMsg(0, "Table was already exsits.");
- }
- HTableDescriptor desc = new HTableDescriptor(this.table.toString());
- for (int i = 0; i < this.columnfamilies.size(); i++) {
- String columnFamily = columnfamilies.get(i);
- if (columnFamily.lastIndexOf(':') == (columnFamily.length() - 1)) {
- columnFamily = columnFamily.substring(0, columnFamily.length() - 1);
- }
- desc.addFamily(new HColumnDescriptor(columnFamily + FAMILY_INDICATOR));
+ HTableDescriptor tableDesc = new HTableDescriptor(tableName);
+ HColumnDescriptor columnDesc = null;
+ Set<String> columns = columnSpecMap.keySet();
+ for (String column : columns) {
+ columnDesc = getColumnDescriptor(column, columnSpecMap.get(column));
+ tableDesc.addFamily(columnDesc);
}
- admin.createTable(desc);
- return new ReturnMsg(1, "Table created successfully.");
- } catch (IOException e) {
- return new ReturnMsg(0, "error msg : " + e.toString());
+
+ System.out.println("Creating table... Please wait.");
+
+ admin.createTable(tableDesc);
+ return new ReturnMsg(0, "Table created successfully.");
+ }
+ catch (Exception e) {
+ return new ReturnMsg(0, extractErrMsg(e));
}
}
+ /**
+ * Sets the table to be created.
+ * @param table Table to be created
+ */
public void setTable(String table) {
- this.table = new Text(table);
- }
-
- public void setColumnfamilies(List<String> columnfamilies) {
- this.columnfamilies = columnfamilies;
+ this.tableName = table;
}
- public void setLimit(int limit) {
- this.limit = limit;
- }
-
-}
+ /**
+ * Adds a column specification.
+ * @param columnSpec Column specification
+ */
+ public void addColumnSpec(String column, Map<String, Object> columnSpec) {
+ columnSpecMap.put(column, columnSpec);
+ }
+}
\ No newline at end of file
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DeleteCommand.java Wed Sep 12 21:01:05 2007
@@ -20,64 +20,84 @@
package org.apache.hadoop.hbase.shell;
import java.io.IOException;
+import java.util.ArrayList;
import java.util.List;
-import java.util.Map;
-import java.util.Set;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.hbase.HTable;
import org.apache.hadoop.io.Text;
+/**
+ * Deletes values from tables.
+ */
public class DeleteCommand extends BasicCommand {
- private Text table;
- private Map<String, List<String>> condition;
+ private String tableName;
+ private String rowKey;
+ private List<String> columnList;
public ReturnMsg execute(Configuration conf) {
- if (this.table == null || condition == null)
- return new ReturnMsg(0, "Syntax error : Please check 'Delete' syntax.");
-
+ if (columnList == null) {
+ throw new IllegalArgumentException("Column list is null");
+ }
try {
- HTable table = new HTable(conf, this.table);
- long lockId = table.startUpdate(getRow());
-
- if (getColumn() != null) {
- table.delete(lockId, getColumn());
- } else {
- Set<Text> keySet = table.getRow(getRow()).keySet();
- Text[] columnKey = keySet.toArray(new Text[keySet.size()]);
-
- for (int i = 0; i < columnKey.length; i++) {
- table.delete(lockId, columnKey[i]);
- }
+ HBaseAdmin admin = new HBaseAdmin(conf);
+ HTable hTable = new HTable(conf, new Text(tableName));
+ long lockID = hTable.startUpdate(new Text(rowKey));
+ for (Text column : getColumnList(admin, hTable)) {
+ hTable.delete(lockID, new Text(column));
}
-
- table.commit(lockId);
-
- return new ReturnMsg(1, "1 deleted successfully. ");
+ hTable.commit(lockID);
+ return new ReturnMsg(1, "Column(s) deleted successfully.");
} catch (IOException e) {
- return new ReturnMsg(0, "error msg : " + e.toString());
+ String[] msg = e.getMessage().split("[\n]");
+ return new ReturnMsg(0, msg[0]);
}
}
public void setTable(String table) {
- this.table = new Text(table);
+ this.tableName = table;
}
- public void setCondition(Map<String, List<String>> cond) {
- this.condition = cond;
+ public void setRow(String row) {
+ this.rowKey = row;
}
- public Text getRow() {
- return new Text(this.condition.get("row").get(1));
+ /**
+ * Sets the column list.
+ * @param columnList
+ */
+ public void setColumnList(List<String> columnList) {
+ this.columnList = columnList;
}
- public Text getColumn() {
- if (this.condition.containsKey("column")) {
- return new Text(this.condition.get("column").get(1));
- } else {
- return null;
+ /**
+ * @param admin
+ * @param hTable
+ * @return the column list.
+ */
+ public Text[] getColumnList(HBaseAdmin admin, HTable hTable) {
+ Text[] columns = null;
+ try {
+ if (this.columnList.contains("*")) {
+ columns = hTable.getRow(new Text(this.rowKey)).keySet().toArray(new Text[] {});
+ } else {
+ List<Text> tmpList = new ArrayList<Text>();
+ for (int i = 0; i < this.columnList.size(); i++) {
+ Text column = null;
+ if (this.columnList.get(i).contains(":"))
+ column = new Text(this.columnList.get(i));
+ else
+ column = new Text(this.columnList.get(i) + ":");
+
+ tmpList.add(column);
+ }
+ columns = tmpList.toArray(new Text[] {});
+ }
+ } catch (IOException e) {
+ e.printStackTrace();
}
+ return columns;
}
-
-}
+}
\ No newline at end of file
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DescCommand.java Wed Sep 12 21:01:05 2007
@@ -22,42 +22,42 @@
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConnection;
import org.apache.hadoop.hbase.HConnectionManager;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.io.Text;
+/**
+ * Prints information about tables.
+ */
public class DescCommand extends BasicCommand {
- private Text table;
+ private Text tableName;
public ReturnMsg execute(Configuration conf) {
- if (this.table == null)
+ if (this.tableName == null)
return new ReturnMsg(0, "Syntax error : Please check 'Describe' syntax.");
-
try {
HConnection conn = HConnectionManager.getConnection(conf);
-
- if (!conn.tableExists(this.table)) {
+ if (!conn.tableExists(this.tableName)) {
return new ReturnMsg(0, "Table not found.");
}
-
- HTableDescriptor[] tables = conn.listTables();
- Text[] columns = null;
-
+ HTableDescriptor [] tables = conn.listTables();
+ HColumnDescriptor [] columns = null;
for (int i = 0; i < tables.length; i++) {
- if (tables[i].getName().equals(this.table)) {
- columns = tables[i].families().keySet().toArray(new Text[] {});
+ if (tables[i].getName().equals(this.tableName)) {
+ columns = tables[i].getFamilies().values().
+ toArray(new HColumnDescriptor [] {});
+ break;
}
}
-
- ConsoleTable.printHead("ColumnFamily Name");
+ ConsoleTable.printHead("ColumnFamily");
for (int ii = 0; ii < columns.length; ii++) {
- String familyName = columns[ii].toString().replace(FAMILY_INDICATOR, "");
- ConsoleTable.printTable(ii, familyName);
+ String tmp = columns[ii].toString();
+ ConsoleTable.printTable(ii, tmp.substring(1, tmp.length() - 1));
}
ConsoleTable.printFoot();
-
return new ReturnMsg(1, columns.length + " columnfamilie(s) found.");
} catch (IOException e) {
return new ReturnMsg(0, "error msg : " + e.toString());
@@ -65,7 +65,6 @@
}
public void setArgument(String table) {
- this.table = new Text(table);
- }
-
-}
+ this.tableName = new Text(table);
+ }
+}
\ No newline at end of file
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java?rev=575156&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DisableCommand.java Wed Sep 12 21:01:05 2007
@@ -0,0 +1,51 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.io.Text;
+
+/**
+ * Disables tables.
+ */
+public class DisableCommand extends BasicCommand {
+ private String tableName;
+
+ public ReturnMsg execute(Configuration conf) {
+ assert tableName != null;
+
+ try {
+ HBaseAdmin admin = new HBaseAdmin(conf);
+ admin.disableTable(new Text(tableName));
+
+ return new ReturnMsg(1, "Table disabled successfully.");
+ } catch (IOException e) {
+ String[] msg = e.getMessage().split("[\n]");
+ return new ReturnMsg(0, msg[0]);
+ }
+ }
+
+ public void setTable(String table) {
+ this.tableName = table;
+ }
+}
\ No newline at end of file
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/DropCommand.java Wed Sep 12 21:01:05 2007
@@ -20,31 +20,38 @@
package org.apache.hadoop.hbase.shell;
import java.io.IOException;
+import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseAdmin;
import org.apache.hadoop.io.Text;
+/**
+ * Drops tables.
+ */
public class DropCommand extends BasicCommand {
-
- private Text table;
+ private List<String> tableList;
public ReturnMsg execute(Configuration conf) {
- if (this.table == null)
- return new ReturnMsg(0, "Syntax error : Please check 'Drop' syntax.");
-
+ if (tableList == null) {
+ throw new IllegalArgumentException("List of tables is null");
+ }
+
try {
HBaseAdmin admin = new HBaseAdmin(conf);
- admin.deleteTable(this.table);
- return new ReturnMsg(1, "Table droped successfully.");
+ for (String table : tableList) {
+ System.out.println("Dropping " + table + "... Please wait.");
+ admin.deleteTable(new Text(table));
+ }
+
+ return new ReturnMsg(1, "Table(s) dropped successfully.");
} catch (IOException e) {
- return new ReturnMsg(0, "error msg : " + e.toString());
+ return new ReturnMsg(0, extractErrMsg(e));
}
}
- public void setArgument(String table) {
- this.table = new Text(table);
+ public void setTableList(List<String> tableList) {
+ this.tableList = tableList;
}
-
-}
+}
\ No newline at end of file
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java?rev=575156&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/EnableCommand.java Wed Sep 12 21:01:05 2007
@@ -0,0 +1,50 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.io.Text;
+
+/**
+ * Enables tables.
+ */
+public class EnableCommand extends BasicCommand {
+ private String tableName;
+
+ public ReturnMsg execute(Configuration conf) {
+ assert tableName != null;
+ try {
+ HBaseAdmin admin = new HBaseAdmin(conf);
+ admin.enableTable(new Text(tableName));
+
+ return new ReturnMsg(1, "Table enabled successfully.");
+ } catch (IOException e) {
+ String[] msg = e.getMessage().split("[\n]");
+ return new ReturnMsg(0, msg[0]);
+ }
+ }
+
+ public void setTable(String table) {
+ this.tableName = table;
+ }
+}
\ No newline at end of file
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ExitCommand.java Wed Sep 12 21:01:05 2007
@@ -23,9 +23,10 @@
public class ExitCommand extends BasicCommand {
- public ReturnMsg execute(Configuration conf) {
+ public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
+ // TODO: Is this the best way to exit? Would be a problem if shell is run
+ // inside another program -- St.Ack 09/11/2007
System.exit(1);
return null;
}
-
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/FsCommand.java Wed Sep 12 21:01:05 2007
@@ -25,10 +25,13 @@
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.util.ToolRunner;
+/**
+ * Run hadoop filesystem commands.
+ */
public class FsCommand extends BasicCommand {
private List<String> query;
- public ReturnMsg execute(Configuration conf) {
+ public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
FsShell shell = new FsShell();
try {
ToolRunner.run(shell, getQuery());
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HBaseShell.jj Wed Sep 12 21:01:05 2007
@@ -4,6 +4,8 @@
}
PARSER_BEGIN(Parser)
+package org.apache.hadoop.hbase.shell.generated;
+
/**
* Copyright 2007 The Apache Software Foundation
*
@@ -23,7 +25,6 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-package org.apache.hadoop.hbase.shell.generated;
import java.util.ArrayList;
import java.util.List;
@@ -60,43 +61,72 @@
| "\n"
}
-TOKEN:
+TOKEN: /** for HQL statements */
{
<HELP: "help">
- | <CLEAR: "clear">
- | <SHOW: "show">
- | <DESCRIBE: "describe">
- | <CREATE: "create">
- | <DROP: "drop">
- | <FS: "fs">
- | <EXIT: "exit">
- | <INSERT: "insert">
- | <DELETE: "delete">
- | <SELECT: "select">
- | <ROW: "row">
- | <COLUMN: "column">
- | <TIME: "time">
- | <VALUES: "values">
- | <COLUMNFAMILIES: "columnfamilies">
- | <WHERE: "where">
- | <LIMIT: "limit">
- | <AND: "and">
- | <OR: "or">
- | <COMMA: ",">
- | <DOT: ".">
- | <LPAREN: "(">
- | <RPAREN: ")">
- | <EQUALS: "=">
- | <NOTEQUAL: "<>">
- | <OPTIONS: "-">
-}
-
-TOKEN :
-{
- <ID: ["a"-"z","A"-"Z","_","-",".","/"] ( ["a"-"z","A"-"Z","_","0"-"9","-",".","/"] )* >
- | <NUM: ( ["0"-"9"] )+ >
- | <STRING: (["A"-"Z","a"-"z","0"-"9"])+ >
- | <QUOTED_STRING: "\"" (~["\""])+ "\"" >
+ | <ALTER: "alter">
+ | <CLEAR: "clear">
+ | <SHOW: "show">
+ | <DESCRIBE: "describe">
+ | <DESC: "desc">
+ | <CREATE: "create">
+ | <DROP: "drop">
+ | <FS: "fs">
+ | <EXIT: "exit">
+ | <INSERT: "insert">
+ | <INTO: "into">
+ | <TABLE: "table">
+ | <DELETE: "delete">
+ | <SELECT: "select">
+ | <ENABLE: "enable">
+ | <DISABLE: "disable">
+ | <STARTING: "starting">
+ | <WHERE: "where">
+ | <FROM: "from">
+ | <ROW: "row">
+ | <VALUES: "values">
+ | <COLUMNFAMILIES: "columnfamilies">
+ | <TIMESTAMP: "timestamp">
+ | <NUM_VERSIONS: "num_versions">
+ | <LIMIT: "limit">
+ | <AND: "and">
+ | <OR: "or">
+ | <COMMA: ",">
+ | <DOT: ".">
+ | <LPAREN: "(">
+ | <RPAREN: ")">
+ | <EQUALS: "=">
+ | <NOTEQUAL: "<>">
+ | <ASTERISK: "*">
+ | <MAX_VERSIONS: "max_versions">
+ | <MAX_LENGTH: "max_length">
+ | <COMPRESSION: "compression">
+ | <NONE: "none">
+ | <BLOCK: "block">
+ | <RECORD: "record">
+ | <IN_MEMORY: "in_memory">
+ | <BLOOMFILTER: "bloomfilter">
+ | <COUNTING_BLOOMFILTER: "counting_bloomfilter">
+ | <RETOUCHED_BLOOMFILTER: "retouched_bloomfilter">
+ | <VECTOR_SIZE: "vector_size">
+ | <NUM_HASH: "num_hash">
+ | <NUM_ENTRIES: "num_entries">
+ | <ADD: "add">
+ | <CHANGE: "change">
+}
+
+TOKEN : /** Literals */
+{
+ <ID: (["A"-"Z","a"-"z","_","-",".",":","/"])+ >
+ | <INTEGER_LITERAL: (["0"-"9"])+ >
+ | <FLOATING_POINT_LITERAL:
+ (["0"-"9"])+ "." (["0"-"9"])+ (<EXPONENT>)?
+ | "." (["0"-"9"])+ (<EXPONENT>)?
+ | (["0"-"9"])+ <EXPONENT>
+ | (["0"-"9"])+ (<EXPONENT>)?
+ >
+ | <#EXPONENT: ["e","E"] (["+","-"])? (["0"-"9"])+ >
+ | <QUOTED_IDENTIFIER: "\"" (~["\""])+ "\"" >
| <STRING_LITERAL: "'" (~["'"])* ( "''" (~["'"])* )* "'" >
}
@@ -120,17 +150,20 @@
}
{
(
- cmd = exitCommand()
- | cmd = helpCommand()
- | cmd = showCommand()
- | cmd = descCommand()
- | cmd = createCommand()
- | cmd = dropCommand()
- | cmd = insertCommand()
- | cmd = deleteCommand()
- | cmd = selectCommand()
- | cmd = clearCommand()
- | cmd = fsCommand()
+ cmd = exitCommand()
+ | cmd = helpCommand()
+ | cmd = showCommand()
+ | cmd = descCommand()
+ | cmd = createCommand()
+ | cmd = dropCommand()
+ | cmd = alterCommand()
+ | cmd = insertCommand()
+ | cmd = deleteCommand()
+ | cmd = selectCommand()
+ | cmd = enableCommand()
+ | cmd = disableCommand()
+ | cmd = clearCommand()
+ | cmd = fsCommand()
)
{
return cmd;
@@ -182,6 +215,7 @@
| t=<INSERT>
| t=<DELETE>
| t=<SELECT>
+ | t=<ALTER>
| t=<CLEAR>
| t=<FS>
| t=<ID>
@@ -202,9 +236,8 @@
{
<SHOW>
[
- argument = getString()
+ argument = Identifier()
]
-
{
show.setArgument(argument);
return show;
@@ -217,265 +250,352 @@
String argument = null;
}
{
- <DESCRIBE>
- [
- argument = getString()
- ]
-
+ ( <DESCRIBE> | <DESC> )
+ argument = Identifier()
{
- desc.setArgument(argument);
- return desc;
+ desc.setArgument(argument);
+ return desc;
}
}
-CreateCommand createCommand() :
+Map<String, Object> ColumnSpec() :
{
- CreateCommand create = new CreateCommand();
- String argument = null;
- List<String> columnfamilies = null;
- int limit = 1;
+ Map<String, Object> columnSpec = new HashMap<String, Object>();
+ int n = -1;
+ Token t = null;
}
{
- <CREATE>
- argument = getString()
+ (
+ <MAX_VERSIONS>
+ <EQUALS> n = Number()
{
- create.setTable(argument);
+ columnSpec.put("MAX_VERSIONS", n);
}
-
- <COLUMNFAMILIES>
- columnfamilies = getLiteralValues()
+ |
+ <MAX_LENGTH>
+ <EQUALS> n = Number()
+ {
+ columnSpec.put("MAX_LENGTH", n);
+ }
+ |
+ <COMPRESSION>
+ <EQUALS>
+ ( t=<NONE>
+ | t=<BLOCK>
+ | t=<RECORD> )
{
- create.setColumnfamilies(columnfamilies);
+ columnSpec.put("COMPRESSION", t.image.toString());
}
+ |
+ <IN_MEMORY>
+ {
+ columnSpec.put("IN_MEMORY", true);
+ }
+ |
+ <BLOOMFILTER>
+ <EQUALS>
+ ( t=<BLOOMFILTER>
+ | t=<COUNTING_BLOOMFILTER>
+ | t=<RETOUCHED_BLOOMFILTER>
+ )
+ {
+ columnSpec.put("BLOOMFILTER", t.image.toString());
+ }
+ |
+ <VECTOR_SIZE>
+ <EQUALS> n = Number()
+ {
+ columnSpec.put("VECTOR_SIZE", n);
+ }
+ |
+ <NUM_HASH>
+ <EQUALS> n = Number()
+ {
+ columnSpec.put("NUM_HASH", n);
+ }
+ |
+ <NUM_ENTRIES> <EQUALS> n = Number()
+ {
+ columnSpec.put("NUM_ENTRIES", n);
+ }
+ )*
- [ <LIMIT><EQUALS> limit = getInt() {
- try{
- create.setLimit(limit);
- }catch(ClassCastException ce) {
- throw generateParseException();
- }
- } ]
- { return create; }
+ { return columnSpec; }
}
-DropCommand dropCommand() :
+CreateCommand createCommand() :
{
- DropCommand drop = new DropCommand();
- String argument = null;
+ CreateCommand createCommand = new CreateCommand();
+ String table = null;
+ Map<String, Object> columnSpec = null;
+ String column = null;
}
{
- <DROP>
- [
- argument = getString()
- ]
-
+ <CREATE>
+ <TABLE>
+ table = Identifier()
+ {
+ createCommand.setTable(table);
+ }
+
+ <LPAREN>
+
+ column = Identifier()
+ columnSpec = ColumnSpec()
{
- drop.setArgument(argument);
- return drop;
+ createCommand.addColumnSpec(column, columnSpec);
}
+
+ (
+ <COMMA>
+ column = Identifier()
+ columnSpec = ColumnSpec()
+ {
+ createCommand.addColumnSpec(column, columnSpec);
+ }
+ )*
+
+ <RPAREN>
+ { return createCommand; }
}
-InsertCommand insertCommand() :
+AlterCommand alterCommand() :
{
- InsertCommand in = new InsertCommand();
- Map<String, List<String>> cond = null;
- List<String> columnfamilies = null;
- List<String> values = null;
- String table = null;
+ AlterCommand alterCommand = new AlterCommand();
+ String table = null;
+ String column = null;
+ Map<String, Object> columnSpec = null;
}
{
- <INSERT>
- table = getString()
+ <ALTER>
+ <TABLE> table = Identifier()
+ { alterCommand.setTable(table); }
+
+ (
+ LOOKAHEAD(2)
+ <ADD> column = Identifier() columnSpec = ColumnSpec()
+ {
+ alterCommand.setOperationType(AlterCommand.OperationType.ADD);
+ alterCommand.addColumnSpec(column, columnSpec);
+ }
+ |
+ <ADD>
+ <LPAREN>
{
- in.setTable(table);
- }
-
- columnfamilies = getLiteralValues()
- {
- in.setColumnfamilies(columnfamilies);
+ alterCommand.setOperationType(AlterCommand.OperationType.ADD);
}
-
- <VALUES> values = getLiteralValues()
- {
- in.setValues(values);
+
+ column = Identifier() columnSpec = ColumnSpec()
+ {
+ alterCommand.addColumnSpec(column, columnSpec);
}
- <WHERE> cond = WhereClause()
- {
- try{
- in.setCondition(cond);
- }catch(ClassCastException ce) {
- throw generateParseException();
+ (
+ <COMMA>
+ column = Identifier()
+ columnSpec = ColumnSpec()
+ {
+ alterCommand.addColumnSpec(column, columnSpec);
}
- }
- {
- return in;
+ )*
+ <RPAREN>
+ |
+ <DROP> column = Identifier()
+ {
+ alterCommand.setOperationType(AlterCommand.OperationType.DROP);
+ alterCommand.setColumn(column);
}
+ |
+ <CHANGE> column = Identifier() columnSpec = ColumnSpec()
+ {
+ alterCommand.setOperationType(AlterCommand.OperationType.CHANGE);
+ alterCommand.addColumnSpec(column, columnSpec);
+ }
+ )
+ { return alterCommand; }
}
-DeleteCommand deleteCommand() :
+DropCommand dropCommand() :
{
- DeleteCommand del = new DeleteCommand();
- Map<String, List<String>> cond = null;
- String argument = null;
+ DropCommand drop = new DropCommand();
+ List<String> tableList = null;
}
{
- <DELETE>
- argument = getString()
- {
- del.setTable(argument);
- }
+ <DROP>
+ <TABLE>
+ tableList = TableList()
+ {
+ drop.setTableList(tableList);
+ return drop;
+ }
+}
+
+InsertCommand insertCommand() :
+{
+ InsertCommand in = new InsertCommand();
+ List<String> columnfamilies = null;
+ List<String> values = null;
+ String table = null;
+ Token t = null;
+}
+{
+ <INSERT>
+ <INTO>
+ table = Identifier()
+ {
+ in.setTable(table);
+ }
- <WHERE> cond = WhereClause() {
- try{
- del.setCondition(cond);
- }catch(ClassCastException ce) {
- throw generateParseException();
- }
- }
- {
- return del;
- }
+ columnfamilies = getColumns()
+ {
+ in.setColumnfamilies(columnfamilies);
+ }
+
+ <VALUES> values = getLiteralValues()
+ {
+ in.setValues(values);
+ }
+
+ <WHERE>
+ <ROW> <EQUALS> ( t=<STRING_LITERAL> | t=<QUOTED_IDENTIFIER> )
+ {
+ in.setRow(t.image.substring(1, t.image.length()-1));
+ }
+ {
+ return in;
+ }
+}
+
+DeleteCommand deleteCommand() :
+{
+ DeleteCommand deleteCommand = new DeleteCommand();
+ List<String> columnList = null;
+ Token t = null;
+ String table = null;
+}
+{
+ <DELETE>
+ columnList = ColumnList()
+ {
+ deleteCommand.setColumnList(columnList);
+ }
+
+ <FROM>
+ table = Identifier()
+ {
+ deleteCommand.setTable(table);
+ }
+
+ <WHERE>
+ <ROW> <EQUALS> ( t=<STRING_LITERAL> | t=<QUOTED_IDENTIFIER> )
+ {
+ deleteCommand.setRow(t.image.substring(1, t.image.length()-1));
+ }
+
+ { return deleteCommand; }
}
SelectCommand selectCommand() :
{
- SelectCommand select = new SelectCommand();
- Map<String, List<String>> cond = null;
- String argument = null;
- int limit;
+ SelectCommand select = new SelectCommand();
+ List<String> columns = null;
+ String rowKey = "";
+ String timestamp = null;
+ int numVersion = 0;
+ String tableName = null;
+ int limit;
}
{
- <SELECT>
- argument = getString()
- {
- select.setTable(argument);
- }
+ <SELECT>
+ columns = ColumnList()
+ <FROM>
+ tableName = Identifier()
+ {
+ select.setColumns(columns);
+ select.setTable(tableName);
+ }
- [ <WHERE> cond = WhereClause() {
- try{
- select.setCondition(cond);
- }catch(ClassCastException ce) {
- throw generateParseException();
- }
- } ]
+ [ ( <WHERE> <ROW> <EQUALS>
+ { select.setWhere(true); }
+ | <STARTING> <FROM> )
- [ <LIMIT><EQUALS> limit = getInt() {
- try{
- select.setLimit(limit);
- }catch(ClassCastException ce) {
- throw generateParseException();
- }
+ rowKey = getStringLiteral()
+ {
+ select.setRowKey(rowKey);
+ }
+ ]
+
+ [ <TIMESTAMP>
+ timestamp = getStringLiteral()
+ {
+ select.setTimestamp(timestamp);
+ }
+ ]
+
+ [
+ <NUM_VERSIONS>
+ numVersion = Number()
+ {
+ select.setVersion(numVersion);
+ }
+ ]
+
+ [ <LIMIT><EQUALS> limit = Number() {
+ try{
+ select.setLimit(limit);
+ }catch(ClassCastException ce) {
+ throw generateParseException();
+ }
} ]
{ return select; }
}
-ClearCommand clearCommand() :
+EnableCommand enableCommand() :
{
- ClearCommand clear = new ClearCommand();
+ EnableCommand enableCommand = new EnableCommand();
+ String table = null;
}
{
- <CLEAR> { return clear; }
+ <ENABLE>
+ table = Identifier()
+ {
+ enableCommand.setTable(table);
+ return enableCommand;
+ }
}
-/**
-* TODO : expressions codes need more love.
-*/
-
-String getString():
-{ Token t = null; }
+DisableCommand disableCommand() :
{
- ( t=<ID>
- | t=<QUOTED_STRING>
- )
- { return t.image.toString(); }
+ DisableCommand disableCommand = new DisableCommand();
+ String table = null;
}
-
-int getInt():
-{ Token t = null; }
{
- t = <NUM>
- { return Integer.parseInt(t.image.toString()); }
+ <DISABLE>
+ table = Identifier()
+ {
+ disableCommand.setTable(table);
+ return disableCommand;
+ }
}
-Map<String, List<String>> WhereClause() :
+ClearCommand clearCommand() :
{
- Map<String, List<String>> result =
- new HashMap<String, List<String>>();
- List<String> exception =
- new ArrayList<String>();
+ ClearCommand clear = new ClearCommand();
}
{
- {
- try{
- result.putAll(ConditionExpression());
- }catch(ParseException pe) {
- exception.add(pe.toString());
- result.put("error", exception);
- }
- }
- (
- <AND> {
- try{
- result.putAll(ConditionExpression());
- }catch(ParseException pe) {
- exception.add(pe.toString());
- result.put("error", exception);
- }
- }
- )*
-
- { return result; }
-}
-
-Map<String, List<String>> ConditionExpression() :
-{
- Token tSearchName, tComparator, tComparand;
- Map<String, List<String>> tmp =
- new HashMap<String, List<String>>();
- List<String> values =
- new ArrayList<String>();
-}
-{
- (
- tSearchName=<ROW>
- | tSearchName=<COLUMN>
- | tSearchName=<TIME>
- | tSearchName=<ID>
- | tSearchName=<VALUES>
- | tSearchName=<COLUMNFAMILIES>
- )
-
- ( tComparator=<EQUALS> | tComparator=<NOTEQUAL> )
-
- ( tComparand=<QUOTED_STRING>
- {
- values.add("quoted string");
- tmp.put("error", values);
- return tmp;
- }
- | tComparand=<STRING_LITERAL> {
- values.add(tComparator.image);
- values.add(tComparand.image.substring(1,tComparand.image.length() - 1));
-
- if(tSearchName.image.toString().equals("row") ||
- tSearchName.image.toString().equals("column") ||
- tSearchName.image.toString().equals("time"))
- { tmp.put(tSearchName.image, values); }
- else
- {
- values.add(tSearchName.image.toString());
- tmp.put("error", values);
- }
-
- return tmp;
-} )
+ <CLEAR>
+ {
+ return clear;
+ }
}
+////////////////////////////////////////////////
+// Utility expansion units...
+
List<String> getLiteralValues() :
{
- List<String> values = new ArrayList<String>();
- String literal = null;
+ List<String> values = new ArrayList<String>();
+ String literal = null;
}
{
<LPAREN>
@@ -490,10 +610,9 @@
| (
<ID>
| <STRING_LITERAL>
- | <QUOTED_STRING>
- | <STRING>
+ | <QUOTED_IDENTIFIER>
) { values.removeAll(values); }
- )*
+ )*
<RPAREN>
{
return values;
@@ -502,10 +621,105 @@
String getStringLiteral() :
{
- Token stringLiteral;
+ Token s;
}
{
- stringLiteral=<STRING_LITERAL>
- { return stringLiteral.image.substring(1,stringLiteral.image.length() - 1); }
- | <QUOTED_STRING> { return null; }
+ ( s=<STRING_LITERAL> | s=<QUOTED_IDENTIFIER> )
+ {
+ String value = s.image.toString();
+ return value.substring(1,value.length() - 1);
+ }
}
+
+List<String> getColumns() : // return parenthesized column list
+{
+ List<String> values = new ArrayList<String>();
+ String literal = null;
+}
+{
+<LPAREN>
+ { literal = getColumn();
+ if(literal != null) values.add(literal);
+ }
+ (
+ <COMMA>
+ {
+ literal = getColumn();
+ if(literal != null) values.add(literal);
+ }
+ )*
+<RPAREN>
+ {
+ return values;
+ }
+}
+
+String getColumn() :
+{
+ Token col;
+}
+{
+ (
+ ( col=<ID> | col=<ASTERISK> )
+ { return col.image.toString(); }
+ | (col=<QUOTED_IDENTIFIER> | col=<STRING_LITERAL> )
+ { return col.image.substring(1,col.image.toString().length() - 1); }
+ )
+}
+
+List<String> TableList() :
+{
+ List<String> tableList = new ArrayList<String>();
+ String table = null;
+}
+{
+ table = Identifier() { tableList.add(table); }
+ ( <COMMA> table = Identifier()
+ { tableList.add(table); }
+ )*
+
+ { return tableList; }
+}
+
+List<String> ColumnList() :
+{
+ List<String> columnList = new ArrayList<String>();
+ String column = null;
+}
+{
+ column = getColumn()
+ {
+ if(column != null) {
+ columnList.add(column);
+ } else {
+ return columnList;
+ }
+ }
+ ( <COMMA> column = getColumn()
+ { columnList.add(column); }
+ )*
+
+ { return columnList; }
+}
+
+int Number() :
+{
+ Token t = null;
+}
+{
+ t = <INTEGER_LITERAL>
+ { return Integer.parseInt(t.image.toString()); }
+}
+
+String Identifier() :
+{
+ Token t = null;
+}
+{
+ (
+ t = <ID>
+ { return t.image.toString(); }
+ | ( t=<QUOTED_IDENTIFIER> | t=<STRING_LITERAL> )
+ { return t.image.substring(1,t.image.toString().length() - 1); }
+ )
+}
\ No newline at end of file
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpCommand.java Wed Sep 12 21:01:05 2007
@@ -22,10 +22,9 @@
import org.apache.hadoop.conf.Configuration;
public class HelpCommand extends BasicCommand {
-
private String argument;
- public ReturnMsg execute(Configuration conf) {
+ public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
HelpManager.printHelp(this.argument);
return null;
}
@@ -33,5 +32,4 @@
public void setArgument(String argument) {
this.argument = argument;
}
-
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpContents.java Wed Sep 12 21:01:05 2007
@@ -30,36 +30,77 @@
public static Map<? extends String, ? extends String[]> Load() {
Map<String, String[]> load = new HashMap<String, String[]>();
- load.put("SHOW", new String[] { "List all tables.", "SHOW TABLES;" });
+ String columnName = "column_name: "
+ + "\n\t column_family_name"
+ + "\n\t| column_family_name:column_label_name";
+ String columnList = "{column_name, [, column_name] ... | *}";
+
+ load.put("SHOW", new String[] {"List all available tables", "SHOW TABLES;"});
+
load.put("FS", new String[] { "Hadoop FsShell operations.",
"FS -copyFromLocal /home/user/backup.dat fs/user/backup;" });
- load.put("CLEAR", new String[] {"Clear the screen.", "CLEAR;"} );
- load.put("DESCRIBE", new String[] { "Describe a table's columnfamilies.",
- "DESCRIBE <table_name>;" });
+
+ load.put("CLEAR", new String[] {"Clear the screen", "CLEAR;"} );
+
+ load.put("DESCRIBE", new String[] { "Print information about tables",
+ "[DESCRIBE|DESC] table_name;" });
+
load.put("CREATE", new String[] {
- "Create a table",
- "CREATE <table_name>"
- + "\n\t COLUMNFAMILIES('cf_name1'[, 'cf_name2', ...]);"
- + "\n [LIMIT=versions_limit];" });
+ "Create tables",
+ "CREATE TABLE table_name"
+ + "\n\t(column_family_spec [, column_family_spec] ...);"
+ + "\n\n"
+ + "column_family_spec:"
+ + "\n\tcolumn_family_name"
+ + "\n\t[MAX_VERSIONS=n]"
+ + "\n\t[MAX_LENGTH=n]"
+ + "\n\t[COMPRESSION=NONE|RECORD|BLOCK]"
+ + "\n\t[IN_MEMORY]"
+ + "\n\t[BLOOMFILTER=NONE|BLOOM|COUNTING|RETOUCHED VECTOR_SIZE=n NUM_HASH=n]"
+ });
+
load.put("DROP", new String[] {
- "Drop columnfamilie(s) from a table or drop table(s)",
- "DROP table_name1[, table_name2, ...] | cf_name1[, cf_name2, ...];" });
+ "Drop tables",
+ "DROP TABLE table_name [, table_name] ...;" });
+
load.put("INSERT", new String[] {
- "Insert row into table",
- "INSERT <table_name>" + "\n\t('column_name1'[, 'column_name2', ...])"
- + "\n\t VALUES('entry1'[, 'entry2', ...])"
- + "\n WHERE row='row_key';" });
+ "Insert values into tables",
+ "INSERT INTO table_name"
+ + "\n\t(column_name, ...) VALUES ('value', ...)"
+ + "\n\tWHERE row='row_key';"
+ + "\n\n" + columnName
+ });
+
load.put("DELETE", new String[] {
- "Delete cell or row in table.",
- "DELETE <table_name>" + "\n\t WHERE row='row_key;"
- + "\n [AND column='column_name'];" });
+ "Delete a subset of the data in a table",
+ "DELETE " + columnList
+ + "\n\tFROM table_name"
+ + "\n\tWHERE row='row-key';"
+ + "\n\n"
+ + columnName
+ });
+
load.put("SELECT",
new String[] {
- "Select values from a table",
- "SELECT <table_name>" + "\n\t [WHERE row='row_key']"
- + "\n [AND column='column_name'];"
- + "\n [AND time='timestamp'];"
- + "\n [LIMIT=versions_limit];" });
+ "Select values from tables",
+ "SELECT " + columnList + " FROM table_name"
+ + "\n\t[WHERE row='row_key' | STARTING FROM 'row-key']"
+ + "\n\t[NUM_VERSIONS = version_count]"
+ + "\n\t[TIMESTAMP 'timestamp']"
+ + "\n\t[LIMIT = row_count]"
+ + "\n\t[INTO FILE 'file_name'];"
+ });
+
+ load.put("ALTER",
+ new String[] {
+ "Alter the structure of a table",
+ "ALTER TABLE table_name"
+ + "\n\t ADD column_spec"
+ + "\n\t| ADD (column_spec, column_spec, ...)"
+ + "\n\t| DROP column_family_name"
+ + "\n\t| CHANGE column_spec;"
+ });
+
load.put("EXIT", new String[] { "Exit shell", "EXIT;" });
return load;
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/HelpManager.java Wed Sep 12 21:01:05 2007
@@ -27,13 +27,14 @@
*/
public class HelpManager {
/** application name */
- public static final String APP_NAME = "HBase Shell";
+ public static final String APP_NAME = "Hbase Shell";
/** version of the code */
- public static final String APP_VERSION = "0.0.1";
+ public static final String APP_VERSION = "0.0.2";
/** help contents map */
- public static final Map<String, String[]> help = new HashMap<String, String[]>();
+ public static final Map<String, String[]> help =
+ new HashMap<String, String[]>();
public HelpManager() {
help.putAll(HelpContents.Load());
@@ -41,7 +42,6 @@
/** Print out the program version. */
public void printVersion() {
- ClearCommand.clear();
System.out.println(APP_NAME + ", " + APP_VERSION + " version.\n"
+ "Copyright (c) 2007 by udanax, "
+ "licensed to Apache Software Foundation.\n"
@@ -55,12 +55,13 @@
for (Map.Entry<String, String[]> helpMap : help.entrySet()) {
wrapping(helpMap.getKey(), helpMap.getValue(), false);
}
+ System.out.println();
} else {
if (help.containsKey(cmd.toUpperCase())) {
String[] msg = help.get(cmd.toUpperCase());
wrapping(cmd.toUpperCase(), msg, true);
} else {
- System.out.println("Unknown Command : Type 'help' for usage.");
+ System.out.println("Unknown Command : Type 'help;' for usage.");
}
}
}
@@ -76,6 +77,6 @@
}
if (example)
- System.out.println("\n>>> " + cmdType[1]);
+ System.out.println("\nSyntax:\n" + cmdType[1] + "\n");
}
-}
+}
\ No newline at end of file
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/InsertCommand.java Wed Sep 12 21:01:05 2007
@@ -21,21 +21,23 @@
import java.io.IOException;
import java.util.List;
-import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTable;
import org.apache.hadoop.io.Text;
+/**
+ * Inserts values into tables.
+ */
public class InsertCommand extends BasicCommand {
- private Text table;
+ private Text tableName;
private List<String> columnfamilies;
private List<String> values;
- private Map<String, List<String>> condition;
+ private String rowKey;
public ReturnMsg execute(Configuration conf) {
- if (this.table == null || this.values == null || this.condition == null)
+ if (this.tableName == null || this.values == null || this.rowKey == null)
return new ReturnMsg(0, "Syntax error : Please check 'Insert' syntax.");
if (this.columnfamilies.size() != this.values.size())
@@ -43,12 +45,16 @@
"Mismatch between values list and columnfamilies list");
try {
- HTable table = new HTable(conf, this.table);
+ HTable table = new HTable(conf, this.tableName);
long lockId = table.startUpdate(getRow());
for (int i = 0; i < this.values.size(); i++) {
- table.put(lockId, getColumn(i), getValue(i));
-
+ Text column = null;
+ if(getColumn(i).toString().contains(":"))
+ column = getColumn(i);
+ else
+ column = new Text(getColumn(i) + ":");
+ table.put(lockId, column, getValue(i));
}
table.commit(lockId);
@@ -60,7 +66,7 @@
}
public void setTable(String table) {
- this.table = new Text(table);
+ this.tableName = new Text(table);
}
public void setColumnfamilies(List<String> columnfamilies) {
@@ -71,12 +77,12 @@
this.values = values;
}
- public void setCondition(Map<String, List<String>> cond) {
- this.condition = cond;
+ public void setRow(String row) {
+ this.rowKey = row;
}
public Text getRow() {
- return new Text(this.condition.get("row").get(1));
+ return new Text(this.rowKey);
}
public Text getColumn(int i) {
@@ -85,5 +91,5 @@
public byte[] getValue(int i) {
return this.values.get(i).getBytes();
- }
+ }
}
Modified: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java?rev=575156&r1=575155&r2=575156&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java (original)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/ReturnMsg.java Wed Sep 12 21:01:05 2007
@@ -19,10 +19,13 @@
*/
package org.apache.hadoop.hbase.shell;
+/**
+ * Message returned when a {@link Command} is
+ * {@link Command#execute(org.apache.hadoop.conf.Configuration)}'ed.
+ */
public class ReturnMsg {
-
- private String msg;
- private int type;
+ private final String msg;
+ private final int type;
public ReturnMsg(int i, String string) {
this.type = i;
@@ -41,5 +44,4 @@
public int getType() {
return this.type;
}
-
}
Added: lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java?rev=575156&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java (added)
+++ lucene/hadoop/trunk/src/contrib/hbase/src/java/org/apache/hadoop/hbase/shell/SchemaModificationCommand.java Wed Sep 12 21:01:05 2007
@@ -0,0 +1,106 @@
+/**
+ * Copyright 2007 The Apache Software Foundation
+ *
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.shell;
+
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.hadoop.hbase.BloomFilterDescriptor;
+import org.apache.hadoop.hbase.BloomFilterDescriptor.BloomFilterType;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.io.Text;
+
+/**
+ * The base class of schema modification commands: CreateCommand and
+ * AlterCommand. Provides utility methods for alteration operations.
+ */
+public abstract class SchemaModificationCommand extends BasicCommand {
+
+ private int maxVersions;
+ private int maxLength;
+ private HColumnDescriptor.CompressionType compression;
+ private boolean inMemory;
+ private BloomFilterDescriptor bloomFilterDesc;
+ private BloomFilterType bloomFilterType;
+ private int vectorSize;
+ private int numHash;
+ private int numEntries;
+
+ private void initOptions() {
+ maxVersions = HColumnDescriptor.DEFAULT_N_VERSIONS;
+ maxLength = HColumnDescriptor.DEFAULT_MAX_VALUE_LENGTH;
+ compression = HColumnDescriptor.DEFAULT_COMPRESSION_TYPE;
+ inMemory = HColumnDescriptor.DEFAULT_IN_MEMORY;
+ bloomFilterDesc = HColumnDescriptor.DEFAULT_BLOOM_FILTER_DESCRIPTOR;
+ }
+
+ /**
+ * Given a column name and column spec, returns an instance of
+ * HColumnDescriptor representing the column spec.
+ */
+ protected HColumnDescriptor getColumnDescriptor(String column,
+ Map<String, Object> columnSpec) throws IllegalArgumentException {
+ initOptions();
+
+ Set<String> specs = columnSpec.keySet();
+ for (String spec : specs) {
+ spec = spec.toUpperCase();
+
+ if (spec.equals("MAX_VERSIONS")) {
+ maxVersions = (Integer) columnSpec.get(spec);
+ } else if (spec.equals("MAX_LENGTH")) {
+ maxLength = (Integer) columnSpec.get(spec);
+ } else if (spec.equals("COMPRESSION")) {
+ compression = HColumnDescriptor.CompressionType
+ .valueOf(((String) columnSpec.get(spec)).toUpperCase());
+ } else if (spec.equals("IN_MEMORY")) {
+ inMemory = (Boolean) columnSpec.get(spec);
+ } else if (spec.equals("BLOOMFILTER")) {
+ bloomFilterType = BloomFilterType.valueOf(((String) columnSpec
+ .get(spec)).toUpperCase());
+ } else if (spec.equals("VECTOR_SIZE")) {
+ vectorSize = (Integer) columnSpec.get(spec);
+ } else if (spec.equals("NUM_HASH")) {
+ numHash = (Integer) columnSpec.get(spec);
+ } else if (spec.equals("NUM_ENTRIES")) {
+ numEntries = (Integer) columnSpec.get(spec);
+ } else {
+ throw new IllegalArgumentException("Invalid option: " + spec);
+ }
+ }
+
+ // Now we gather all the specified options for this column.
+ if (bloomFilterType != null) {
+ if (specs.contains("NUM_ENTRIES")) {
+ bloomFilterDesc = new BloomFilterDescriptor(bloomFilterType, numEntries);
+ } else {
+ bloomFilterDesc = new BloomFilterDescriptor(bloomFilterType,
+ vectorSize, numHash);
+ }
+ }
+
+ column = appendDelimiter(column);
+
+ HColumnDescriptor columnDesc = new HColumnDescriptor(new Text(column),
+ maxVersions, compression, inMemory, maxLength, bloomFilterDesc);
+
+ return columnDesc;
+ }
+}
\ No newline at end of file