You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@hive.apache.org by zs...@apache.org on 2010/01/21 11:38:15 UTC
svn commit: r901644 [30/37] - in /hadoop/hive/trunk: ./
ql/src/java/org/apache/hadoop/hive/ql/
ql/src/java/org/apache/hadoop/hive/ql/exec/
ql/src/java/org/apache/hadoop/hive/ql/exec/persistence/
ql/src/java/org/apache/hadoop/hive/ql/history/ ql/src/jav...
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFSplit.java Thu Jan 21 10:37:58 2010
@@ -22,65 +22,62 @@
import org.apache.hadoop.hive.ql.exec.UDFArgumentException;
import org.apache.hadoop.hive.ql.exec.UDFArgumentLengthException;
-import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.exec.description;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
+import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableStringObjectInspector;
import org.apache.hadoop.io.Text;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorConverters;
-@description(
- name = "split",
- value = "_FUNC_(str, regex) - Splits str around occurances that match " +
- "regex",
- extended = "Example:\n" +
- " > SELECT _FUNC_('oneAtwoBthreeC', '[ABC]') FROM src LIMIT 1;\n" +
- " [\"one\", \"two\", \"three\"]"
- )
+@description(name = "split", value = "_FUNC_(str, regex) - Splits str around occurances that match "
+ + "regex", extended = "Example:\n"
+ + " > SELECT _FUNC_('oneAtwoBthreeC', '[ABC]') FROM src LIMIT 1;\n"
+ + " [\"one\", \"two\", \"three\"]")
public class GenericUDFSplit extends GenericUDF {
private ObjectInspectorConverters.Converter[] converters;
-
+
+ @Override
public ObjectInspector initialize(ObjectInspector[] arguments)
- throws UDFArgumentException {
+ throws UDFArgumentException {
if (arguments.length != 2) {
throw new UDFArgumentLengthException(
"The function SPLIT(s, regexp) takes exactly 2 arguments.");
}
-
+
converters = new ObjectInspectorConverters.Converter[arguments.length];
- for(int i = 0; i < arguments.length; i++) {
+ for (int i = 0; i < arguments.length; i++) {
converters[i] = ObjectInspectorConverters.getConverter(arguments[i],
PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
-
- return ObjectInspectorFactory.getStandardListObjectInspector(
- PrimitiveObjectInspectorFactory.writableStringObjectInspector);
+
+ return ObjectInspectorFactory
+ .getStandardListObjectInspector(PrimitiveObjectInspectorFactory.writableStringObjectInspector);
}
+ @Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
- assert(arguments.length == 2);
-
+ assert (arguments.length == 2);
+
if (arguments[0].get() == null || arguments[1].get() == null) {
return null;
}
-
- Text s = (Text)converters[0].convert(arguments[0].get());
- Text regex = (Text)converters[1].convert(arguments[1].get());
-
+
+ Text s = (Text) converters[0].convert(arguments[0].get());
+ Text regex = (Text) converters[1].convert(arguments[1].get());
+
ArrayList<Text> result = new ArrayList<Text>();
-
- for(String str: s.toString().split(regex.toString())) {
+
+ for (String str : s.toString().split(regex.toString())) {
result.add(new Text(str));
}
-
+
return result;
}
-
+
+ @Override
public String getDisplayString(String[] children) {
- assert(children.length == 2);
+ assert (children.length == 2);
return "split(" + children[0] + ", " + children[1] + ")";
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFUtils.java Thu Jan 21 10:37:58 2010
@@ -19,12 +19,10 @@
package org.apache.hadoop.hive.ql.udf.generic;
import java.lang.reflect.Array;
-import java.lang.reflect.GenericArrayType;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.lang.reflect.Type;
import java.nio.ByteBuffer;
-import java.util.Arrays;
import java.util.HashMap;
import org.apache.commons.logging.Log;
@@ -60,30 +58,30 @@
public static boolean isUtfStartByte(byte b) {
return (b & 0xC0) != 0x80;
}
-
+
/**
* This class helps to find the return ObjectInspector for a GenericUDF.
*
- * In many cases like CASE and IF, the GenericUDF is returning a value out
- * of several possibilities. However these possibilities may not always
- * have the same ObjectInspector.
+ * In many cases like CASE and IF, the GenericUDF is returning a value out of
+ * several possibilities. However these possibilities may not always have the
+ * same ObjectInspector.
*
- * This class will help detect whether all possibilities have exactly the
- * same ObjectInspector. If not, then we need to convert the Objects to
- * the same ObjectInspector.
+ * This class will help detect whether all possibilities have exactly the same
+ * ObjectInspector. If not, then we need to convert the Objects to the same
+ * ObjectInspector.
*
- * A special case is when some values are constant NULL. In this case we
- * can use the same ObjectInspector.
+ * A special case is when some values are constant NULL. In this case we can
+ * use the same ObjectInspector.
*/
public static class ReturnObjectInspectorResolver {
boolean allowTypeConversion;
ObjectInspector returnObjectInspector;
-
- // We create converters beforehand, so that the converters can reuse the
- // same object for returning conversion results.
+
+ // We create converters beforehand, so that the converters can reuse the
+ // same object for returning conversion results.
HashMap<ObjectInspector, Converter> converters;
-
+
public ReturnObjectInspectorResolver() {
this(false);
}
@@ -91,61 +89,67 @@
public ReturnObjectInspectorResolver(boolean allowTypeConversion) {
this.allowTypeConversion = allowTypeConversion;
}
+
/**
* Update returnObjectInspector and valueInspectorsAreTheSame based on the
* ObjectInspector seen.
+ *
* @return false if there is a type mismatch
*/
- public boolean update(ObjectInspector oi)
- throws UDFArgumentTypeException {
+ public boolean update(ObjectInspector oi) throws UDFArgumentTypeException {
if (oi instanceof VoidObjectInspector) {
return true;
}
-
+
if (returnObjectInspector == null) {
// The first argument, just set it.
returnObjectInspector = oi;
return true;
}
-
+
if (returnObjectInspector == oi) {
- // The new ObjectInspector is the same as the old one, directly return true
+ // The new ObjectInspector is the same as the old one, directly return
+ // true
return true;
}
-
+
TypeInfo oiTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(oi);
- TypeInfo rTypeInfo = TypeInfoUtils.getTypeInfoFromObjectInspector(returnObjectInspector);
+ TypeInfo rTypeInfo = TypeInfoUtils
+ .getTypeInfoFromObjectInspector(returnObjectInspector);
if (oiTypeInfo == rTypeInfo) {
// Convert everything to writable, if types of arguments are the same,
// but ObjectInspectors are different.
- returnObjectInspector = ObjectInspectorUtils.getStandardObjectInspector(returnObjectInspector,
- ObjectInspectorCopyOption.WRITABLE);
+ returnObjectInspector = ObjectInspectorUtils
+ .getStandardObjectInspector(returnObjectInspector,
+ ObjectInspectorCopyOption.WRITABLE);
return true;
}
-
+
if (!allowTypeConversion) {
return false;
}
-
- // Types are different, we need to check whether we can convert them to
+
+ // Types are different, we need to check whether we can convert them to
// a common base class or not.
- TypeInfo commonTypeInfo = FunctionRegistry.getCommonClass(oiTypeInfo, rTypeInfo);
+ TypeInfo commonTypeInfo = FunctionRegistry.getCommonClass(oiTypeInfo,
+ rTypeInfo);
if (commonTypeInfo == null) {
return false;
}
- returnObjectInspector = TypeInfoUtils.getStandardWritableObjectInspectorFromTypeInfo(commonTypeInfo);
-
+ returnObjectInspector = TypeInfoUtils
+ .getStandardWritableObjectInspectorFromTypeInfo(commonTypeInfo);
+
return true;
}
-
+
/**
* Returns the ObjectInspector of the return value.
*/
public ObjectInspector get() {
return returnObjectInspector;
}
-
+
/**
* Convert the return Object if necessary (when the ObjectInspectors of
* different possibilities are not all the same).
@@ -159,160 +163,174 @@
if (o == null) {
return null;
}
-
+
if (converters == null) {
converters = new HashMap<ObjectInspector, Converter>();
}
-
+
Converter converter = converters.get(oi);
if (converter == null) {
- converter = ObjectInspectorConverters.getConverter(oi, returnObjectInspector);
+ converter = ObjectInspectorConverters.getConverter(oi,
+ returnObjectInspector);
converters.put(oi, converter);
}
converted = converter.convert(o);
}
return converted;
}
-
+
}
-
+
/**
- * Convert parameters for the method if needed.
+ * Convert parameters for the method if needed.
*/
public static class ConversionHelper {
- private Method m;
- private ObjectInspector[] givenParameterOIs;
+ private final ObjectInspector[] givenParameterOIs;
Type[] methodParameterTypes;
- private boolean isVariableLengthArgument;
+ private final boolean isVariableLengthArgument;
Type lastParaElementType;
-
+
boolean conversionNeeded;
Converter[] converters;
Object[] convertedParameters;
Object[] convertedParametersInArray;
-
private static Class<?> getClassFromType(Type t) {
if (t instanceof Class<?>) {
- return (Class<?>)t;
+ return (Class<?>) t;
} else if (t instanceof ParameterizedType) {
- ParameterizedType pt = (ParameterizedType)t;
- return (Class<?>)pt.getRawType();
+ ParameterizedType pt = (ParameterizedType) t;
+ return (Class<?>) pt.getRawType();
}
return null;
}
-
+
/**
- * Create a PrimitiveConversionHelper for Method m. The ObjectInspector's
+ * Create a PrimitiveConversionHelper for Method m. The ObjectInspector's
* input parameters are specified in parameters.
*/
- public ConversionHelper(Method m, ObjectInspector[] parameterOIs) throws UDFArgumentException {
- this.m = m;
- this.givenParameterOIs = parameterOIs;
-
+ public ConversionHelper(Method m, ObjectInspector[] parameterOIs)
+ throws UDFArgumentException {
+ givenParameterOIs = parameterOIs;
+
methodParameterTypes = m.getGenericParameterTypes();
- // Whether the method takes an array like Object[],
+ // Whether the method takes an array like Object[],
// or String[] etc in the last argument.
- lastParaElementType = TypeInfoUtils.getArrayElementType(
- methodParameterTypes.length == 0 ? null :
- methodParameterTypes[methodParameterTypes.length-1]);
+ lastParaElementType = TypeInfoUtils
+ .getArrayElementType(methodParameterTypes.length == 0 ? null
+ : methodParameterTypes[methodParameterTypes.length - 1]);
isVariableLengthArgument = (lastParaElementType != null);
-
+
// Create the output OI array
ObjectInspector[] methodParameterOIs = new ObjectInspector[parameterOIs.length];
-
+
if (isVariableLengthArgument) {
-
- // ConversionHelper can be called without method parameter length checkings
+
+ // ConversionHelper can be called without method parameter length
+ // checkings
// for terminatePartial() and merge() calls.
if (parameterOIs.length < methodParameterTypes.length - 1) {
- throw new UDFArgumentLengthException(m.toString() + " requires at least "
- + (methodParameterTypes.length - 1) + " arguments but only "
- + parameterOIs.length + " are passed in.");
+ throw new UDFArgumentLengthException(m.toString()
+ + " requires at least " + (methodParameterTypes.length - 1)
+ + " arguments but only " + parameterOIs.length
+ + " are passed in.");
}
// Copy the first methodParameterTypes.length - 1 entries
for (int i = 0; i < methodParameterTypes.length - 1; i++) {
- // This method takes Object, so it accepts whatever types that are passed in.
+ // This method takes Object, so it accepts whatever types that are
+ // passed in.
if (methodParameterTypes[i] == Object.class) {
methodParameterOIs[i] = ObjectInspectorUtils
- .getStandardObjectInspector(parameterOIs[i], ObjectInspectorCopyOption.JAVA);
+ .getStandardObjectInspector(parameterOIs[i],
+ ObjectInspectorCopyOption.JAVA);
} else {
methodParameterOIs[i] = ObjectInspectorFactory
- .getReflectionObjectInspector(methodParameterTypes[i], ObjectInspectorOptions.JAVA);
+ .getReflectionObjectInspector(methodParameterTypes[i],
+ ObjectInspectorOptions.JAVA);
}
}
// Deal with the last entry
if (lastParaElementType == Object.class) {
- // This method takes Object[], so it accepts whatever types that are passed in.
+ // This method takes Object[], so it accepts whatever types that are
+ // passed in.
for (int i = methodParameterTypes.length - 1; i < parameterOIs.length; i++) {
methodParameterOIs[i] = ObjectInspectorUtils
- .getStandardObjectInspector(parameterOIs[i], ObjectInspectorCopyOption.JAVA);
+ .getStandardObjectInspector(parameterOIs[i],
+ ObjectInspectorCopyOption.JAVA);
}
} else {
- // This method takes something like String[], so it only accepts something like String
- ObjectInspector oi = ObjectInspectorFactory.getReflectionObjectInspector(
- lastParaElementType, ObjectInspectorOptions.JAVA);
+ // This method takes something like String[], so it only accepts
+ // something like String
+ ObjectInspector oi = ObjectInspectorFactory
+ .getReflectionObjectInspector(lastParaElementType,
+ ObjectInspectorOptions.JAVA);
for (int i = methodParameterTypes.length - 1; i < parameterOIs.length; i++) {
methodParameterOIs[i] = oi;
}
}
-
+
} else {
-
+
// Normal case, the last parameter is a normal parameter.
- // ConversionHelper can be called without method parameter length checkings
+ // ConversionHelper can be called without method parameter length
+ // checkings
// for terminatePartial() and merge() calls.
if (methodParameterTypes.length != parameterOIs.length) {
- throw new UDFArgumentLengthException(m.toString() + " requires "
- + methodParameterTypes.length + " arguments but "
+ throw new UDFArgumentLengthException(m.toString() + " requires "
+ + methodParameterTypes.length + " arguments but "
+ parameterOIs.length + " are passed in.");
}
for (int i = 0; i < methodParameterTypes.length; i++) {
- // This method takes Object, so it accepts whatever types that are passed in.
+ // This method takes Object, so it accepts whatever types that are
+ // passed in.
if (methodParameterTypes[i] == Object.class) {
methodParameterOIs[i] = ObjectInspectorUtils
- .getStandardObjectInspector(parameterOIs[i], ObjectInspectorCopyOption.JAVA);
+ .getStandardObjectInspector(parameterOIs[i],
+ ObjectInspectorCopyOption.JAVA);
} else {
methodParameterOIs[i] = ObjectInspectorFactory
- .getReflectionObjectInspector(methodParameterTypes[i], ObjectInspectorOptions.JAVA);
+ .getReflectionObjectInspector(methodParameterTypes[i],
+ ObjectInspectorOptions.JAVA);
}
}
}
-
+
// Create the converters
conversionNeeded = false;
converters = new Converter[parameterOIs.length];
for (int i = 0; i < parameterOIs.length; i++) {
- Converter pc = ObjectInspectorConverters
- .getConverter(parameterOIs[i], methodParameterOIs[i]);
+ Converter pc = ObjectInspectorConverters.getConverter(parameterOIs[i],
+ methodParameterOIs[i]);
converters[i] = pc;
// Conversion is needed?
- conversionNeeded = conversionNeeded || (!(pc instanceof IdentityConverter));
+ conversionNeeded = conversionNeeded
+ || (!(pc instanceof IdentityConverter));
}
-
+
if (isVariableLengthArgument) {
convertedParameters = new Object[methodParameterTypes.length];
- convertedParametersInArray = (Object[])Array.newInstance(
- getClassFromType(lastParaElementType), parameterOIs.length - methodParameterTypes.length + 1);
+ convertedParametersInArray = (Object[]) Array.newInstance(
+ getClassFromType(lastParaElementType), parameterOIs.length
+ - methodParameterTypes.length + 1);
convertedParameters[convertedParameters.length - 1] = convertedParametersInArray;
} else {
convertedParameters = new Object[parameterOIs.length];
}
}
-
+
public Object[] convertIfNecessary(Object... parameters) {
-
- assert(parameters.length == givenParameterOIs.length);
-
+
+ assert (parameters.length == givenParameterOIs.length);
+
if (!conversionNeeded && !isVariableLengthArgument) {
// no conversion needed, and not variable-length argument:
// just return what is passed in.
return parameters;
}
-
+
if (isVariableLengthArgument) {
// convert the first methodParameterTypes.length - 1 entries
for (int i = 0; i < methodParameterTypes.length - 1; i++) {
@@ -320,8 +338,8 @@
}
// convert the rest and put into the last entry
for (int i = methodParameterTypes.length - 1; i < parameters.length; i++) {
- convertedParametersInArray[i + 1 - methodParameterTypes.length] =
- converters[i].convert(parameters[i]);
+ convertedParametersInArray[i + 1 - methodParameterTypes.length] = converters[i]
+ .convert(parameters[i]);
}
} else {
// normal case, convert all parameters
@@ -338,25 +356,25 @@
*/
public static String getOrdinal(int i) {
int unit = i % 10;
- return (i <= 0) ? ""
- : (i != 11 && unit == 1) ? i + "st"
- : (i != 12 && unit == 2) ? i + "nd"
- : (i != 13 && unit == 3) ? i + "rd"
- : i + "th";
- }
+ return (i <= 0) ? "" : (i != 11 && unit == 1) ? i + "st"
+ : (i != 12 && unit == 2) ? i + "nd" : (i != 13 && unit == 3) ? i + "rd"
+ : i + "th";
+ }
/**
- * Finds any occurence of <code>subtext</code> from <code>text</code> in the backing
- * buffer, for avoiding string encoding and decoding.
- * Shamelessly copy from {@link org.apache.hadoop.io.Text#find(String, int)}.
+ * Finds any occurence of <code>subtext</code> from <code>text</code> in the
+ * backing buffer, for avoiding string encoding and decoding. Shamelessly copy
+ * from {@link org.apache.hadoop.io.Text#find(String, int)}.
*/
public static int findText(Text text, Text subtext, int start) {
// src.position(start) can't accept negative numbers.
- if(start < 0)
- return -1;
+ if (start < 0) {
+ return -1;
+ }
- ByteBuffer src = ByteBuffer.wrap(text.getBytes(),0,text.getLength());
- ByteBuffer tgt = ByteBuffer.wrap(subtext.getBytes(),0,subtext.getLength());
+ ByteBuffer src = ByteBuffer.wrap(text.getBytes(), 0, text.getLength());
+ ByteBuffer tgt = ByteBuffer
+ .wrap(subtext.getBytes(), 0, subtext.getLength());
byte b = tgt.get();
src.position(start);
@@ -365,7 +383,7 @@
src.mark(); // save position in loop
tgt.mark(); // save position in target
boolean found = true;
- int pos = src.position()-1;
+ int pos = src.position() - 1;
while (tgt.hasRemaining()) {
if (!src.hasRemaining()) { // src expired first
tgt.reset();
@@ -380,7 +398,9 @@
break; // no match
}
}
- if (found) return pos;
+ if (found) {
+ return pos;
+ }
}
}
return -1; // not found
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDFWhen.java Thu Jan 21 10:37:58 2010
@@ -20,26 +20,18 @@
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator;
import org.apache.hadoop.hive.ql.exec.UDFArgumentTypeException;
import org.apache.hadoop.hive.ql.metadata.HiveException;
import org.apache.hadoop.hive.serde.Constants;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.PrimitiveObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.BooleanObjectInspector;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorUtils;
-import org.apache.hadoop.hive.serde2.objectinspector.primitive.VoidObjectInspector;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfo;
-import org.apache.hadoop.hive.serde2.typeinfo.TypeInfoUtils;
/**
* GenericUDF Class for SQL construct "CASE a WHEN b THEN c [ELSE f] END".
*
- * NOTES:
- * 1. a and b should have the same TypeInfo, or an exception will be thrown.
- * 2. c and f should have the same TypeInfo, or an exception will be thrown.
+ * NOTES: 1. a and b should have the same TypeInfo, or an exception will be
+ * thrown. 2. c and f should have the same TypeInfo, or an exception will be
+ * thrown.
*/
public class GenericUDFWhen extends GenericUDF {
@@ -48,70 +40,72 @@
ObjectInspector[] argumentOIs;
GenericUDFUtils.ReturnObjectInspectorResolver returnOIResolver;
GenericUDFUtils.ReturnObjectInspectorResolver caseOIResolver;
-
+
@Override
public ObjectInspector initialize(ObjectInspector[] arguments)
throws UDFArgumentTypeException {
-
- this.argumentOIs = arguments;
+
+ argumentOIs = arguments;
returnOIResolver = new GenericUDFUtils.ReturnObjectInspectorResolver();
-
- for (int i=0; i+1<arguments.length; i+=2) {
+
+ for (int i = 0; i + 1 < arguments.length; i += 2) {
if (!arguments[i].getTypeName().equals(Constants.BOOLEAN_TYPE_NAME)) {
- throw new UDFArgumentTypeException(i,
- "\"" + Constants.BOOLEAN_TYPE_NAME + "\" is expected after WHEN, "
+ throw new UDFArgumentTypeException(i, "\""
+ + Constants.BOOLEAN_TYPE_NAME + "\" is expected after WHEN, "
+ "but \"" + arguments[i].getTypeName() + "\" is found");
}
- if (!returnOIResolver.update(arguments[i+1])) {
- throw new UDFArgumentTypeException(i+1,
+ if (!returnOIResolver.update(arguments[i + 1])) {
+ throw new UDFArgumentTypeException(i + 1,
"The expressions after THEN should have the same type: \""
- + returnOIResolver.get().getTypeName() + "\" is expected but \""
- + arguments[i+1].getTypeName() + "\" is found");
+ + returnOIResolver.get().getTypeName()
+ + "\" is expected but \"" + arguments[i + 1].getTypeName()
+ + "\" is found");
}
}
if (arguments.length % 2 == 1) {
int i = arguments.length - 2;
- if (!returnOIResolver.update(arguments[i+1])) {
- throw new UDFArgumentTypeException(i+1,
+ if (!returnOIResolver.update(arguments[i + 1])) {
+ throw new UDFArgumentTypeException(i + 1,
"The expression after ELSE should have the same type as those after THEN: \""
- + returnOIResolver.get().getTypeName() + "\" is expected but \""
- + arguments[i+1].getTypeName() + "\" is found");
+ + returnOIResolver.get().getTypeName()
+ + "\" is expected but \"" + arguments[i + 1].getTypeName()
+ + "\" is found");
}
}
-
+
return returnOIResolver.get();
}
@Override
public Object evaluate(DeferredObject[] arguments) throws HiveException {
- for (int i=0; i+1<arguments.length; i+=2) {
+ for (int i = 0; i + 1 < arguments.length; i += 2) {
Object caseKey = arguments[i].get();
- if (caseKey != null && ((BooleanObjectInspector)argumentOIs[i]).get(caseKey)) {
- Object caseValue = arguments[i+1].get();
+ if (caseKey != null
+ && ((BooleanObjectInspector) argumentOIs[i]).get(caseKey)) {
+ Object caseValue = arguments[i + 1].get();
return returnOIResolver.convertIfNecessary(caseValue,
- argumentOIs[i+1]);
+ argumentOIs[i + 1]);
}
}
// Process else statement
if (arguments.length % 2 == 1) {
int i = arguments.length - 2;
- Object elseValue = arguments[i+1].get();
- return returnOIResolver.convertIfNecessary(elseValue,
- argumentOIs[i+1]);
+ Object elseValue = arguments[i + 1].get();
+ return returnOIResolver.convertIfNecessary(elseValue, argumentOIs[i + 1]);
}
return null;
}
@Override
public String getDisplayString(String[] children) {
- assert(children.length >= 2);
+ assert (children.length >= 2);
StringBuilder sb = new StringBuilder();
sb.append("CASE");
- for(int i=0; i+1<children.length; i+=2) {
+ for (int i = 0; i + 1 < children.length; i += 2) {
sb.append(" WHEN (");
sb.append(children[i]);
sb.append(") THEN (");
- sb.append(children[i+1]);
+ sb.append(children[i + 1]);
sb.append(")");
}
if (children.length % 2 == 1) {
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTF.java Thu Jan 21 10:37:58 2010
@@ -32,34 +32,35 @@
public abstract class GenericUDTF {
Collector collector = null;
-
+
/**
- * Initialize this GenericUDTF. This will be called only once per
- * instance.
+ * Initialize this GenericUDTF. This will be called only once per instance.
*
- * @param args An array of ObjectInspectors for the arguments
- * @return A StructObjectInspector for output. The output struct
- * represents a row of the table where the fields of the stuct
- * are the columns. The field names are unimportant as they
- * will be overridden by user supplied column aliases.
+ * @param args
+ * An array of ObjectInspectors for the arguments
+ * @return A StructObjectInspector for output. The output struct represents a
+ * row of the table where the fields of the stuct are the columns. The
+ * field names are unimportant as they will be overridden by user
+ * supplied column aliases.
*/
- public abstract StructObjectInspector initialize(ObjectInspector [] argOIs)
- throws UDFArgumentException;
-
+ public abstract StructObjectInspector initialize(ObjectInspector[] argOIs)
+ throws UDFArgumentException;
+
/**
* Give a set of arguments for the UDTF to process.
*
- * @param o object array of arguments
+ * @param o
+ * object array of arguments
*/
- public abstract void process(Object [] args) throws HiveException;
-
+ public abstract void process(Object[] args) throws HiveException;
+
/**
- * Called to notify the UDTF that there are no more rows to process. Note
- * that forward() should not be called in this function. Only clean up code
- * should be run.
+ * Called to notify the UDTF that there are no more rows to process. Note that
+ * forward() should not be called in this function. Only clean up code should
+ * be run.
*/
public abstract void close() throws HiveException;
-
+
/**
* Associates a collector with this UDTF. Can't be specified in the
* constructor as the UDTF may be initialized before the collector has been
@@ -70,7 +71,7 @@
public final void setCollector(Collector collector) {
this.collector = collector;
}
-
+
/**
* Passes an output row to the collector
*
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/GenericUDTFExplode.java Thu Jan 21 10:37:58 2010
@@ -29,47 +29,45 @@
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorFactory;
import org.apache.hadoop.hive.serde2.objectinspector.StructObjectInspector;
-@description(
- name = "explode",
- value = "_FUNC_(a) - separates the elements of array a into multiple rows "
-)
+@description(name = "explode", value = "_FUNC_(a) - separates the elements of array a into multiple rows ")
public class GenericUDTFExplode extends GenericUDTF {
ListObjectInspector listOI = null;
-
+
@Override
- public void close() throws HiveException{
+ public void close() throws HiveException {
}
-
+
@Override
- public StructObjectInspector initialize(ObjectInspector [] args)
- throws UDFArgumentException {
-
+ public StructObjectInspector initialize(ObjectInspector[] args)
+ throws UDFArgumentException {
+
if (args.length != 1) {
throw new UDFArgumentException("explode() takes only one argument");
}
-
+
if (args[0].getCategory() != ObjectInspector.Category.LIST) {
throw new UDFArgumentException("explode() takes an array as a parameter");
}
- listOI = (ListObjectInspector)args[0];
-
+ listOI = (ListObjectInspector) args[0];
+
ArrayList<String> fieldNames = new ArrayList<String>();
ArrayList<ObjectInspector> fieldOIs = new ArrayList<ObjectInspector>();
fieldNames.add("col");
fieldOIs.add(listOI.getListElementObjectInspector());
- return ObjectInspectorFactory.getStandardStructObjectInspector(
- fieldNames, fieldOIs);
+ return ObjectInspectorFactory.getStandardStructObjectInspector(fieldNames,
+ fieldOIs);
}
Object forwardObj[] = new Object[1];
+
@Override
- public void process(Object [] o) throws HiveException {
-
+ public void process(Object[] o) throws HiveException {
+
List<?> list = listOI.getList(o[0]);
for (Object r : list) {
forwardObj[0] = r;
- this.forward(forwardObj);
+ forward(forwardObj);
}
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/udf/generic/UDTFCollector.java Thu Jan 21 10:37:58 2010
@@ -22,21 +22,25 @@
import org.apache.hadoop.hive.ql.metadata.HiveException;
/**
- * UDTFCollector collects data from a GenericUDTF and passes the data to a
+ * UDTFCollector collects data from a GenericUDTF and passes the data to a
* UDTFOperator
*/
public class UDTFCollector implements Collector {
- /* (non-Javadoc)
- * @see org.apache.hadoop.hive.ql.udf.generic.Collector#collect(java.lang.Object)
+ /*
+ * (non-Javadoc)
+ *
+ * @see
+ * org.apache.hadoop.hive.ql.udf.generic.Collector#collect(java.lang.Object)
*/
UDTFOperator op = null;
-
+
public UDTFCollector(UDTFOperator op) {
this.op = op;
}
+
@Override
- public void collect(Object input) throws HiveException{
+ public void collect(Object input) throws HiveException {
op.forwardUDTFOutput(input);
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManager.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManager.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManager.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManager.java Thu Jan 21 10:37:58 2010
@@ -67,174 +67,165 @@
package org.apache.hadoop.hive.ql.util.jdbm;
import java.io.IOException;
+
import org.apache.hadoop.hive.ql.util.jdbm.helper.Serializer;
/**
- * An interface to manages records, which are uninterpreted blobs of data.
- * <p>
- * The set of record operations is simple: fetch, insert, update and delete.
- * Each record is identified using a "rowid" and contains a byte[] data block.
- * Rowids are returned on inserts and you can store them someplace safe
- * to be able to get back to them. Data blocks can be as long as you wish,
- * and may have lengths different from the original when updating.
- *
+ * An interface to manage records, which are uninterpreted blobs of data.
+ * <p>
+ * The set of record operations is simple: fetch, insert, update and delete.
+ * Each record is identified using a "rowid" and contains a byte[] data block.
+ * Rowids are returned on inserts and you can store them someplace safe to be
+ * able to get back to them. Data blocks can be as long as you wish, and may
+ * have lengths different from the original when updating.
+ *
* @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
* @author <a href="cg@cdegroot.com">Cees de Groot</a>
* @version $Id: RecordManager.java,v 1.3 2005/06/25 23:12:31 doomdark Exp $
*/
-public interface RecordManager
-{
+public interface RecordManager {
- /**
- * Reserved slot for name directory.
- */
- public static final int NAME_DIRECTORY_ROOT = 0;
-
-
- /**
- * Inserts a new record using standard java object serialization.
- *
- * @param obj the object for the new record.
- * @return the rowid for the new record.
- * @throws IOException when one of the underlying I/O operations fails.
- */
- public abstract long insert( Object obj )
- throws IOException;
-
-
- /**
- * Inserts a new record using a custom serializer.
- *
- * @param obj the object for the new record.
- * @param serializer a custom serializer
- * @return the rowid for the new record.
- * @throws IOException when one of the underlying I/O operations fails.
- */
- public abstract long insert( Object obj, Serializer serializer )
- throws IOException;
-
-
- /**
- * Deletes a record.
- *
- * @param recid the rowid for the record that should be deleted.
- * @throws IOException when one of the underlying I/O operations fails.
- */
- public abstract void delete( long recid )
- throws IOException;
-
-
- /**
- * Updates a record using standard java object serialization.
- *
- * @param recid the recid for the record that is to be updated.
- * @param obj the new object for the record.
- * @throws IOException when one of the underlying I/O operations fails.
- */
- public abstract void update( long recid, Object obj )
- throws IOException;
-
-
- /**
- * Updates a record using a custom serializer.
- *
- * @param recid the recid for the record that is to be updated.
- * @param obj the new object for the record.
- * @param serializer a custom serializer
- * @throws IOException when one of the underlying I/O operations fails.
- */
- public abstract void update( long recid, Object obj, Serializer serializer )
- throws IOException;
-
-
- /**
- * Fetches a record using standard java object serialization.
- *
- * @param recid the recid for the record that must be fetched.
- * @return the object contained in the record.
- * @throws IOException when one of the underlying I/O operations fails.
- */
- public abstract Object fetch( long recid )
- throws IOException;
-
-
- /**
- * Fetches a record using a custom serializer.
- *
- * @param recid the recid for the record that must be fetched.
- * @param serializer a custom serializer
- * @return the object contained in the record.
- * @throws IOException when one of the underlying I/O operations fails.
- */
- public abstract Object fetch( long recid, Serializer serializer )
- throws IOException;
-
-
- /**
- * Closes the record manager.
- *
- * @throws IOException when one of the underlying I/O operations fails.
- */
- public abstract void close()
- throws IOException;
-
-
- /**
- * Returns the number of slots available for "root" rowids. These slots
- * can be used to store special rowids, like rowids that point to
- * other rowids. Root rowids are useful for bootstrapping access to
- * a set of data.
- */
- public abstract int getRootCount();
-
-
- /**
- * Returns the indicated root rowid.
- *
- * @see #getRootCount
- */
- public abstract long getRoot( int id )
- throws IOException;
-
-
- /**
- * Sets the indicated root rowid.
- *
- * @see #getRootCount
- */
- public abstract void setRoot( int id, long rowid )
- throws IOException;
-
-
- /**
- * Commit (make persistent) all changes since beginning of transaction.
- */
- public abstract void commit()
- throws IOException;
-
-
- /**
- * Rollback (cancel) all changes since beginning of transaction.
- */
- public abstract void rollback()
- throws IOException;
-
-
-
-
- /**
- * Obtain the record id of a named object. Returns 0 if named object
- * doesn't exist.
- */
- public abstract long getNamedObject( String name )
- throws IOException;
-
-
- /**
- * Set the record id of a named object.
- */
- public abstract void setNamedObject( String name, long recid )
- throws IOException;
+ /**
+ * Reserved slot for name directory.
+ */
+ public static final int NAME_DIRECTORY_ROOT = 0;
+
+ /**
+ * Inserts a new record using standard java object serialization.
+ *
+ * @param obj
+ * the object for the new record.
+ * @return the rowid for the new record.
+ * @throws IOException
+ * when one of the underlying I/O operations fails.
+ */
+ public abstract long insert(Object obj) throws IOException;
+
+ /**
+ * Inserts a new record using a custom serializer.
+ *
+ * @param obj
+ * the object for the new record.
+ * @param serializer
+ * a custom serializer
+ * @return the rowid for the new record.
+ * @throws IOException
+ * when one of the underlying I/O operations fails.
+ */
+ public abstract long insert(Object obj, Serializer serializer)
+ throws IOException;
+
+ /**
+ * Deletes a record.
+ *
+ * @param recid
+ * the rowid for the record that should be deleted.
+ * @throws IOException
+ * when one of the underlying I/O operations fails.
+ */
+ public abstract void delete(long recid) throws IOException;
+
+ /**
+ * Updates a record using standard java object serialization.
+ *
+ * @param recid
+ * the recid for the record that is to be updated.
+ * @param obj
+ * the new object for the record.
+ * @throws IOException
+ * when one of the underlying I/O operations fails.
+ */
+ public abstract void update(long recid, Object obj) throws IOException;
+
+ /**
+ * Updates a record using a custom serializer.
+ *
+ * @param recid
+ * the recid for the record that is to be updated.
+ * @param obj
+ * the new object for the record.
+ * @param serializer
+ * a custom serializer
+ * @throws IOException
+ * when one of the underlying I/O operations fails.
+ */
+ public abstract void update(long recid, Object obj, Serializer serializer)
+ throws IOException;
+
+ /**
+ * Fetches a record using standard java object serialization.
+ *
+ * @param recid
+ * the recid for the record that must be fetched.
+ * @return the object contained in the record.
+ * @throws IOException
+ * when one of the underlying I/O operations fails.
+ */
+ public abstract Object fetch(long recid) throws IOException;
+
+ /**
+ * Fetches a record using a custom serializer.
+ *
+ * @param recid
+ * the recid for the record that must be fetched.
+ * @param serializer
+ * a custom serializer
+ * @return the object contained in the record.
+ * @throws IOException
+ * when one of the underlying I/O operations fails.
+ */
+ public abstract Object fetch(long recid, Serializer serializer)
+ throws IOException;
+
+ /**
+ * Closes the record manager.
+ *
+ * @throws IOException
+ * when one of the underlying I/O operations fails.
+ */
+ public abstract void close() throws IOException;
+
+ /**
+ * Returns the number of slots available for "root" rowids. These slots can be
+ * used to store special rowids, like rowids that point to other rowids. Root
+ * rowids are useful for bootstrapping access to a set of data.
+ */
+ public abstract int getRootCount();
+
+ /**
+ * Returns the indicated root rowid.
+ *
+ * @see #getRootCount
+ */
+ public abstract long getRoot(int id) throws IOException;
+
+ /**
+ * Sets the indicated root rowid.
+ *
+ * @see #getRootCount
+ */
+ public abstract void setRoot(int id, long rowid) throws IOException;
+
+ /**
+ * Commit (make persistent) all changes since beginning of transaction.
+ */
+ public abstract void commit() throws IOException;
+
+ /**
+ * Rollback (cancel) all changes since beginning of transaction.
+ */
+ public abstract void rollback() throws IOException;
+
+ /**
+ * Obtain the record id of a named object. Returns 0 if named object doesn't
+ * exist.
+ */
+ public abstract long getNamedObject(String name) throws IOException;
+
+ /**
+ * Set the record id of a named object.
+ */
+ public abstract void setNamedObject(String name, long recid)
+ throws IOException;
}
-
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerFactory.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerFactory.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerFactory.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerFactory.java Thu Jan 21 10:37:58 2010
@@ -66,82 +66,82 @@
package org.apache.hadoop.hive.ql.util.jdbm;
-import java.io.IOException;
import java.io.File;
+import java.io.IOException;
import java.util.Properties;
/**
* This is the factory class to use for instantiating {@link RecordManager}
* instances.
- *
+ *
* @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
* @author <a href="cg@cdegroot.com">Cees de Groot</a>
- * @version $Id: RecordManagerFactory.java,v 1.2 2005/06/25 23:12:31 doomdark Exp $
+ * @version $Id: RecordManagerFactory.java,v 1.2 2005/06/25 23:12:31 doomdark
+ * Exp $
*/
-public final class RecordManagerFactory
-{
-
- /**
- * Create a record manager.
- *
- * @param name Name of the record file.
- * @throws IOException if an I/O related exception occurs while creating
- * or opening the record manager.
- * @throws UnsupportedOperationException if some options are not supported by the
- * implementation.
- * @throws IllegalArgumentException if some options are invalid.
- */
- public static RecordManager createRecordManager( String name )
- throws IOException
- {
- return createRecordManager( name, new Properties() );
- }
+public final class RecordManagerFactory {
-
- /**
- * Create a record manager.
- *
- * @param name Name of the record file.
- * @param options Record manager options.
- * @throws IOException if an I/O related exception occurs while creating
- * or opening the record manager.
- * @throws UnsupportedOperationException if some options are not supported by the
- * implementation.
- * @throws IllegalArgumentException if some options are invalid.
- */
- public static RecordManager createRecordManager( String name,
- Properties options )
- throws IOException
- {
- RecordManagerProvider factory = getFactory(options);
- return factory.createRecordManager( name, options );
- }
-
- public static RecordManager createRecordManager( File file, Properties options)
- throws IOException
- {
- RecordManagerProvider factory = getFactory(options);
- return factory.createRecordManager( file, options );
- }
-
- private static RecordManagerProvider getFactory(Properties options) {
- String provider;
- Class clazz;
- RecordManagerProvider factory;
-
- provider = options.getProperty( RecordManagerOptions.PROVIDER_FACTORY,
- "org.apache.hadoop.hive.ql.util.jdbm.recman.Provider" );
-
- try {
- clazz = Class.forName( provider );
- factory = (RecordManagerProvider) clazz.newInstance();
- } catch ( Exception except ) {
- throw new IllegalArgumentException( "Invalid record manager provider: "
- + provider
- + "\n[" + except.getClass().getName()
- + ": " + except.getMessage()
- + "]" );
- }
- return factory;
+ /**
+ * Create a record manager.
+ *
+ * @param name
+ * Name of the record file.
+ * @throws IOException
+ * if an I/O related exception occurs while creating or opening the
+ * record manager.
+ * @throws UnsupportedOperationException
+ * if some options are not supported by the implementation.
+ * @throws IllegalArgumentException
+ * if some options are invalid.
+ */
+ public static RecordManager createRecordManager(String name)
+ throws IOException {
+ return createRecordManager(name, new Properties());
+ }
+
+ /**
+ * Create a record manager.
+ *
+ * @param name
+ * Name of the record file.
+ * @param options
+ * Record manager options.
+ * @throws IOException
+ * if an I/O related exception occurs while creating or opening the
+ * record manager.
+ * @throws UnsupportedOperationException
+ * if some options are not supported by the implementation.
+ * @throws IllegalArgumentException
+ * if some options are invalid.
+ */
+ public static RecordManager createRecordManager(String name,
+ Properties options) throws IOException {
+ RecordManagerProvider factory = getFactory(options);
+ return factory.createRecordManager(name, options);
+ }
+
+ public static RecordManager createRecordManager(File file, Properties options)
+ throws IOException {
+ RecordManagerProvider factory = getFactory(options);
+ return factory.createRecordManager(file, options);
+ }
+
+ private static RecordManagerProvider getFactory(Properties options) {
+ String provider;
+ Class clazz;
+ RecordManagerProvider factory;
+
+ provider = options.getProperty(RecordManagerOptions.PROVIDER_FACTORY,
+ "org.apache.hadoop.hive.ql.util.jdbm.recman.Provider");
+
+ try {
+ clazz = Class.forName(provider);
+ factory = (RecordManagerProvider) clazz.newInstance();
+ } catch (Exception except) {
+ throw new IllegalArgumentException("Invalid record manager provider: "
+ + provider + "\n[" + except.getClass().getName() + ": "
+ + except.getMessage() + "]");
}
+ return factory;
+ }
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerOptions.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerOptions.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerOptions.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerOptions.java Thu Jan 21 10:37:58 2010
@@ -68,79 +68,71 @@
/**
* Standard options for RecordManager.
- *
+ *
* @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
* @author <a href="cg@cdegroot.com">Cees de Groot</a>
- * @version $Id: RecordManagerOptions.java,v 1.1 2002/05/31 06:33:20 boisvert Exp $
+ * @version $Id: RecordManagerOptions.java,v 1.1 2002/05/31 06:33:20 boisvert
+ * Exp $
*/
-public class RecordManagerOptions
-{
+public class RecordManagerOptions {
- /**
- * Option to create a thread-safe record manager.
- */
- public static final String PROVIDER_FACTORY = "jdbm.provider";
-
-
- /**
- * Option to create a thread-safe record manager.
- */
- public static final String THREAD_SAFE = "jdbm.threadSafe";
-
-
- /**
- * Option to automatically commit data after each operation.
- */
- public static final String AUTO_COMMIT = "jdbm.autoCommit";
-
-
- /**
- * Option to disable transaction (to increase performance at the cost of
- * potential data loss).
- */
- public static final String DISABLE_TRANSACTIONS = "jdbm.disableTransactions";
-
-
- /**
- * Cache type.
- */
- public static final String CACHE_TYPE = "jdbm.cache.type";
-
-
- /**
- * Cache size (when applicable)
- */
- public static final String CACHE_SIZE = "jdbm.cache.size";
-
-
- /**
- * Use normal (strong) object references for the record cache.
- */
- public static final String NORMAL_CACHE = "normal";
-
-
- /**
- * Use soft references {$link java.lang.ref.SoftReference} for the record
- * cache instead of the default normal object references.
- * <p>
- * Soft references are cleared at the discretion of the garbage collector
- * in response to memory demand.
- */
- public static final String SOFT_REF_CACHE = "soft";
-
-
- /**
- * Use weak references {$link java.lang.ref.WeakReference} for the record
- * cache instead of the default normal object references.
- * <p>
- * Weak references do not prevent their referents from being made
- * finalizable, finalized, and then reclaimed.
- */
- public static final String WEAK_REF_CACHE = "weak";
-
- /**
- * Disable cache.
- */
- public static final String NO_CACHE = "nocache";
+ /**
+ * Option to create a thread-safe record manager.
+ */
+ public static final String PROVIDER_FACTORY = "jdbm.provider";
+
+ /**
+ * Option to create a thread-safe record manager.
+ */
+ public static final String THREAD_SAFE = "jdbm.threadSafe";
+
+ /**
+ * Option to automatically commit data after each operation.
+ */
+ public static final String AUTO_COMMIT = "jdbm.autoCommit";
+
+ /**
+ * Option to disable transaction (to increase performance at the cost of
+ * potential data loss).
+ */
+ public static final String DISABLE_TRANSACTIONS = "jdbm.disableTransactions";
+
+ /**
+ * Cache type.
+ */
+ public static final String CACHE_TYPE = "jdbm.cache.type";
+
+ /**
+ * Cache size (when applicable)
+ */
+ public static final String CACHE_SIZE = "jdbm.cache.size";
+
+ /**
+ * Use normal (strong) object references for the record cache.
+ */
+ public static final String NORMAL_CACHE = "normal";
+
+ /**
+ * Use soft references {$link java.lang.ref.SoftReference} for the record
+ * cache instead of the default normal object references.
+ * <p>
+ * Soft references are cleared at the discretion of the garbage collector in
+ * response to memory demand.
+ */
+ public static final String SOFT_REF_CACHE = "soft";
+
+ /**
+ * Use weak references {$link java.lang.ref.WeakReference} for the record
+ * cache instead of the default normal object references.
+ * <p>
+ * Weak references do not prevent their referents from being made finalizable,
+ * finalized, and then reclaimed.
+ */
+ public static final String WEAK_REF_CACHE = "weak";
+
+ /**
+ * Disable cache.
+ */
+ public static final String NO_CACHE = "nocache";
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerProvider.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerProvider.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerProvider.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/RecordManagerProvider.java Thu Jan 21 10:37:58 2010
@@ -66,36 +66,38 @@
package org.apache.hadoop.hive.ql.util.jdbm;
-import java.io.IOException;
import java.io.File;
+import java.io.IOException;
import java.util.Properties;
/**
- * Provider of RecordManager implementation. Classes implementing this
- * interface act as a factory to provide implementations of RecordManager.
- *
+ * Provider of RecordManager implementation. Classes implementing this interface
+ * act as a factory to provide implementations of RecordManager.
+ *
* @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
- * @version $Id: RecordManagerProvider.java,v 1.2 2005/06/25 23:12:31 doomdark Exp $
+ * @version $Id: RecordManagerProvider.java,v 1.2 2005/06/25 23:12:31 doomdark
+ * Exp $
*/
-public interface RecordManagerProvider
-{
+public interface RecordManagerProvider {
+
+ /**
+ * Create a record manager.
+ *
+ * @param filename
+ * Base filename of the record file.
+ * @param options
+ * Record manager options.
+ * @throws IOException
+ * if an I/O related exception occurs while creating or opening the
+ * record manager.
+ * @throws UnsupportedOperationException
+ * if some options are not supported by the implementation.
+ * @throws IllegalArgumentException
+ * if some options are invalid.
+ */
+ public RecordManager createRecordManager(String filename, Properties options)
+ throws IOException;
- /**
- * Create a record manager.
- *
- * @param filename Base filename of the record file.
- * @param options Record manager options.
- * @throws IOException if an I/O related exception occurs while creating
- * or opening the record manager.
- * @throws UnsupportedOperationException if some options are not supported by the
- * implementation.
- * @throws IllegalArgumentException if some options are invalid.
- */
- public RecordManager createRecordManager( String filename,
- Properties options )
- throws IOException;
-
- public RecordManager createRecordManager( File file,
- Properties options )
- throws IOException;
+ public RecordManager createRecordManager(File file, Properties options)
+ throws IOException;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArrayComparator.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArrayComparator.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArrayComparator.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArrayComparator.java Thu Jan 21 10:37:58 2010
@@ -64,89 +64,86 @@
package org.apache.hadoop.hive.ql.util.jdbm.helper;
-import java.util.Comparator;
import java.io.Serializable;
+import java.util.Comparator;
/**
* Comparator for byte arrays.
- *
+ *
* @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
- * @version $Id: ByteArrayComparator.java,v 1.4 2002/05/31 06:33:20 boisvert Exp $
+ * @version $Id: ByteArrayComparator.java,v 1.4 2002/05/31 06:33:20 boisvert Exp
+ * $
*/
-public final class ByteArrayComparator
- implements Comparator, Serializable
-{
-
- /**
- * Version id for serialization.
- */
- final static long serialVersionUID = 1L;
-
-
- /**
- * Compare two objects.
- *
- * @param obj1 First object
- * @param obj2 Second object
- * @return a positive integer if obj1 > obj2, 0 if obj1 == obj2,
- * and a negative integer if obj1 < obj2
- */
- public int compare( Object obj1, Object obj2 )
- {
- if ( obj1 == null ) {
- throw new IllegalArgumentException( "Argument 'obj1' is null" );
- }
+public final class ByteArrayComparator implements Comparator, Serializable {
- if ( obj2 == null ) {
- throw new IllegalArgumentException( "Argument 'obj2' is null" );
- }
+ /**
+ * Version id for serialization.
+ */
+ final static long serialVersionUID = 1L;
+
+ /**
+ * Compare two objects.
+ *
+ * @param obj1
+ * First object
+ * @param obj2
+ * Second object
+ * @return a positive integer if obj1 > obj2, 0 if obj1 == obj2, and a
+ * negative integer if obj1 < obj2
+ */
+ public int compare(Object obj1, Object obj2) {
+ if (obj1 == null) {
+ throw new IllegalArgumentException("Argument 'obj1' is null");
+ }
- return compareByteArray( (byte[]) obj1, (byte[]) obj2 );
- }
+ if (obj2 == null) {
+ throw new IllegalArgumentException("Argument 'obj2' is null");
+ }
+ return compareByteArray((byte[]) obj1, (byte[]) obj2);
+ }
- /**
- * Compare two byte arrays.
- */
- public static int compareByteArray( byte[] thisKey, byte[] otherKey )
- {
- int len = Math.min( thisKey.length, otherKey.length );
-
- // compare the byte arrays
- for ( int i=0; i<len; i++ ) {
- if ( thisKey[i] >= 0 ) {
- if ( otherKey[i] >= 0 ) {
- // both positive
- if ( thisKey[i] < otherKey[i] ) {
- return -1;
- } else if ( thisKey[i] > otherKey[i] ) {
- return 1;
- }
- } else {
- // otherKey is negative => greater (because MSB is 1)
- return -1;
- }
- } else {
- if ( otherKey[i] >= 0 ) {
- // thisKey is negative => greater (because MSB is 1)
- return 1;
- } else {
- // both negative
- if ( thisKey[i] < otherKey[i] ) {
- return -1;
- } else if ( thisKey[i] > otherKey[i] ) {
- return 1;
- }
- }
- }
- }
- if ( thisKey.length == otherKey.length) {
- return 0;
+ /**
+ * Compare two byte arrays.
+ */
+ public static int compareByteArray(byte[] thisKey, byte[] otherKey) {
+ int len = Math.min(thisKey.length, otherKey.length);
+
+ // compare the byte arrays
+ for (int i = 0; i < len; i++) {
+ if (thisKey[i] >= 0) {
+ if (otherKey[i] >= 0) {
+ // both positive
+ if (thisKey[i] < otherKey[i]) {
+ return -1;
+ } else if (thisKey[i] > otherKey[i]) {
+ return 1;
+ }
+ } else {
+ // otherKey is negative => greater (because MSB is 1)
+ return -1;
}
- if ( thisKey.length < otherKey.length ) {
+ } else {
+ if (otherKey[i] >= 0) {
+ // thisKey is negative => greater (because MSB is 1)
+ return 1;
+ } else {
+ // both negative
+ if (thisKey[i] < otherKey[i]) {
return -1;
+ } else if (thisKey[i] > otherKey[i]) {
+ return 1;
+ }
}
- return 1;
+ }
+ }
+ if (thisKey.length == otherKey.length) {
+ return 0;
+ }
+ if (thisKey.length < otherKey.length) {
+ return -1;
}
+ return 1;
+ }
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArraySerializer.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArraySerializer.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArraySerializer.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/ByteArraySerializer.java Thu Jan 21 10:37:58 2010
@@ -66,55 +66,48 @@
import java.io.IOException;
-
/**
- * Serializer for byte arrays -- simple returns the byte array itself. No actual
+ * Serializer for byte arrays -- simply returns the byte array itself. No actual
* serialization is performed.
- *
+ *
* @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
- * @version $Id: ByteArraySerializer.java,v 1.1 2003/03/21 02:48:42 boisvert Exp $
+ * @version $Id: ByteArraySerializer.java,v 1.1 2003/03/21 02:48:42 boisvert Exp
+ * $
*/
-public final class ByteArraySerializer
- implements Serializer
-{
-
- /**
- * Version id for serialization.
- */
- final static long serialVersionUID = 1L;
-
-
- /**
- * Static instance.
- */
- public static final ByteArraySerializer INSTANCE = new ByteArraySerializer();
-
-
- /**
- * Serialize the content of an object into a byte array.
- *
- * @param obj Object to serialize
- * @return a byte array representing the object's state
- *
- */
- public byte[] serialize( Object obj )
- throws IOException
- {
- return (byte[]) obj;
- }
-
-
- /**
- * Deserialize the content of an object from a byte array.
- *
- * @param serialized Byte array representation of the object
- * @return deserialized object
- *
- */
- public Object deserialize( byte[] serialized )
- throws IOException
- {
- return serialized;
- }
+public final class ByteArraySerializer implements Serializer {
+
+ /**
+ * Version id for serialization.
+ */
+ final static long serialVersionUID = 1L;
+
+ /**
+ * Static instance.
+ */
+ public static final ByteArraySerializer INSTANCE = new ByteArraySerializer();
+
+ /**
+ * Serialize the content of an object into a byte array.
+ *
+ * @param obj
+ * Object to serialize
+ * @return a byte array representing the object's state
+ *
+ */
+ public byte[] serialize(Object obj) throws IOException {
+ return (byte[]) obj;
+ }
+
+ /**
+ * Deserialize the content of an object from a byte array.
+ *
+ * @param serialized
+ * Byte array representation of the object
+ * @return deserialized object
+ *
+ */
+ public Object deserialize(byte[] serialized) throws IOException {
+ return serialized;
+ }
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CacheEvictionException.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CacheEvictionException.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CacheEvictionException.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CacheEvictionException.java Thu Jan 21 10:37:58 2010
@@ -66,28 +66,24 @@
package org.apache.hadoop.hive.ql.util.jdbm.helper;
/**
- * Exception that occurs during eviction of an object in the cache.
- *
- * @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
- * @version $Id: CacheEvictionException.java,v 1.4 2003/10/21 15:43:20 boisvert Exp $
+ * Exception that occurs during eviction of an object in the cache.
+ *
+ * @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
+ * @version $Id: CacheEvictionException.java,v 1.4 2003/10/21 15:43:20 boisvert
+ * Exp $
*/
-public class CacheEvictionException
- extends Exception
-{
+public class CacheEvictionException extends Exception {
- /**
- * Nested exception -- the original exception that occured, if any.
- */
- protected Exception _nested;
+ /**
+ * Nested exception -- the original exception that occurred, if any.
+ */
+ protected Exception _nested;
+ public CacheEvictionException(Exception nested) {
+ _nested = nested;
+ }
- public CacheEvictionException( Exception nested )
- {
- _nested = nested;
- }
-
- public Exception getNestedException()
- {
- return _nested;
- }
+ public Exception getNestedException() {
+ return _nested;
+ }
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicy.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicy.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicy.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicy.java Thu Jan 21 10:37:58 2010
@@ -68,94 +68,92 @@
import java.util.Enumeration;
/**
- * CachePolicity is an abstraction for different cache policies.
- * (ie. MRU, time-based, soft-refs, ...)
- *
+ * CachePolicy is an abstraction for different cache policies. (ie. MRU,
+ * time-based, soft-refs, ...)
+ *
* @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
* @author <a href="mailto:dranatunga@users.sourceforge.net">Dilum Ranatunga</a>
* @version $Id: CachePolicy.java,v 1.5 2003/11/01 13:25:02 dranatunga Exp $
*/
-public interface CachePolicy
-{
+public interface CachePolicy {
- /**
- * Place an object in the cache. If the cache does not currently contain
- * an object for the key specified, this mapping is added. If an object
- * currently exists under the specified key, the current object is
- * replaced with the new object.
- * <p>
- * If the changes to the cache cause the eviction of any objects
- * <strong>stored under other key(s)</strong>, events corresponding to
- * the evictions are fired for each object. If an event listener is
- * unable to handle the eviction, and throws a cache eviction exception,
- * that exception is propagated to the caller. If such an exception is
- * thrown, the cache itself should be left as it was before the
- * <code>put()</code> operation was invoked: the the object whose
- * eviction failed is still in the cache, and the new insertion or
- * modification is reverted.
- *
- * @param key key for the cached object
- * @param value the cached object
- * @throws CacheEvictionException propagated if, while evicting objects
- * to make room for new object, an eviction listener encountered
- * this problem.
- */
- public void put( Object key, Object value )
- throws CacheEvictionException;
-
-
- /**
- * Obtain the object stored under the key specified.
- *
- * @param key key the object was cached under
- * @return the object if it is still in the cache, null otherwise.
- */
- public Object get( Object key );
-
-
- /**
- * Remove the object stored under the key specified. Note that since
- * eviction notices are only fired when objects under <strong>different
- * keys</strong> are evicted, no event is fired for any object stored
- * under this key (see {@link #put(Object, Object) put( )}).
- *
- * @param key key the object was stored in the cache under.
- */
- public void remove( Object key );
-
-
- /**
- * Remove all objects from the cache. Consistent with
- * {@link #remove(Object) remove( )}, no eviction notices are fired.
- */
- public void removeAll();
-
-
- /**
- * Enumerate through the objects currently in the cache.
- */
- public Enumeration elements();
-
-
- /**
- * Add a listener to this cache policy.
- * <p>
- * If this cache policy already contains a listener that is equal to
- * the one being added, this call has no effect.
- *
- * @param listener the (non-null) listener to add to this policy
- * @throws IllegalArgumentException if listener is null.
- */
- public void addListener( CachePolicyListener listener )
- throws IllegalArgumentException;
-
-
- /**
- * Remove a listener from this cache policy. The listener is found
- * using object equality, not identity.
- *
- * @param listener the listener to remove from this policy
- */
- public void removeListener( CachePolicyListener listener );
+ /**
+ * Place an object in the cache. If the cache does not currently contain an
+ * object for the key specified, this mapping is added. If an object currently
+ * exists under the specified key, the current object is replaced with the new
+ * object.
+ * <p>
+ * If the changes to the cache cause the eviction of any objects
+ * <strong>stored under other key(s)</strong>, events corresponding to the
+ * evictions are fired for each object. If an event listener is unable to
+ * handle the eviction, and throws a cache eviction exception, that exception
+ * is propagated to the caller. If such an exception is thrown, the cache
+ * itself should be left as it was before the <code>put()</code> operation was
+   * invoked: the object whose eviction failed is still in the cache, and
+ * the new insertion or modification is reverted.
+ *
+ * @param key
+ * key for the cached object
+ * @param value
+ * the cached object
+ * @throws CacheEvictionException
+ * propagated if, while evicting objects to make room for new
+ * object, an eviction listener encountered this problem.
+ */
+ public void put(Object key, Object value) throws CacheEvictionException;
+
+ /**
+ * Obtain the object stored under the key specified.
+ *
+ * @param key
+ * key the object was cached under
+ * @return the object if it is still in the cache, null otherwise.
+ */
+ public Object get(Object key);
+
+ /**
+ * Remove the object stored under the key specified. Note that since eviction
+ * notices are only fired when objects under <strong>different keys</strong>
+ * are evicted, no event is fired for any object stored under this key (see
+ * {@link #put(Object, Object) put( )}).
+ *
+ * @param key
+ * key the object was stored in the cache under.
+ */
+ public void remove(Object key);
+
+ /**
+ * Remove all objects from the cache. Consistent with {@link #remove(Object)
+ * remove( )}, no eviction notices are fired.
+ */
+ public void removeAll();
+
+ /**
+ * Enumerate through the objects currently in the cache.
+ */
+ public Enumeration elements();
+
+ /**
+ * Add a listener to this cache policy.
+ * <p>
+ * If this cache policy already contains a listener that is equal to the one
+ * being added, this call has no effect.
+ *
+ * @param listener
+ * the (non-null) listener to add to this policy
+ * @throws IllegalArgumentException
+ * if listener is null.
+ */
+ public void addListener(CachePolicyListener listener)
+ throws IllegalArgumentException;
+
+ /**
+ * Remove a listener from this cache policy. The listener is found using
+ * object equality, not identity.
+ *
+ * @param listener
+ * the listener to remove from this policy
+ */
+ public void removeListener(CachePolicyListener listener);
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicyListener.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicyListener.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicyListener.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/CachePolicyListener.java Thu Jan 21 10:37:58 2010
@@ -66,30 +66,32 @@
package org.apache.hadoop.hive.ql.util.jdbm.helper;
/**
- * Callback interface between {@link CachePolicy} and a Cache implementation
- * to notify about cached object eviction.
+ * Callback interface between {@link CachePolicy} and a Cache implementation to
+ * notify about cached object eviction.
* <p>
* Note that <code>CachePolicy</code> implementations typically use
- * <em>object equality</em> when removing listeners, so concrete
- * implementations of this interface should also pay attention to
- * their {@link Object#equals(Object)} and {@link Object#hashCode()}
- * methods.
- *
+ * <em>object equality</em> when removing listeners, so concrete implementations
+ * of this interface should also pay attention to their
+ * {@link Object#equals(Object)} and {@link Object#hashCode()} methods.
+ *
* @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
- * @version $Id: CachePolicyListener.java,v 1.3 2003/11/01 13:25:41 dranatunga Exp $
+ * @version $Id: CachePolicyListener.java,v 1.3 2003/11/01 13:25:41 dranatunga
+ * Exp $
*/
public interface CachePolicyListener {
- /**
- * Notification that the cache this listener is attached to is evicting
- * the object indicated.
- *
- * @param obj object being evited from cache
- * @throws CacheEvictionException if this listener encountered problems
- * while preparing for the specified object's eviction. For example,
- * a listener may try to persist the object to disk, and encounter
- * an <code>IOException</code>.
- */
- public void cacheObjectEvicted(Object obj) throws CacheEvictionException;
+ /**
+ * Notification that the cache this listener is attached to is evicting the
+ * object indicated.
+ *
+ * @param obj
+   *          object being evicted from cache
+ * @throws CacheEvictionException
+ * if this listener encountered problems while preparing for the
+ * specified object's eviction. For example, a listener may try to
+ * persist the object to disk, and encounter an
+ * <code>IOException</code>.
+ */
+ public void cacheObjectEvicted(Object obj) throws CacheEvictionException;
}
Modified: hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Conversion.java
URL: http://svn.apache.org/viewvc/hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Conversion.java?rev=901644&r1=901643&r2=901644&view=diff
==============================================================================
--- hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Conversion.java (original)
+++ hadoop/hive/trunk/ql/src/java/org/apache/hadoop/hive/ql/util/jdbm/helper/Conversion.java Thu Jan 21 10:37:58 2010
@@ -64,178 +64,148 @@
package org.apache.hadoop.hive.ql.util.jdbm.helper;
-
/**
* Miscelaneous conversion utility methods.
- *
+ *
* @author <a href="mailto:boisvert@intalio.com">Alex Boisvert</a>
* @version $Id: Conversion.java,v 1.3 2002/05/31 06:33:20 boisvert Exp $
*/
-public class Conversion
-{
-
- /**
- * Convert a string into a byte array.
- */
- public static byte[] convertToByteArray( String s )
- {
- try {
- // see the following page for character encoding
- // http://java.sun.com/products/jdk/1.1/docs/guide/intl/encoding.doc.html
- return s.getBytes( "UTF8" );
- } catch ( java.io.UnsupportedEncodingException uee ) {
- uee.printStackTrace();
- throw new Error( "Platform doesn't support UTF8 encoding" );
- }
- }
-
-
- /**
- * Convert a byte into a byte array.
- */
- public static byte[] convertToByteArray( byte n )
- {
- n = (byte)( n ^ ( (byte) 0x80 ) ); // flip MSB because "byte" is signed
- return new byte[] { n };
- }
-
-
- /**
- * Convert a short into a byte array.
- */
- public static byte[] convertToByteArray( short n )
- {
- n = (short) ( n ^ ( (short) 0x8000 ) ); // flip MSB because "short" is signed
- byte[] key = new byte[ 2 ];
- pack2( key, 0, n );
- return key;
- }
-
-
- /**
- * Convert an int into a byte array.
- */
- public static byte[] convertToByteArray( int n )
- {
- n = (n ^ 0x80000000); // flip MSB because "int" is signed
- byte[] key = new byte[4];
- pack4(key, 0, n);
- return key;
- }
-
-
- /**
- * Convert a long into a byte array.
- */
- public static byte[] convertToByteArray( long n )
- {
- n = (n ^ 0x8000000000000000L); // flip MSB because "long" is signed
- byte[] key = new byte[8];
- pack8( key, 0, n );
- return key;
- }
-
-
- /**
- * Convert a byte array (encoded as UTF-8) into a String
- */
- public static String convertToString( byte[] buf )
- {
- try {
- // see the following page for character encoding
- // http://java.sun.com/products/jdk/1.1/docs/guide/intl/encoding.doc.html
- return new String( buf, "UTF8" );
- } catch ( java.io.UnsupportedEncodingException uee ) {
- uee.printStackTrace();
- throw new Error( "Platform doesn't support UTF8 encoding" );
- }
- }
-
-
- /**
- * Convert a byte array into an integer (signed 32-bit) value.
- */
- public static int convertToInt( byte[] buf )
- {
- int value = unpack4( buf, 0 );
- value = ( value ^ 0x80000000 ); // flip MSB because "int" is signed
- return value;
- }
-
-
- /**
- * Convert a byte array into a long (signed 64-bit) value.
- */
- public static long convertToLong( byte[] buf )
- {
- long value = ( (long) unpack4( buf, 0 ) << 32 )
- + ( unpack4( buf, 4 ) & 0xFFFFFFFFL );
- value = ( value ^ 0x8000000000000000L ); // flip MSB because "long" is signed
- return value;
- }
+public class Conversion {
-
-
-
- static int unpack4( byte[] buf, int offset )
- {
- int value = ( buf[ offset ] << 24 )
- | ( ( buf[ offset+1 ] << 16 ) & 0x00FF0000 )
- | ( ( buf[ offset+2 ] << 8 ) & 0x0000FF00 )
- | ( ( buf[ offset+3 ] << 0 ) & 0x000000FF );
-
- return value;
- }
-
-
- static final void pack2( byte[] data, int offs, int val )
- {
- data[offs++] = (byte) ( val >> 8 );
- data[offs++] = (byte) val;
- }
-
-
- static final void pack4( byte[] data, int offs, int val )
- {
- data[offs++] = (byte) ( val >> 24 );
- data[offs++] = (byte) ( val >> 16 );
- data[offs++] = (byte) ( val >> 8 );
- data[offs++] = (byte) val;
- }
-
-
- static final void pack8( byte[] data, int offs, long val )
- {
- pack4( data, 0, (int) ( val >> 32 ) );
- pack4( data, 4, (int) val );
- }
-
-
- /**
- * Test static methods
- */
- public static void main( String[] args )
- {
- byte[] buf;
-
- buf = convertToByteArray( (int) 5 );
- System.out.println( "int value of 5 is: " + convertToInt( buf ) );
-
- buf = convertToByteArray( (int) -1 );
- System.out.println( "int value of -1 is: " + convertToInt( buf ) );
-
- buf = convertToByteArray( (int) 22111000 );
- System.out.println( "int value of 22111000 is: " + convertToInt( buf ) );
-
-
- buf = convertToByteArray( (long) 5L );
- System.out.println( "long value of 5 is: " + convertToLong( buf ) );
-
- buf = convertToByteArray( (long) -1L );
- System.out.println( "long value of -1 is: " + convertToLong( buf ) );
-
- buf = convertToByteArray( (long) 1112223334445556667L );
- System.out.println( "long value of 1112223334445556667 is: " + convertToLong( buf ) );
- }
+ /**
+ * Convert a string into a byte array.
+ */
+ public static byte[] convertToByteArray(String s) {
+ try {
+ // see the following page for character encoding
+ // http://java.sun.com/products/jdk/1.1/docs/guide/intl/encoding.doc.html
+ return s.getBytes("UTF8");
+ } catch (java.io.UnsupportedEncodingException uee) {
+ uee.printStackTrace();
+ throw new Error("Platform doesn't support UTF8 encoding");
+ }
+ }
+
+ /**
+ * Convert a byte into a byte array.
+ */
+ public static byte[] convertToByteArray(byte n) {
+ n = (byte) (n ^ ((byte) 0x80)); // flip MSB because "byte" is signed
+ return new byte[] { n };
+ }
+
+ /**
+ * Convert a short into a byte array.
+ */
+ public static byte[] convertToByteArray(short n) {
+ n = (short) (n ^ ((short) 0x8000)); // flip MSB because "short" is signed
+ byte[] key = new byte[2];
+ pack2(key, 0, n);
+ return key;
+ }
+
+ /**
+ * Convert an int into a byte array.
+ */
+ public static byte[] convertToByteArray(int n) {
+ n = (n ^ 0x80000000); // flip MSB because "int" is signed
+ byte[] key = new byte[4];
+ pack4(key, 0, n);
+ return key;
+ }
+
+ /**
+ * Convert a long into a byte array.
+ */
+ public static byte[] convertToByteArray(long n) {
+ n = (n ^ 0x8000000000000000L); // flip MSB because "long" is signed
+ byte[] key = new byte[8];
+ pack8(key, 0, n);
+ return key;
+ }
+
+ /**
+ * Convert a byte array (encoded as UTF-8) into a String
+ */
+ public static String convertToString(byte[] buf) {
+ try {
+ // see the following page for character encoding
+ // http://java.sun.com/products/jdk/1.1/docs/guide/intl/encoding.doc.html
+ return new String(buf, "UTF8");
+ } catch (java.io.UnsupportedEncodingException uee) {
+ uee.printStackTrace();
+ throw new Error("Platform doesn't support UTF8 encoding");
+ }
+ }
+
+ /**
+ * Convert a byte array into an integer (signed 32-bit) value.
+ */
+ public static int convertToInt(byte[] buf) {
+ int value = unpack4(buf, 0);
+ value = (value ^ 0x80000000); // flip MSB because "int" is signed
+ return value;
+ }
+
+ /**
+ * Convert a byte array into a long (signed 64-bit) value.
+ */
+ public static long convertToLong(byte[] buf) {
+ long value = ((long) unpack4(buf, 0) << 32)
+ + (unpack4(buf, 4) & 0xFFFFFFFFL);
+ value = (value ^ 0x8000000000000000L); // flip MSB because "long" is signed
+ return value;
+ }
+
+ static int unpack4(byte[] buf, int offset) {
+ int value = (buf[offset] << 24) | ((buf[offset + 1] << 16) & 0x00FF0000)
+ | ((buf[offset + 2] << 8) & 0x0000FF00)
+ | ((buf[offset + 3] << 0) & 0x000000FF);
+
+ return value;
+ }
+
+ static final void pack2(byte[] data, int offs, int val) {
+ data[offs++] = (byte) (val >> 8);
+ data[offs++] = (byte) val;
+ }
+
+ static final void pack4(byte[] data, int offs, int val) {
+ data[offs++] = (byte) (val >> 24);
+ data[offs++] = (byte) (val >> 16);
+ data[offs++] = (byte) (val >> 8);
+ data[offs++] = (byte) val;
+ }
+
+ static final void pack8(byte[] data, int offs, long val) {
+ pack4(data, 0, (int) (val >> 32));
+ pack4(data, 4, (int) val);
+ }
+
+ /**
+ * Test static methods
+ */
+ public static void main(String[] args) {
+ byte[] buf;
+
+ buf = convertToByteArray(5);
+ System.out.println("int value of 5 is: " + convertToInt(buf));
+
+ buf = convertToByteArray(-1);
+ System.out.println("int value of -1 is: " + convertToInt(buf));
+
+ buf = convertToByteArray(22111000);
+ System.out.println("int value of 22111000 is: " + convertToInt(buf));
+
+ buf = convertToByteArray(5L);
+ System.out.println("long value of 5 is: " + convertToLong(buf));
+
+ buf = convertToByteArray(-1L);
+ System.out.println("long value of -1 is: " + convertToLong(buf));
+
+ buf = convertToByteArray(1112223334445556667L);
+ System.out.println("long value of 1112223334445556667 is: "
+ + convertToLong(buf));
+ }
}