Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2013/03/07 03:57:46 UTC

svn commit: r1453669 [4/5] - in /hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common: ./ src/ src/main/bin/ src/main/conf/ src/main/docs/ src/main/docs/src/documentation/content/xdocs/ src/main/java/ src/main/java/org/apache/hadoop/fs/...

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c Thu Mar  7 02:57:40 2013
@@ -14,7 +14,7 @@
  *  See the License for the specific language governing permissions and
  *  limitations under the License.
  */
- 
+
 #include <jni.h>
 #include "file_descriptor.h"
 #include "org_apache_hadoop.h"
@@ -26,6 +26,10 @@ static jfieldID fd_descriptor;
 // the no-argument constructor
 static jmethodID fd_constructor;
 
+#ifdef WINDOWS
+// the internal field for the long handle
+static jfieldID fd_handle;
+#endif
 
 void fd_init(JNIEnv* env)
 {
@@ -37,6 +41,12 @@ void fd_init(JNIEnv* env)
 
   fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
   PASS_EXCEPTIONS(env);
+
+#ifdef WINDOWS
+  fd_handle = (*env)->GetFieldID(env, fd_class, "handle", "J");
+  PASS_EXCEPTIONS(env);
+#endif
+
   fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
 }
 
@@ -46,9 +56,13 @@ void fd_deinit(JNIEnv *env) {
     fd_class = NULL;
   }
   fd_descriptor = NULL;
+#ifdef WINDOWS
+  fd_handle = NULL;
+#endif
   fd_constructor = NULL;
 }
 
+#ifdef UNIX
 /*
  * Given an instance 'obj' of java.io.FileDescriptor, return the
  * underlying fd, or throw if unavailable
@@ -71,4 +85,31 @@ jobject fd_create(JNIEnv *env, int fd) {
 
   (*env)->SetIntField(env, obj, fd_descriptor, fd);
   return obj;
-} 
+}
+#endif
+
+#ifdef WINDOWS
+/*
+ * Given an instance 'obj' of java.io.FileDescriptor, return the
+ * underlying long handle, or throw if unavailable
+ */
+long fd_get(JNIEnv* env, jobject obj) {
+  if (obj == NULL) {
+    THROW(env, "java/lang/NullPointerException",
+          "FileDescriptor object is null");
+    return -1;
+  }
+  return (long) (*env)->GetLongField(env, obj, fd_handle);
+}
+
+/*
+ * Create a FileDescriptor object corresponding to the given long handle
+ */
+jobject fd_create(JNIEnv *env, long fd) {
+  jobject obj = (*env)->NewObject(env, fd_class, fd_constructor);
+  PASS_EXCEPTIONS_RET(env, (jobject) NULL);
+
+  (*env)->SetLongField(env, obj, fd_handle, fd);
+  return obj;
+}
+#endif

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h Thu Mar  7 02:57:40 2013
@@ -18,11 +18,19 @@
 #define FILE_DESCRIPTOR_H
 
 #include <jni.h>
+#include "org_apache_hadoop.h"
 
 void fd_init(JNIEnv *env);
 void fd_deinit(JNIEnv *env);
 
+#ifdef UNIX
 int fd_get(JNIEnv* env, jobject obj);
 jobject fd_create(JNIEnv *env, int fd);
+#endif
+
+#ifdef WINDOWS
+long fd_get(JNIEnv* env, jobject obj);
+jobject fd_create(JNIEnv *env, long fd);
+#endif
 
 #endif
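
For orientation (a sketch, not part of this commit): a native method that
receives a java.io.FileDescriptor can branch on the same UNIX/WINDOWS macros
to recover the platform handle. The function name and the elided I/O calls
below are hypothetical:

    #include <jni.h>
    #include "file_descriptor.h"
    #include "org_apache_hadoop.h"   /* defines exactly one of UNIX, WINDOWS */

    /* Hypothetical caller: unwrap the OS-level handle from a FileDescriptor. */
    static void use_descriptor(JNIEnv *env, jobject jfd)
    {
    #ifdef UNIX
      int fd = fd_get(env, jfd);        /* reads the int "fd" field */
      if ((*env)->ExceptionCheck(env)) return;
      /* ... read(fd, ...), lseek(fd, ...), etc. ... */
    #endif
    #ifdef WINDOWS
      long handle = fd_get(env, jfd);   /* reads the long "handle" field */
      if ((*env)->ExceptionCheck(env)) return;
      /* ... ReadFile((HANDLE) handle, ...), etc. ... */
    #endif
    }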

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c Thu Mar  7 02:57:40 2013
@@ -16,7 +16,11 @@
  * limitations under the License.
  */
 
+#include "org_apache_hadoop.h"
+
+#ifdef UNIX
 #include "config.h"
+#endif // UNIX
 
 #include <jni.h>
 
@@ -28,4 +32,4 @@ JNIEXPORT jboolean JNICALL Java_org_apac
 #else
   return JNI_FALSE;
 #endif
-}
+}
\ No newline at end of file

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c Thu Mar  7 02:57:40 2013
@@ -16,18 +16,22 @@
  * limitations under the License.
  */
 
-#include <arpa/inet.h>
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_util_NativeCrc32.h"
+
 #include <assert.h>
-#include <inttypes.h>
 #include <stdlib.h>
 #include <stdint.h>
 #include <string.h>
-#include <unistd.h>
 
+#ifdef UNIX
+#include <inttypes.h>
+#include <arpa/inet.h>
+#include <unistd.h>
 #include "config.h"
-#include "org_apache_hadoop.h"
-#include "org_apache_hadoop_util_NativeCrc32.h"
 #include "gcc_optimizations.h"
+#endif // UNIX
+
 #include "bulk_crc32.h"
 
 static void throw_checksum_exception(JNIEnv *env,
@@ -36,6 +40,9 @@ static void throw_checksum_exception(JNI
   char message[1024];
   jstring jstr_message;
   char *filename;
+  jclass checksum_exception_clazz;
+  jmethodID checksum_exception_ctor;
+  jthrowable obj;
 
   // Get filename as C string, or "null" if not provided
   if (j_filename == NULL) {
@@ -50,28 +57,38 @@ static void throw_checksum_exception(JNI
   }
 
   // Format error message
+#ifdef WINDOWS
+  _snprintf_s(
+    message,
+    sizeof(message),
+    _TRUNCATE,
+    "Checksum error: %s at %I64d exp: %d got: %d",
+    filename, pos, expected_crc, got_crc);
+#else
   snprintf(message, sizeof(message),
     "Checksum error: %s at %"PRId64" exp: %"PRId32" got: %"PRId32,
     filename, pos, expected_crc, got_crc);
+#endif // WINDOWS
+
   if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL) {
     goto cleanup;
   }
  
   // Throw exception
-  jclass checksum_exception_clazz = (*env)->FindClass(
+  checksum_exception_clazz = (*env)->FindClass(
     env, "org/apache/hadoop/fs/ChecksumException");
   if (checksum_exception_clazz == NULL) {
     goto cleanup;
   }
 
-  jmethodID checksum_exception_ctor = (*env)->GetMethodID(env,
+  checksum_exception_ctor = (*env)->GetMethodID(env,
     checksum_exception_clazz, "<init>",
     "(Ljava/lang/String;J)V");
   if (checksum_exception_ctor == NULL) {
     goto cleanup;
   }
 
-  jthrowable obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
+  obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
     checksum_exception_ctor, jstr_message, pos);
   if (obj == NULL) goto cleanup;
 
@@ -103,6 +120,14 @@ JNIEXPORT void JNICALL Java_org_apache_h
     jobject j_data, jint data_offset, jint data_len,
     jstring j_filename, jlong base_pos)
 {
+  uint8_t *sums_addr;
+  uint8_t *data_addr;
+  uint32_t *sums;
+  uint8_t *data;
+  int crc_type;
+  crc32_error_t error_data;
+  int ret;
+
   if (unlikely(!j_sums || !j_data)) {
     THROW(env, "java/lang/NullPointerException",
       "input ByteBuffers must not be null");
@@ -110,8 +135,8 @@ JNIEXPORT void JNICALL Java_org_apache_h
   }
 
   // Convert direct byte buffers to C pointers
-  uint8_t *sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
-  uint8_t *data_addr = (*env)->GetDirectBufferAddress(env, j_data);
+  sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
+  data_addr = (*env)->GetDirectBufferAddress(env, j_data);
 
   if (unlikely(!sums_addr || !data_addr)) {
     THROW(env, "java/lang/IllegalArgumentException",
@@ -129,16 +154,15 @@ JNIEXPORT void JNICALL Java_org_apache_h
     return;
   }
 
-  uint32_t *sums = (uint32_t *)(sums_addr + sums_offset);
-  uint8_t *data = data_addr + data_offset;
+  sums = (uint32_t *)(sums_addr + sums_offset);
+  data = data_addr + data_offset;
 
   // Convert to correct internal C constant for CRC type
-  int crc_type = convert_java_crc_type(env, j_crc_type);
+  crc_type = convert_java_crc_type(env, j_crc_type);
   if (crc_type == -1) return; // exception already thrown
 
   // Setup complete. Actually verify checksums.
-  crc32_error_t error_data;
-  int ret = bulk_verify_crc(data, data_len, sums, crc_type,
+  ret = bulk_verify_crc(data, data_len, sums, crc_type,
                             bytes_per_checksum, &error_data);
   if (likely(ret == CHECKSUMS_VALID)) {
     return;
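
Two details in this file are dictated by the Microsoft compiler of that era:
local declarations are hoisted to the top of each function because MSVC only
accepts C89, and the 64-bit offset is formatted with %I64d because
<inttypes.h> (and hence PRId64) is unavailable. A self-contained sketch of
the same formatting split, using _MSC_VER where the commit uses its own
WINDOWS macro:

    #include <stdio.h>
    #include <stdint.h>
    #ifndef _MSC_VER
    #include <inttypes.h>
    #endif

    int main(void)
    {
      int64_t pos = 5368709120LL;   /* a file offset past 4 GB */
      char msg[128];
    #ifdef _MSC_VER
      _snprintf_s(msg, sizeof(msg), _TRUNCATE, "at %I64d", pos);
    #else
      snprintf(msg, sizeof(msg), "at %" PRId64, pos);
    #endif
      puts(msg);   /* prints "at 5368709120" */
      return 0;
    }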

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c Thu Mar  7 02:57:40 2013
@@ -21,25 +21,31 @@
  *   All rights reserved. Use of this source code is governed by a
  *   BSD-style license that can be found in the LICENSE file.
  */
+
+#include "org_apache_hadoop.h"
+
 #include <assert.h>
-#include <arpa/inet.h>
 #include <errno.h>
 #include <stdint.h>
+
+#ifdef UNIX
+#include <arpa/inet.h>
 #include <unistd.h>
+#endif // UNIX
 
 #include "crc32_zlib_polynomial_tables.h"
 #include "crc32c_tables.h"
 #include "bulk_crc32.h"
 #include "gcc_optimizations.h"
 
-#ifndef __FreeBSD__
+#if (!defined(__FreeBSD__) && !defined(WINDOWS))
 #define USE_PIPELINED
 #endif
 
 #define CRC_INITIAL_VAL 0xffffffff
 
 typedef uint32_t (*crc_update_func_t)(uint32_t, const uint8_t *, size_t);
-static inline uint32_t crc_val(uint32_t crc);
+static uint32_t crc_val(uint32_t crc);
 static uint32_t crc32_zlib_sb8(uint32_t crc, const uint8_t *buf, size_t length);
 static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length);
 
@@ -187,7 +193,7 @@ return_crc_error:
 /**
  * Extract the final result of a CRC
  */
-static inline uint32_t crc_val(uint32_t crc) {
+uint32_t crc_val(uint32_t crc) {
   return ~crc;
 }
 
@@ -200,11 +206,13 @@ static uint32_t crc32c_sb8(uint32_t crc,
   uint32_t end_bytes = length - running_length; 
   int li;
   for (li=0; li < running_length/8; li++) {
+    uint32_t term1;
+    uint32_t term2;
     crc ^= *(uint32_t *)buf;
     buf += 4;
-    uint32_t term1 = CRC32C_T8_7[crc & 0x000000FF] ^
+    term1 = CRC32C_T8_7[crc & 0x000000FF] ^
         CRC32C_T8_6[(crc >> 8) & 0x000000FF];
-    uint32_t term2 = crc >> 16;
+    term2 = crc >> 16;
     crc = term1 ^
         CRC32C_T8_5[term2 & 0x000000FF] ^ 
         CRC32C_T8_4[(term2 >> 8) & 0x000000FF];
@@ -234,11 +242,13 @@ static uint32_t crc32_zlib_sb8(
   uint32_t end_bytes = length - running_length; 
   int li;
   for (li=0; li < running_length/8; li++) {
+    uint32_t term1;
+    uint32_t term2;
     crc ^= *(uint32_t *)buf;
     buf += 4;
-    uint32_t term1 = CRC32_T8_7[crc & 0x000000FF] ^
+    term1 = CRC32_T8_7[crc & 0x000000FF] ^
         CRC32_T8_6[(crc >> 8) & 0x000000FF];
-    uint32_t term2 = crc >> 16;
+    term2 = crc >> 16;
     crc = term1 ^
         CRC32_T8_5[term2 & 0x000000FF] ^ 
         CRC32_T8_4[(term2 >> 8) & 0x000000FF];
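
For reference, the CRC convention here is the usual reflected one: seed with
CRC_INITIAL_VAL, fold bytes through an update function, then finalize by
inverting the bits (the crc_val() this commit de-inlines for MSVC's sake). A
self-contained sketch with a bit-at-a-time update standing in for the
slicing-by-8 tables:

    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>

    #define CRC_INITIAL_VAL 0xffffffff
    #define CRC32C_POLY_REFLECTED 0x82F63B78u  /* CRC32C (Castagnoli) */

    /* Bit-at-a-time update; bulk_crc32.c uses slicing-by-8 tables instead. */
    static uint32_t crc32c_update(uint32_t crc, const uint8_t *buf, size_t len)
    {
      size_t i;
      int b;
      for (i = 0; i < len; i++) {
        crc ^= buf[i];
        for (b = 0; b < 8; b++)
          crc = (crc >> 1) ^ (CRC32C_POLY_REFLECTED & -(crc & 1u));
      }
      return crc;
    }

    /* Extract the final result of a CRC, as crc_val() does above. */
    static uint32_t crc_val(uint32_t crc) { return ~crc; }

    int main(void)
    {
      const uint8_t data[] = "123456789";
      uint32_t crc = crc32c_update(CRC_INITIAL_VAL, data, 9);
      printf("%08x\n", crc_val(crc));  /* well-known check value: e3069283 */
      return 0;
    }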

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h Thu Mar  7 02:57:40 2013
@@ -19,7 +19,10 @@
 #define BULK_CRC32_H_INCLUDED
 
 #include <stdint.h>
+
+#ifdef UNIX
 #include <unistd.h> /* for size_t */
+#endif // UNIX
 
 // Constants for different CRC algorithms
 #define CRC32C_POLYNOMIAL 1

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h Thu Mar  7 02:57:40 2013
@@ -17,19 +17,22 @@
  */
 
 /**
- * This file includes some common utilities 
+ * This file includes some common utilities
  * for all native code used in hadoop.
  */
- 
+
 #if !defined ORG_APACHE_HADOOP_H
 #define ORG_APACHE_HADOOP_H
 
-#include <dlfcn.h>
-#include <jni.h>
-
-#include "config.h"
+#if defined(_WIN32)
+#undef UNIX
+#define WINDOWS
+#else
+#undef WINDOWS
+#define UNIX
+#endif
 
-/* A helper macro to 'throw' a java exception. */ 
+/* A helper macro to 'throw' a java exception. */
 #define THROW(env, exception_name, message) \
   { \
 	jclass ecls = (*env)->FindClass(env, exception_name); \
@@ -55,13 +58,21 @@
     if ((*env)->ExceptionCheck(env)) return (ret); \
   }
 
-/** 
- * A helper function to dlsym a 'symbol' from a given library-handle. 
- * 
+/**
+ * Unix definitions
+ */
+#ifdef UNIX
+#include <config.h>
+#include <dlfcn.h>
+#include <jni.h>
+
+/**
+ * A helper function to dlsym a 'symbol' from a given library-handle.
+ *
  * @param env jni handle to report contingencies.
  * @param handle handle to the dlopen'ed library.
  * @param symbol symbol to load.
- * @return returns the address where the symbol is loaded in memory, 
+ * @return returns the address where the symbol is loaded in memory,
  *         <code>NULL</code> on error.
  */
 static __attribute__ ((unused))
@@ -84,6 +95,76 @@ void *do_dlsym(JNIEnv *env, void *handle
   if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
     return; \
   }
+#endif
+// Unix part end
+
+
+/**
+ * Windows definitions
+ */
+#ifdef WINDOWS
+
+/* Force using Unicode throughout the code */
+#ifndef UNICODE
+#define UNICODE
+#endif
+
+/* Microsoft C Compiler does not support the C99 inline keyword */
+#ifndef __cplusplus
+#define inline __inline
+#endif // __cplusplus
+
+/* Optimization macros supported by GCC but for which there is no
+   direct equivalent in the Microsoft C compiler */
+#define likely(_c) (_c)
+#define unlikely(_c) (_c)
+
+/* Disable certain warnings in the native CRC32 code. */
+#pragma warning(disable:4018)		// Signed/unsigned mismatch.
+#pragma warning(disable:4244)		// Possible loss of data in conversion.
+#pragma warning(disable:4267)		// Possible loss of data.
+#pragma warning(disable:4996)		// Use of deprecated function.
+
+#include <Windows.h>
+#include <stdio.h>
+#include <jni.h>
+
+#define snprintf(a, b, c, d) _snprintf_s((a), (b), _TRUNCATE, (c), (d))
+
+/* A helper macro to dlsym the requisite dynamic symbol and bail-out on error. */
+#define LOAD_DYNAMIC_SYMBOL(func_type, func_ptr, env, handle, symbol) \
+  if ((func_ptr = (func_type) do_dlsym(env, handle, symbol)) == NULL) { \
+    return; \
+  }
+
+/**
+ * A helper function to dynamic load a 'symbol' from a given library-handle.
+ *
+ * @param env jni handle to report contingencies.
+ * @param handle handle to the dynamic library.
+ * @param symbol symbol to load.
+ * @return returns the address where the symbol is loaded in memory,
+ *         <code>NULL</code> on error.
+ */
+static FARPROC WINAPI do_dlsym(JNIEnv *env, HMODULE handle, LPCSTR symbol) {
+  DWORD dwErrorCode = ERROR_SUCCESS;
+  FARPROC func_ptr = NULL;
+
+  if (!env || !handle || !symbol) {
+    THROW(env, "java/lang/InternalError", NULL);
+    return NULL;
+  }
+
+  func_ptr = GetProcAddress(handle, symbol);
+  if (func_ptr == NULL)
+  {
+    THROW(env, "java/lang/UnsatisfiedLinkError", symbol);
+  }
+  return func_ptr;
+}
+#endif
+// Windows part end
+
 
 #define LOCK_CLASS(env, clazz, classname) \
   if ((*env)->MonitorEnter(env, clazz) != 0) { \
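
Both LOAD_DYNAMIC_SYMBOL variants return from the enclosing void function when
the symbol cannot be resolved; the Windows form takes an extra func_type
argument because GetProcAddress() yields a FARPROC that must be cast. A
minimal caller sketch (library handle, symbol name, and function type are
illustrative assumptions, not part of this commit):

    #include "org_apache_hadoop.h"

    typedef int (*crc_fn)(const void *buf, int len);
    static crc_fn dlsym_crc = NULL;

    static void load_symbols(JNIEnv *env, void *lib)  /* HMODULE on Windows */
    {
    #ifdef UNIX
      LOAD_DYNAMIC_SYMBOL(dlsym_crc, env, lib, "crc32c");          /* dlsym(3) */
    #endif
    #ifdef WINDOWS
      LOAD_DYNAMIC_SYMBOL(crc_fn, dlsym_crc, env, lib, "crc32c");  /* GetProcAddress */
    #endif
    }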

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c Thu Mar  7 02:57:40 2013
@@ -16,6 +16,8 @@
  * limitations under the License.
  */
 
+#include "org_apache_hadoop.h"
+
 #include "bulk_crc32.h"
 
 #include <stdint.h>

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine.proto Thu Mar  7 02:57:40 2013
@@ -1,4 +1,4 @@
-/**
+/**DER
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
@@ -28,20 +28,17 @@ option java_generate_equals_and_hash = t
 package hadoop.common;
 
 /**
- * This message is used for Protobuf Rpc Engine.
- * The message is used to marshal a Rpc-request
- * from RPC client to the RPC server.
+ * This message is the header for the Protobuf Rpc Engine
+ * when sending an RPC request from the RPC client to the RPC server.
+ * The actual request (serialized as protobuf) follows this header.
  *
  * No special header is needed for the Rpc Response for Protobuf Rpc Engine.
 * The normal RPC response header (see RpcHeader.proto) is sufficient.
  */
-message RequestProto {
+message RequestHeaderProto {
   /** Name of the RPC method */
   required string methodName = 1;
 
-  /** Bytes corresponding to the client protobuf request */
-  optional bytes request = 2;
-  
   /** 
    * RPCs for a particular interface (ie protocol) are done using a
    * IPC connection that is setup using rpcProxy.

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml Thu Mar  7 02:57:40 2013
@@ -318,6 +318,20 @@
 </property>
 
 <property>
+  <name>io.compression.codec.bzip2.library</name>
+  <value>system-native</value>
+  <description>The native-code library to be used for compression and
+  decompression by the bzip2 codec.  This library could be specified
+  either by name or by the full pathname.  In the former case, the
+  library is located by the dynamic linker, usually searching the
+  directories specified in the environment variable LD_LIBRARY_PATH.
+  
+  The value of "system-native" indicates that the default system
+  library should be used.  To indicate that the algorithm should
+  operate entirely in Java, specify "java-builtin".</description>
+</property>
+
+<property>
   <name>io.serializations</name>
   <value>org.apache.hadoop.io.serializer.WritableSerialization,org.apache.hadoop.io.serializer.avro.AvroSpecificSerialization,org.apache.hadoop.io.serializer.avro.AvroReflectSerialization</value>
   <description>A list of serialization classes that can be used for

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/site/apt/SingleNodeSetup.apt.vm Thu Mar  7 02:57:40 2013
@@ -33,9 +33,7 @@ Single Node Setup
      * GNU/Linux is supported as a development and production platform.
        Hadoop has been demonstrated on GNU/Linux clusters with 2000 nodes.
 
-     * Win32 is supported as a development platform. Distributed operation
-       has not been well tested on Win32, so it is not supported as a
-       production platform.
+     * Windows is also a supported platform.
 
 ** Required Software
 
@@ -46,11 +44,6 @@ Single Node Setup
     [[2]] ssh must be installed and sshd must be running to use the Hadoop
        scripts that manage remote Hadoop daemons.
 
-   Additional requirements for Windows include:
-
-    [[1]] Cygwin - Required for shell support in addition to the required
-       software above.
-
 ** Installing Software
 
    If your cluster doesn't have the requisite software you will need to
@@ -63,11 +56,6 @@ Single Node Setup
    $ sudo apt-get install rsync
 ----
 
-   On Windows, if you did not install the required software when you
-   installed cygwin, start the cygwin installer and select the packages:
-
-     * openssh - the Net category
-
 * Download
 
    To get a Hadoop distribution, download a recent stable release from one

Propchange: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/core/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/core:r1449958-1453659

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextTestHelper.java Thu Mar  7 02:57:40 2013
@@ -68,7 +68,7 @@ public final class FileContextTestHelper
   public static String getAbsoluteTestRootDir(FileContext fc)
       throws IOException {
     if (absTestRootDir == null) {
-      if (TEST_ROOT_DIR.startsWith("/")) {
+      if (new Path(TEST_ROOT_DIR).isAbsolute()) {
         absTestRootDir = TEST_ROOT_DIR;
       } else {
         absTestRootDir = fc.getWorkingDirectory().toString() + "/"

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextURIBase.java Thu Mar  7 02:57:40 2013
@@ -20,9 +20,11 @@ package org.apache.hadoop.fs;
 
 import java.io.*;
 import java.util.ArrayList;
+import java.util.regex.Pattern;
 import junit.framework.Assert;
 
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Shell;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -52,6 +54,12 @@ public abstract class FileContextURIBase
   private static final String basePath = System.getProperty("test.build.data",
   "build/test/data") + "/testContextURI";
   private static final Path BASE = new Path(basePath);
+
+  // Matches anything containing <, >, :, ", |, ?, *, or anything that ends with
+  // space or dot.
+  private static final Pattern WIN_INVALID_FILE_NAME_PATTERN = Pattern.compile(
+    "^(.*?[<>\\:\"\\|\\?\\*].*?)|(.*?[ \\.])$");
+
   protected FileContext fc1;
   protected FileContext fc2;
 
@@ -81,6 +89,10 @@ public abstract class FileContextURIBase
         "  ", "^ " };
 
     for (String f : fileNames) {
+      if (!isTestableFileNameOnPlatform(f)) {
+        continue;
+      }
+
       // Create a file on fc2's file system using fc1
       Path testPath = qualifiedPath(f, fc2);
       // Ensure file does not exist
@@ -205,6 +217,10 @@ public abstract class FileContextURIBase
         "deleteTest/()&^%$#@!~_+}{><?", "  ", "^ " };
 
     for (String f : dirNames) {
+      if (!isTestableFileNameOnPlatform(f)) {
+        continue;
+      }
+
       // Create a file on fc2's file system using fc1
       Path testPath = qualifiedPath(f, fc2);
       // Ensure file does not exist
@@ -374,6 +390,10 @@ public abstract class FileContextURIBase
         "deleteTest/()&^%$#@!~_+}{><?", "  ", "^ " };
 
     for (String f : dirNames) {
+      if (!isTestableFileNameOnPlatform(f)) {
+        continue;
+      }
+
       // Create a file on fc2's file system using fc1
       Path testPath = qualifiedPath(f, fc2);
       // Ensure file does not exist
@@ -492,6 +512,10 @@ public abstract class FileContextURIBase
     ArrayList<Path> testDirs = new ArrayList<Path>();
 
     for (String d : dirs) {
+      if (!isTestableFileNameOnPlatform(d)) {
+        continue;
+      }
+
       testDirs.add(qualifiedPath(d, fc2));
     }
     Assert.assertFalse(exists(fc1, testDirs.get(0)));
@@ -506,15 +530,17 @@ public abstract class FileContextURIBase
     Assert.assertEquals(qualifiedPath(hPrefix, fc1), paths[0].getPath());
 
     paths = fc1.util().listStatus(qualifiedPath(hPrefix, fc1));
-    Assert.assertEquals(6, paths.length);
-    for (int i = 0; i < dirs.length; i++) {
+    Assert.assertEquals(testDirs.size(), paths.length);
+    for (int i = 0; i < testDirs.size(); i++) {
       boolean found = false;
       for (int j = 0; j < paths.length; j++) {
-        if (qualifiedPath(dirs[i],fc1).equals(paths[j].getPath())) {
+        if (qualifiedPath(testDirs.get(i).toString(), fc1).equals(
+            paths[j].getPath())) {
+
           found = true;
         }
       }
-      Assert.assertTrue(dirs[i] + " not found", found);
+      Assert.assertTrue(testDirs.get(i) + " not found", found);
     }
 
     paths = fc1.util().listStatus(qualifiedPath(dirs[0], fc1));
@@ -539,9 +565,32 @@ public abstract class FileContextURIBase
       }
       Assert.assertTrue(stat.getPath() + " not found", found);
     }
-    Assert.assertEquals(6, dirLen);
+    Assert.assertEquals(testDirs.size(), dirLen);
 
     pathsItor = fc1.listStatus(qualifiedPath(dirs[0], fc1));
     Assert.assertFalse(pathsItor.hasNext());
   }
+
+  /**
+   * Returns true if the argument is a file name that is testable on the platform
+   * currently running the test.  This is intended for use by tests so that they
+   * can skip checking file names that aren't supported by the underlying
+   * platform.  The current implementation specifically checks for patterns that
+   * are not valid file names on Windows when the tests are running on Windows.
+   * 
+   * @param fileName String file name to check
+   * @return boolean true if the argument is valid as a file name
+   */
+  private static boolean isTestableFileNameOnPlatform(String fileName) {
+    boolean valid = true;
+
+    if (Shell.WINDOWS) {
+      // Disallow reserved characters: <, >, :, ", |, ?, *.
+      // Disallow trailing space or period.
+      // See http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247(v=vs.85).aspx
+      valid = !WIN_INVALID_FILE_NAME_PATTERN.matcher(fileName).matches();
+    }
+
+    return valid;
+  }
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemTestHelper.java Thu Mar  7 02:57:40 2013
@@ -86,7 +86,7 @@ public final class FileSystemTestHelper 
       throws IOException {
     // NOTE: can't cache because of different filesystems!
     //if (absTestRootDir == null) 
-      if (TEST_ROOT_DIR.startsWith("/")) {
+      if (new Path(TEST_ROOT_DIR).isAbsolute()) {
         absTestRootDir = TEST_ROOT_DIR;
       } else {
         absTestRootDir = fSys.getWorkingDirectory().toString() + "/"

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestDFVariations.java Thu Mar  7 02:57:40 2013
@@ -17,15 +17,23 @@
 */
 package org.apache.hadoop.fs;
 
-import junit.framework.TestCase;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.fail;
 
+import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.IOException;
+import java.io.StringReader;
 import java.util.EnumSet;
+import java.util.Random;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
+import org.junit.Test;
+import static org.junit.Assert.*;
 
-public class TestDFVariations extends TestCase {
+public class TestDFVariations {
 
   public static class XXDF extends DF {
     private final String osName;
@@ -39,17 +47,12 @@ public class TestDFVariations extends Te
     }
     @Override
     protected String[] getExecString() {
-      switch(getOSType()) {
-        case OS_TYPE_AIX:
-          return new String[] { "echo", "IGNORE\n", "/dev/sda3",
-            "453115160", "400077240", "11%", "18", "skip%", "/foo/bar", "\n" };
-        default:
-          return new String[] { "echo", "IGNORE\n", "/dev/sda3",
-            "453115160", "53037920", "400077240", "11%", "/foo/bar", "\n" };
-      }
+      return new String[] { "echo", "IGNORE\n", 
+        "/dev/sda3", "453115160", "53037920", "400077240", "11%", "/foo/bar\n"};
     }
   }
 
+  @Test(timeout=5000)
   public void testOSParsing() throws Exception {
     for (DF.OSType ost : EnumSet.allOf(DF.OSType.class)) {
       XXDF df = new XXDF(ost.getId());
@@ -58,6 +61,89 @@ public class TestDFVariations extends Te
         df.getMount());
     }
   }
+  
+  @Test(timeout=5000)
+  public void testDFInvalidPath() throws Exception {
+    // Generate a path that doesn't exist
+    Random random = new Random(0xDEADBEEFl);
+    File file = null;
+    byte[] bytes = new byte[64];
+    while (file == null) {
+      random.nextBytes(bytes);
+      final String invalid = new String("/" + bytes);
+      final File invalidFile = new File(invalid);
+      if (!invalidFile.exists()) {
+        file = invalidFile;
+      }
+    }
+    DF df = new DF(file, 0l);
+    try {
+      df.getMount();
+    } catch (FileNotFoundException e) {
+      // expected, since path does not exist
+      GenericTestUtils.assertExceptionContains(file.getName(), e);
+    }
+  }
+  
+  @Test(timeout=5000)
+  public void testDFMalformedOutput() throws Exception {
+    DF df = new DF(new File("/"), 0l);
+    BufferedReader reader = new BufferedReader(new StringReader(
+        "Filesystem     1K-blocks     Used Available Use% Mounted on\n" +
+        "/dev/sda5       19222656 10597036   7649060  59% /"));
+    df.parseExecResult(reader);
+    df.parseOutput();
+    
+    reader = new BufferedReader(new StringReader(
+        "Filesystem     1K-blocks     Used Available Use% Mounted on"));
+    df.parseExecResult(reader);
+    try {
+      df.parseOutput();
+      fail("Expected exception with missing line!");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains(
+          "Fewer lines of output than expected", e);
+      System.out.println(e.toString());
+    }
+    
+    reader = new BufferedReader(new StringReader(
+        "Filesystem     1K-blocks     Used Available Use% Mounted on\n" +
+        " "));
+    df.parseExecResult(reader);
+    try {
+      df.parseOutput();
+      fail("Expected exception with empty line!");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains("Unexpected empty line", e);
+      System.out.println(e.toString());
+    }
+    
+    reader = new BufferedReader(new StringReader(
+        "Filesystem     1K-blocks     Used Available Use% Mounted on\n" +
+        "       19222656 10597036   7649060  59% /"));
+    df.parseExecResult(reader);
+    try {
+      df.parseOutput();
+      fail("Expected exception with missing field!");
+    } catch (IOException e) {
+      GenericTestUtils.assertExceptionContains("Could not parse line: ", e);
+      System.out.println(e.toString());
+    }
+  }
 
+  @Test(timeout=5000)
+  public void testGetMountCurrentDirectory() throws Exception {
+    File currentDirectory = new File(".");
+    String workingDir = currentDirectory.getAbsoluteFile().getCanonicalPath();
+    DF df = new DF(new File(workingDir), 0L);
+    String mountPath = df.getMount();
+    File mountDir = new File(mountPath);
+    assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should exist.", 
+        mountDir.exists());
+    assertTrue("Mount dir ["+mountDir.getAbsolutePath()+"] should be directory.", 
+        mountDir.isDirectory());
+    assertTrue("Working dir ["+workingDir+"] should start with ["+mountPath+"].",
+        workingDir.startsWith(mountPath));
+  }
 }
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileContextResolveAfs.java Thu Mar  7 02:57:40 2013
@@ -43,13 +43,14 @@ public class TestFileContextResolveAfs {
     fc = FileContext.getFileContext();
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testFileContextResolveAfs() throws IOException {
     Configuration conf = new Configuration();
     localFs = FileSystem.get(conf);
     
     Path localPath = new Path(TEST_ROOT_DIR_LOCAL + "/TestFileContextResolveAfs1");
-    Path linkPath = new Path("file://" + TEST_ROOT_DIR_LOCAL + "/TestFileContextResolveAfs2");
+    Path linkPath = localFs.makeQualified(new Path(TEST_ROOT_DIR_LOCAL,
+      "TestFileContextResolveAfs2"));
     localFs.mkdirs(new Path(TEST_ROOT_DIR_LOCAL));
     localFs.create(localPath);
     

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java Thu Mar  7 02:57:40 2013
@@ -20,16 +20,24 @@ package org.apache.hadoop.fs;
 import org.junit.Before;
 import java.io.BufferedReader;
 import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
 import java.io.FileReader;
 import java.io.IOException;
 import java.io.PrintWriter;
+import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collections;
 import java.util.List;
+import java.util.jar.Attributes;
+import java.util.jar.JarFile;
+import java.util.jar.Manifest;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.StringUtils;
 import org.junit.After;
 import org.junit.Assert;
 import org.junit.Test;
@@ -121,7 +129,7 @@ public class TestFileUtil {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testListFiles() throws IOException {
     setupDirs();
     //Test existing files case 
@@ -148,7 +156,7 @@ public class TestFileUtil {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testListAPI() throws IOException {
     setupDirs();
     //Test existing files case 
@@ -196,7 +204,7 @@ public class TestFileUtil {
     Assert.assertTrue(!partitioned.exists());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDelete() throws IOException {
     setupDirs();
     boolean ret = FileUtil.fullyDelete(del);
@@ -211,7 +219,7 @@ public class TestFileUtil {
    * (b) symlink to dir only and not the dir pointed to by symlink.
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDeleteSymlinks() throws IOException {
     setupDirs();
     
@@ -241,7 +249,7 @@ public class TestFileUtil {
    * (b) dangling symlink to directory properly
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDeleteDanglingSymlinks() throws IOException {
     setupDirs();
     // delete the directory tmp to make tmpDir a dangling link to dir tmp and
@@ -268,7 +276,7 @@ public class TestFileUtil {
     Assert.assertEquals(3, del.list().length);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFullyDeleteContents() throws IOException {
     setupDirs();
     boolean ret = FileUtil.fullyDeleteContents(del);
@@ -384,15 +392,19 @@ public class TestFileUtil {
         zlink.exists());
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDelete() throws IOException {
+    if(Shell.WINDOWS) {
+      // windows Dir.setWritable(false) does not work for directories
+      return;
+    }
     LOG.info("Running test to verify failure of fullyDelete()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDelete(new MyFile(del));
     validateAndSetWritablePermissions(true, ret);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDeleteGrantPermissions() throws IOException {
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDelete(new MyFile(del), true);
@@ -461,15 +473,19 @@ public class TestFileUtil {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDeleteContents() throws IOException {
+    if(Shell.WINDOWS) {
+      // windows Dir.setWritable(false) does not work for directories
+      return;
+    }
     LOG.info("Running test to verify failure of fullyDeleteContents()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));
     validateAndSetWritablePermissions(true, ret);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testFailFullyDeleteContentsGrantPermissions() throws IOException {
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDeleteContents(new MyFile(del), true);
@@ -477,7 +493,7 @@ public class TestFileUtil {
     validateAndSetWritablePermissions(false, ret);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testCopyMergeSingleDirectory() throws IOException {
     setupDirs();
     boolean copyMergeResult = copyMerge("partitioned", "tmp/merged");
@@ -536,7 +552,7 @@ public class TestFileUtil {
    * and that directory sizes are not added to the final calculated size
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetDU() throws IOException {
     setupDirs();
 
@@ -547,6 +563,136 @@ public class TestFileUtil {
     Assert.assertEquals(expected, du);
   }
 
+  @Test (timeout = 30000)
+  public void testSymlink() throws Exception {
+    Assert.assertFalse(del.exists());
+    del.mkdirs();
+
+    byte[] data = "testSymLink".getBytes();
+
+    File file = new File(del, FILE);
+    File link = new File(del, "_link");
+
+    //write some data to the file
+    FileOutputStream os = new FileOutputStream(file);
+    os.write(data);
+    os.close();
+
+    //create the symlink
+    FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
+
+    //ensure that symlink length is correctly reported by Java
+    Assert.assertEquals(data.length, file.length());
+    Assert.assertEquals(data.length, link.length());
+
+    //ensure that we can read from link.
+    FileInputStream in = new FileInputStream(link);
+    long len = 0;
+    while (in.read() > 0) {
+      len++;
+    }
+    in.close();
+    Assert.assertEquals(data.length, len);
+  }
+  
+  /**
+   * Test that rename on a symlink works as expected.
+   */
+  @Test (timeout = 30000)
+  public void testSymlinkRenameTo() throws Exception {
+    Assert.assertFalse(del.exists());
+    del.mkdirs();
+
+    File file = new File(del, FILE);
+    file.createNewFile();
+    File link = new File(del, "_link");
+
+    // create the symlink
+    FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
+
+    Assert.assertTrue(file.exists());
+    Assert.assertTrue(link.exists());
+
+    File link2 = new File(del, "_link2");
+
+    // Rename the symlink
+    Assert.assertTrue(link.renameTo(link2));
+
+    // Make sure the file still exists
+    // (NOTE: this would fail on Java6 on Windows if we didn't
+    // copy the file in FileUtil#symlink)
+    Assert.assertTrue(file.exists());
+
+    Assert.assertTrue(link2.exists());
+    Assert.assertFalse(link.exists());
+  }
+
+  /**
+   * Test that deletion of a symlink works as expected.
+   */
+  @Test (timeout = 30000)
+  public void testSymlinkDelete() throws Exception {
+    Assert.assertFalse(del.exists());
+    del.mkdirs();
+
+    File file = new File(del, FILE);
+    file.createNewFile();
+    File link = new File(del, "_link");
+
+    // create the symlink
+    FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
+
+    Assert.assertTrue(file.exists());
+    Assert.assertTrue(link.exists());
+
+    // make sure that deleting a symlink works properly
+    Assert.assertTrue(link.delete());
+    Assert.assertFalse(link.exists());
+    Assert.assertTrue(file.exists());
+  }
+
+  /**
+   * Test that length on a symlink works as expected.
+   */
+  @Test (timeout = 30000)
+  public void testSymlinkLength() throws Exception {
+    Assert.assertFalse(del.exists());
+    del.mkdirs();
+
+    byte[] data = "testSymLinkData".getBytes();
+
+    File file = new File(del, FILE);
+    File link = new File(del, "_link");
+
+    // write some data to the file
+    FileOutputStream os = new FileOutputStream(file);
+    os.write(data);
+    os.close();
+
+    Assert.assertEquals(0, link.length());
+
+    // create the symlink
+    FileUtil.symLink(file.getAbsolutePath(), link.getAbsolutePath());
+
+    // ensure that File#length returns the target file and link size
+    Assert.assertEquals(data.length, file.length());
+    Assert.assertEquals(data.length, link.length());
+
+    file.delete();
+    Assert.assertFalse(file.exists());
+
+    if (Shell.WINDOWS && !Shell.isJava7OrAbove()) {
+      // On Java6 on Windows, we copied the file
+      Assert.assertEquals(data.length, link.length());
+    } else {
+      // Otherwise, the target file size is zero
+      Assert.assertEquals(0, link.length());
+    }
+
+    link.delete();
+    Assert.assertFalse(link.exists());
+  }
+
   private void doUntarAndVerify(File tarFile, File untarDir) 
                                  throws IOException {
     if (untarDir.exists() && !FileUtil.fullyDelete(untarDir)) {
@@ -574,7 +720,7 @@ public class TestFileUtil {
     Assert.assertTrue(testFile.length() == 8);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUntar() throws IOException {
     String tarGzFileName = System.getProperty("test.cache.data",
         "build/test/cache") + "/test-untar.tgz";
@@ -586,4 +732,69 @@ public class TestFileUtil {
     doUntarAndVerify(new File(tarGzFileName), untarDir);
     doUntarAndVerify(new File(tarFileName), untarDir);
   }
+
+  @Test (timeout = 30000)
+  public void testCreateJarWithClassPath() throws Exception {
+    // setup test directory for files
+    Assert.assertFalse(tmp.exists());
+    Assert.assertTrue(tmp.mkdirs());
+
+    // create files expected to match a wildcard
+    List<File> wildcardMatches = Arrays.asList(new File(tmp, "wildcard1.jar"),
+      new File(tmp, "wildcard2.jar"), new File(tmp, "wildcard3.JAR"),
+      new File(tmp, "wildcard4.JAR"));
+    for (File wildcardMatch: wildcardMatches) {
+      Assert.assertTrue("failure creating file: " + wildcardMatch,
+        wildcardMatch.createNewFile());
+    }
+
+    // create non-jar files, which we expect to not be included in the classpath
+    Assert.assertTrue(new File(tmp, "text.txt").createNewFile());
+    Assert.assertTrue(new File(tmp, "executable.exe").createNewFile());
+    Assert.assertTrue(new File(tmp, "README").createNewFile());
+
+    // create classpath jar
+    String wildcardPath = tmp.getCanonicalPath() + File.separator + "*";
+    List<String> classPaths = Arrays.asList("cp1.jar", "cp2.jar", wildcardPath,
+      "cp3.jar");
+    String inputClassPath = StringUtils.join(File.pathSeparator, classPaths);
+    String classPathJar = FileUtil.createJarWithClassPath(inputClassPath,
+      new Path(tmp.getCanonicalPath()));
+
+    // verify classpath by reading manifest from jar file
+    JarFile jarFile = null;
+    try {
+      jarFile = new JarFile(classPathJar);
+      Manifest jarManifest = jarFile.getManifest();
+      Assert.assertNotNull(jarManifest);
+      Attributes mainAttributes = jarManifest.getMainAttributes();
+      Assert.assertNotNull(mainAttributes);
+      Assert.assertTrue(mainAttributes.containsKey(Attributes.Name.CLASS_PATH));
+      String classPathAttr = mainAttributes.getValue(Attributes.Name.CLASS_PATH);
+      Assert.assertNotNull(classPathAttr);
+      List<String> expectedClassPaths = new ArrayList<String>();
+      for (String classPath: classPaths) {
+        if (!wildcardPath.equals(classPath)) {
+          expectedClassPaths.add(new File(classPath).toURI().toURL()
+            .toExternalForm());
+        } else {
+          // add wildcard matches
+          for (File wildcardMatch: wildcardMatches) {
+            expectedClassPaths.add(wildcardMatch.toURI().toURL()
+              .toExternalForm());
+          }
+        }
+      }
+      List<String> actualClassPaths = Arrays.asList(classPathAttr.split(" "));
+      Assert.assertEquals(expectedClassPaths, actualClassPaths);
+    } finally {
+      if (jarFile != null) {
+        try {
+          jarFile.close();
+        } catch (IOException e) {
+          LOG.warn("exception closing jarFile: " + classPathJar, e);
+        }
+      }
+    }
+  }
 }

Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellReturnCode.java Thu Mar  7 02:57:40 2013
@@ -121,20 +121,22 @@ public class TestFsShellReturnCode {
    * 
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChmod() throws Exception {
+    Path p1 = new Path(TEST_ROOT_DIR, "testChmod/fileExists");
 
-    final String f1 = TEST_ROOT_DIR + "/" + "testChmod/fileExists";
-    final String f2 = TEST_ROOT_DIR + "/" + "testChmod/fileDoesNotExist";
-    final String f3 = TEST_ROOT_DIR + "/" + "testChmod/nonExistingfiles*";
+    final String f1 = p1.toUri().getPath();
+    final String f2 = new Path(TEST_ROOT_DIR, "testChmod/fileDoesNotExist")
+      .toUri().getPath();
+    final String f3 = new Path(TEST_ROOT_DIR, "testChmod/nonExistingfiles*")
+      .toUri().getPath();
+
+    final Path p4 = new Path(TEST_ROOT_DIR, "testChmod/file1");
+    final Path p5 = new Path(TEST_ROOT_DIR, "testChmod/file2");
+    final Path p6 = new Path(TEST_ROOT_DIR, "testChmod/file3");
 
-    Path p1 = new Path(f1);
-
-    final Path p4 = new Path(TEST_ROOT_DIR + "/" + "testChmod/file1");
-    final Path p5 = new Path(TEST_ROOT_DIR + "/" + "testChmod/file2");
-    final Path p6 = new Path(TEST_ROOT_DIR + "/" + "testChmod/file3");
-
-    final String f7 = TEST_ROOT_DIR + "/" + "testChmod/file*";
+    final String f7 = new Path(TEST_ROOT_DIR, "testChmod/file*").toUri()
+      .getPath();
 
     // create and write test file
     writeFile(fileSys, p1);
@@ -175,20 +177,23 @@ public class TestFsShellReturnCode {
    * 
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChown() throws Exception {
+    Path p1 = new Path(TEST_ROOT_DIR, "testChown/fileExists");
 
-    final String f1 = TEST_ROOT_DIR + "/" + "testChown/fileExists";
-    final String f2 = TEST_ROOT_DIR + "/" + "testChown/fileDoesNotExist";
-    final String f3 = TEST_ROOT_DIR + "/" + "testChown/nonExistingfiles*";
+    final String f1 = p1.toUri().getPath();
+    final String f2 = new Path(TEST_ROOT_DIR, "testChown/fileDoesNotExist")
+      .toUri().getPath();
+    final String f3 = new Path(TEST_ROOT_DIR, "testChown/nonExistingfiles*")
+      .toUri().getPath();
 
-    Path p1 = new Path(f1);
 
-    final Path p4 = new Path(TEST_ROOT_DIR + "/" + "testChown/file1");
-    final Path p5 = new Path(TEST_ROOT_DIR + "/" + "testChown/file2");
-    final Path p6 = new Path(TEST_ROOT_DIR + "/" + "testChown/file3");
+    final Path p4 = new Path(TEST_ROOT_DIR, "testChown/file1");
+    final Path p5 = new Path(TEST_ROOT_DIR, "testChown/file2");
+    final Path p6 = new Path(TEST_ROOT_DIR, "testChown/file3");
 
-    final String f7 = TEST_ROOT_DIR + "/" + "testChown/file*";
+    final String f7 = new Path(TEST_ROOT_DIR, "testChown/file*").toUri()
+      .getPath();
 
     // create and write test file
     writeFile(fileSys, p1);
@@ -228,20 +233,22 @@ public class TestFsShellReturnCode {
    * 
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testChgrp() throws Exception {
+    Path p1 = new Path(TEST_ROOT_DIR, "testChgrp/fileExists");
 
-    final String f1 = TEST_ROOT_DIR + "/" + "testChgrp/fileExists";
-    final String f2 = TEST_ROOT_DIR + "/" + "testChgrp/fileDoesNotExist";
-    final String f3 = TEST_ROOT_DIR + "/" + "testChgrp/nonExistingfiles*";
-
-    Path p1 = new Path(f1);
-
-    final Path p4 = new Path(TEST_ROOT_DIR + "/" + "testChgrp/file1");
-    final Path p5 = new Path(TEST_ROOT_DIR + "/" + "testChgrp/file2");
-    final Path p6 = new Path(TEST_ROOT_DIR + "/" + "testChgrp/file3");
+    final String f1 = p1.toUri().getPath();
+    final String f2 = new Path(TEST_ROOT_DIR, "testChgrp/fileDoesNotExist")
+      .toUri().getPath();
+    final String f3 = new Path(TEST_ROOT_DIR, "testChgrp/nonExistingfiles*")
+      .toUri().getPath();
+
+    final Path p4 = new Path(TEST_ROOT_DIR, "testChgrp/file1");
+    final Path p5 = new Path(TEST_ROOT_DIR, "testChgrp/file2");
+    final Path p6 = new Path(TEST_ROOT_DIR, "testChgrp/file3");
 
-    final String f7 = TEST_ROOT_DIR + "/" + "testChgrp/file*";
+    final String f7 = new Path(TEST_ROOT_DIR, "testChgrp/file*").toUri()
+      .getPath();
 
     // create and write test file
     writeFile(fileSys, p1);
@@ -271,7 +278,7 @@ public class TestFsShellReturnCode {
     change(1, null, "admin", f2, f7);
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testGetWithInvalidSourcePathShouldNotDisplayNullInConsole()
       throws Exception {
     Configuration conf = new Configuration();
@@ -288,8 +295,8 @@ public class TestFsShellReturnCode {
       fileSys.mkdirs(tdir);
       String[] args = new String[3];
       args[0] = "-get";
-      args[1] = tdir+"/invalidSrc";
-      args[2] = tdir+"/invalidDst";
+      args[1] = new Path(tdir.toUri().getPath(), "/invalidSrc").toString();
+      args[2] = new Path(tdir.toUri().getPath(), "/invalidDst").toString();
       assertTrue("file exists", !fileSys.exists(new Path(args[1])));
       assertTrue("file exists", !fileSys.exists(new Path(args[2])));
       int run = shell.run(args);
@@ -303,7 +310,7 @@ public class TestFsShellReturnCode {
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testRmWithNonexistentGlob() throws Exception {
     Configuration conf = new Configuration();
     FsShell shell = new FsShell();
@@ -324,7 +331,7 @@ public class TestFsShellReturnCode {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRmForceWithNonexistentGlob() throws Exception {
     Configuration conf = new Configuration();
     FsShell shell = new FsShell();
@@ -343,7 +350,7 @@ public class TestFsShellReturnCode {
     }
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testInvalidDefaultFS() throws Exception {
     // if default fs doesn't exist or is invalid, but the path provided in 
     // arguments is valid - fsshell should work
@@ -374,7 +381,7 @@ public class TestFsShellReturnCode {
     
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testInterrupt() throws Exception {
     MyFsShell shell = new MyFsShell();
     shell.setConf(new Configuration());

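The hunks above replace string concatenation with the two-argument Path constructor plus toUri().getPath(). A minimal sketch of that idiom follows; the class name is illustrative and the root directory is hypothetical (the tests read it from the test.build.data property):

    import org.apache.hadoop.fs.Path;

    public class PathJoinSketch {
      public static void main(String[] args) {
        // Hypothetical root for illustration.
        String root = System.getProperty("test.build.data", "/tmp");

        // new Path(parent, child) resolves the child against the parent,
        // and toUri().getPath() yields a plain, slash-separated path string
        // that stays valid on Windows (no "C:\" separators baked in).
        String f1 = new Path(root, "testChown/fileExists").toUri().getPath();
        System.out.println(f1);
      }
    }
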
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java Thu Mar  7 02:57:40 2013
@@ -54,17 +54,6 @@ import static org.apache.hadoop.fs.HardL
  * NOTICE: This test class only tests the functionality of the OS
  * upon which the test is run! (although you're pretty safe with the
  * Unix-like OSes, unless a typo sneaks in.)
- * 
- * Notes about Windows testing:  
- * (a) In order to create hardlinks, the process must be run with 
- * administrative privs, in both the account AND the invocation.
- * For instance, to run within Eclipse, the Eclipse application must be 
- * launched by right-clicking on it, and selecting "Run as Administrator" 
- * (and that option will only be available if the current user id does 
- * in fact have admin privs).
- * (b) The getLinkCount() test case will fail for Windows, unless Cygwin
- * is set up properly.  In particular, ${cygwin}/bin must be in
- * the PATH environment variable, so the cygwin utilities can be found.
  */
 public class TestHardLink {
   
@@ -221,9 +210,6 @@ public class TestHardLink {
    * Sanity check the simplest case of HardLink.getLinkCount()
    * to make sure we get back "1" for ordinary single-linked files.
    * Tests with multiply-linked files are in later test cases.
-   * 
-   * If this fails on Windows but passes on Unix, the most likely cause is 
-   * incorrect configuration of the Cygwin installation; see above.
    */
   @Test
   public void testGetLinkCount() throws IOException {
@@ -412,7 +398,7 @@ public class TestHardLink {
     assertEquals(5, win.hardLinkCommand.length); 
     assertEquals(7, win.hardLinkMultPrefix.length);
     assertEquals(8, win.hardLinkMultSuffix.length);
-    assertEquals(3, win.getLinkCountCommand.length);
+    assertEquals(4, win.getLinkCountCommand.length);
 
     assertTrue(win.hardLinkMultPrefix[4].equals("%f"));
     //make sure "%f" was not munged
@@ -423,7 +409,7 @@ public class TestHardLink {
     assertTrue(win.hardLinkMultSuffix[7].equals("1>NUL"));
     //make sure "1>NUL" was not munged
     assertEquals(5, ("1>NUL").length()); 
-    assertTrue(win.getLinkCountCommand[1].equals("-c%h"));
+    assertTrue(win.getLinkCountCommand[1].equals("hardlink"));
     //make sure "hardlink" was not munged
     assertEquals(8, ("hardlink").length());
   }

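The getLinkCountCommand assertions track the switch away from the Cygwin-based stat invocation, presumably to the winutils hardlink command, which is why the Cygwin setup notes above are deleted. As a reminder of the API under test, a small sketch of HardLink usage on a Unix-like OS; the file names are hypothetical and the source file must already exist:

    import java.io.File;
    import java.io.IOException;

    import org.apache.hadoop.fs.HardLink;

    public class HardLinkSketch {
      public static void main(String[] args) throws IOException {
        File src = new File("/tmp/hardlink-src");   // hypothetical, must exist
        File link = new File("/tmp/hardlink-copy"); // hypothetical, created here

        HardLink.createHardLink(src, link);
        // An ordinary file reports 1; after the call above it reports 2.
        System.out.println(HardLink.getLinkCount(src));
      }
    }
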
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java Thu Mar  7 02:57:40 2013
@@ -129,7 +129,7 @@ public class TestLocalDirAllocator {
    * The second dir exists & is RW
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void test0() throws Exception {
     if (isWindows) return;
     String dir0 = buildBufferDir(ROOT, 0);
@@ -141,7 +141,8 @@ public class TestLocalDirAllocator {
       validateTempDirCreation(dir1);
       validateTempDirCreation(dir1);
     } finally {
-      Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
@@ -150,7 +151,7 @@ public class TestLocalDirAllocator {
    * The second dir exists & is RW
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testROBufferDirAndRWBufferDir() throws Exception {
     if (isWindows) return;
     String dir1 = buildBufferDir(ROOT, 1);
@@ -162,14 +163,15 @@ public class TestLocalDirAllocator {
       validateTempDirCreation(dir2);
       validateTempDirCreation(dir2);
     } finally {
-      Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
   /** Two buffer dirs. Both do not exist but on a RW disk.
    * Check if tmp dirs are allocated in a round-robin
    */
-  @Test
+  @Test (timeout = 30000)
   public void testDirsNotExist() throws Exception {
     if (isWindows) return;
     String dir2 = buildBufferDir(ROOT, 2);
@@ -195,7 +197,7 @@ public class TestLocalDirAllocator {
    * Later disk1 becomes read-only.
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testRWBufferDirBecomesRO() throws Exception {
     if (isWindows) return;
     String dir3 = buildBufferDir(ROOT, 3);
@@ -233,7 +235,7 @@ public class TestLocalDirAllocator {
    * @throws Exception
    */
   static final int TRIALS = 100;
-  @Test
+  @Test (timeout = 30000)
   public void testCreateManyFiles() throws Exception {
     if (isWindows) return;
     String dir5 = buildBufferDir(ROOT, 5);
@@ -270,7 +272,7 @@ public class TestLocalDirAllocator {
    * directory. With checkAccess true, the directory should not be created.
    * @throws Exception
    */
-  @Test
+  @Test (timeout = 30000)
   public void testLocalPathForWriteDirCreation() throws IOException {
     String dir0 = buildBufferDir(ROOT, 0);
     String dir1 = buildBufferDir(ROOT, 1);
@@ -291,7 +293,8 @@ public class TestLocalDirAllocator {
         assertEquals(e.getClass(), FileNotFoundException.class);
       }
     } finally {
-      Shell.execCommand(new String[] { "chmod", "u+w", BUFFER_DIR_ROOT });
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
@@ -300,7 +303,7 @@ public class TestLocalDirAllocator {
    * Test when mapred.local.dir not configured and called
    * getLocalPathForWrite
    */
-  @Test
+  @Test (timeout = 30000)
   public void testShouldNotthrowNPE() throws Exception {
     Configuration conf1 = new Configuration();
     try {
@@ -319,7 +322,7 @@ public class TestLocalDirAllocator {
    * are mistakenly created from fully qualified path strings.
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testNoSideEffects() throws IOException {
     assumeTrue(!isWindows);
     String dir = buildBufferDir(ROOT, 0);
@@ -330,7 +333,8 @@ public class TestLocalDirAllocator {
       assertTrue(result.getParentFile().delete());
       assertFalse(new File(dir).exists());
     } finally {
-      Shell.execCommand(new String[]{"chmod", "u+w", BUFFER_DIR_ROOT});
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
@@ -340,7 +344,7 @@ public class TestLocalDirAllocator {
    *
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetLocalPathToRead() throws IOException {
     assumeTrue(!isWindows);
     String dir = buildBufferDir(ROOT, 0);
@@ -353,7 +357,8 @@ public class TestLocalDirAllocator {
       assertEquals(f1.getName(), p1.getName());
       assertEquals("file", p1.getFileSystem(conf).getUri().getScheme());
     } finally {
-      Shell.execCommand(new String[] { "chmod", "u+w", BUFFER_DIR_ROOT });
+      Shell.execCommand(Shell.getSetPermissionCommand("u+w", false,
+                                                      BUFFER_DIR_ROOT));
       rmBufferDirs();
     }
   }
@@ -364,7 +369,7 @@ public class TestLocalDirAllocator {
    *
    * @throws IOException
    */
-  @Test
+  @Test (timeout = 30000)
   public void testGetAllLocalPathsToRead() throws IOException {
     assumeTrue(!isWindows);
     
@@ -412,7 +417,7 @@ public class TestLocalDirAllocator {
     }
   }
   
-  @Test
+  @Test (timeout = 30000)
   public void testRemoveContext() throws IOException {
     String dir = buildBufferDir(ROOT, 0);
     try {

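The recurring change in this file swaps a hardcoded chmod invocation for Shell.getSetPermissionCommand, which emits the platform-appropriate command line. A minimal sketch of the idiom, with a hypothetical directory name:

    import java.io.IOException;

    import org.apache.hadoop.util.Shell;

    public class SetPermissionSketch {
      public static void main(String[] args) throws IOException {
        String dir = "/tmp/buffer-dir-root"; // hypothetical directory

        // Builds {"chmod", "u+w", dir} on Unix and the winutils equivalent
        // on Windows; the boolean selects recursive application.
        String[] cmd = Shell.getSetPermissionCommand("u+w", false, dir);
        Shell.execCommand(cmd);
      }
    }
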
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java Thu Mar  7 02:57:40 2013
@@ -17,6 +17,7 @@
  */
 
 package org.apache.hadoop.fs;
+import org.junit.Test;
 
 import java.io.IOException;
 import java.net.URI;
@@ -25,10 +26,14 @@ import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.AvroTestUtil;
+import org.apache.hadoop.util.Shell;
 
 import junit.framework.TestCase;
 
+import static org.junit.Assert.fail;
+
 public class TestPath extends TestCase {
+  @Test (timeout = 30000)
   public void testToString() {
     toStringTest("/");
     toStringTest("/foo");
@@ -61,6 +66,7 @@ public class TestPath extends TestCase {
     assertEquals(pathString, new Path(pathString).toString());
   }
 
+  @Test (timeout = 30000)
   public void testNormalize() throws URISyntaxException {
     assertEquals("", new Path(".").toString());
     assertEquals("..", new Path("..").toString());
@@ -82,6 +88,7 @@ public class TestPath extends TestCase {
     }
   }
 
+  @Test (timeout = 30000)
   public void testIsAbsolute() {
     assertTrue(new Path("/").isAbsolute());
     assertTrue(new Path("/foo").isAbsolute());
@@ -94,6 +101,7 @@ public class TestPath extends TestCase {
     }
   }
 
+  @Test (timeout = 30000)
   public void testParent() {
     assertEquals(new Path("/foo"), new Path("/foo/bar").getParent());
     assertEquals(new Path("foo"), new Path("foo/bar").getParent());
@@ -104,6 +112,7 @@ public class TestPath extends TestCase {
     }
   }
 
+  @Test (timeout = 30000)
   public void testChild() {
     assertEquals(new Path("."), new Path(".", "."));
     assertEquals(new Path("/"), new Path("/", "."));
@@ -123,10 +132,12 @@ public class TestPath extends TestCase {
     }
   }
   
+  @Test (timeout = 30000)
   public void testEquals() {
     assertFalse(new Path("/").equals(new Path("/foo")));
   }
 
+  @Test (timeout = 30000)
   public void testDots() {
     // Test Path(String) 
     assertEquals(new Path("/foo/bar/baz").toString(), "/foo/bar/baz");
@@ -164,18 +175,54 @@ public class TestPath extends TestCase {
     assertEquals(new Path("foo/bar/baz","../../../../..").toString(), "../..");
   }
 
+  /** Test that Windows paths are correctly handled */
+  @Test (timeout = 5000)
+  public void testWindowsPaths() throws URISyntaxException, IOException {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    assertEquals(new Path("c:\\foo\\bar").toString(), "c:/foo/bar");
+    assertEquals(new Path("c:/foo/bar").toString(), "c:/foo/bar");
+    assertEquals(new Path("/c:/foo/bar").toString(), "c:/foo/bar");
+    assertEquals(new Path("file://c:/foo/bar").toString(), "file://c:/foo/bar");
+  }
+
+  /** Test that invalid paths on Windows are correctly rejected */
+  @Test (timeout = 5000)
+  public void testInvalidWindowsPaths() throws URISyntaxException, IOException {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    String[] invalidPaths = {
+        "hdfs:\\\\\\tmp"
+    };
+
+    for (String path : invalidPaths) {
+      try {
+        new Path(path);
+        fail("Did not throw for invalid path " + path);
+      } catch (IllegalArgumentException iae) {
+        // expected
+      }
+    }
+  }
+
   /** Test Path objects created from other Path objects */
+  @Test (timeout = 30000)
   public void testChildParentResolution() throws URISyntaxException, IOException {
     Path parent = new Path("foo1://bar1/baz1");
     Path child  = new Path("foo2://bar2/baz2");
     assertEquals(child, new Path(parent, child));
   }
   
+  @Test (timeout = 30000)
   public void testScheme() throws java.io.IOException {
     assertEquals("foo:/bar", new Path("foo:/","/bar").toString()); 
     assertEquals("foo://bar/baz", new Path("foo://bar/","/baz").toString()); 
   }
 
+  @Test (timeout = 30000)
   public void testURI() throws URISyntaxException, IOException {
     URI uri = new URI("file:///bar#baz");
     Path path = new Path(uri);
@@ -198,6 +245,7 @@ public class TestPath extends TestCase {
   }
 
   /** Test URIs created from Path objects */
+  @Test (timeout = 30000)
   public void testPathToUriConversion() throws URISyntaxException, IOException {
     // Path differs from URI in that it ignores the query part..
     assertEquals(new URI(null, null, "/foo?bar", null, null),  new Path("/foo?bar").toUri());
@@ -218,6 +266,7 @@ public class TestPath extends TestCase {
   }
 
   /** Test reserved characters in URIs (and therefore Paths) */
+  @Test (timeout = 30000)
   public void testReservedCharacters() throws URISyntaxException, IOException {
     // URI encodes the path
     assertEquals("/foo%20bar", new URI(null, null, "/foo bar", null, null).getRawPath());
@@ -239,6 +288,7 @@ public class TestPath extends TestCase {
     assertEquals("/foo%3Fbar", new URI("http", "localhost", "/foo?bar", null, null).toURL().getPath());
   }
   
+  @Test (timeout = 30000)
   public void testMakeQualified() throws URISyntaxException {
     URI defaultUri = new URI("hdfs://host1/dir1");
     URI wd         = new URI("hdfs://host2/dir2");
@@ -252,6 +302,7 @@ public class TestPath extends TestCase {
                  new Path("file").makeQualified(defaultUri, new Path(wd)));
  }
 
+  @Test (timeout = 30000)
   public void testGetName() {
     assertEquals("", new Path("/").getName());
     assertEquals("foo", new Path("foo").getName());
@@ -261,13 +312,17 @@ public class TestPath extends TestCase {
     assertEquals("bar", new Path("hdfs://host/foo/bar").getName());
   }
   
+  @Test (timeout = 30000)
   public void testAvroReflect() throws Exception {
     AvroTestUtil.testReflect
       (new Path("foo"),
        "{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.fs.Path\"}");
   }
 
+  @Test (timeout = 30000)
   public void testGlobEscapeStatus() throws Exception {
+    // This test is not meaningful on Windows, where * is disallowed in file names.
+    if (Shell.WINDOWS) return;
     FileSystem lfs = FileSystem.getLocal(new Configuration());
     Path testRoot = lfs.makeQualified(new Path(
         System.getProperty("test.build.data","test/build/data"),
@@ -324,4 +379,31 @@ public class TestPath extends TestCase {
     assertEquals(1, stats.length);
     assertEquals(new Path(testRoot, "*/f"), stats[0].getPath());
   }
+
+  @Test (timeout = 30000)
+  public void testMergePaths() {
+    assertEquals(new Path("/foo/bar"),
+      Path.mergePaths(new Path("/foo"),
+        new Path("/bar")));
+
+    assertEquals(new Path("/foo/bar/baz"),
+      Path.mergePaths(new Path("/foo/bar"),
+        new Path("/baz")));
+
+    assertEquals(new Path("/foo/bar/baz"),
+      Path.mergePaths(new Path("/foo"),
+        new Path("/bar/baz")));
+
+    assertEquals(new Path(Shell.WINDOWS ? "/C:/foo/bar" : "/C:/foo/C:/bar"),
+      Path.mergePaths(new Path("/C:/foo"),
+        new Path("/C:/bar")));
+
+    assertEquals(new Path("viewfs:///foo/bar"),
+      Path.mergePaths(new Path("viewfs:///foo"),
+        new Path("file:///bar")));
+
+    assertEquals(new Path("viewfs://vfsauthority/foo/bar"),
+      Path.mergePaths(new Path("viewfs://vfsauthority/foo"),
+        new Path("file://fileauthority/bar")));
+  }
 }

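testMergePaths pins down the semantics the rest of the patch relies on: Path.mergePaths splices the path component of its second argument onto the first, keeping the first argument's scheme and authority. A sketch using the same values as the test:

    import org.apache.hadoop.fs.Path;

    public class MergePathsSketch {
      public static void main(String[] args) {
        // Path components are concatenated...
        System.out.println(Path.mergePaths(new Path("/foo"), new Path("/bar")));
        // ...and the scheme/authority of the second argument is discarded:
        Path merged = Path.mergePaths(new Path("viewfs:///foo"),
                                      new Path("file:///bar"));
        System.out.println(merged.equals(new Path("viewfs:///foo/bar"))); // true
      }
    }
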
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestTrash.java Thu Mar  7 02:57:40 2013
@@ -55,7 +55,7 @@ public class TestTrash extends TestCase 
   // check that the specified file is in Trash
   protected static void checkTrash(FileSystem trashFs, Path trashRoot,
       Path path) throws IOException {
-    Path p = new Path(trashRoot+"/"+ path.toUri().getPath());
+    Path p = Path.mergePaths(trashRoot, path);
     assertTrue("Could not find file in trash: "+ p , trashFs.exists(p));
   }
   
@@ -399,7 +399,8 @@ public class TestTrash extends TestCase 
         assertTrue(val==0);
       }
       // current trash directory
-      Path trashDir = new Path(trashRoot.toUri().getPath() + myFile.getParent().toUri().getPath());
+      Path trashDir = Path.mergePaths(new Path(trashRoot.toUri().getPath()),
+        new Path(myFile.getParent().toUri().getPath()));
       
       System.out.println("Deleting same myFile: myFile.parent=" + myFile.getParent().toUri().getPath() + 
           "; trashroot="+trashRoot.toUri().getPath() + 

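The TestTrash hunks apply mergePaths where string concatenation used to build the trash path. A sketch of why the two agree on POSIX paths (user and file names are hypothetical); on Windows the concatenated form can embed a "/C:/..." fragment that mergePaths avoids:

    import org.apache.hadoop.fs.Path;

    public class TrashPathSketch {
      public static void main(String[] args) {
        Path trashRoot = new Path("/user/alice/.Trash/Current");
        Path deleted = new Path("/user/alice/data/file1");

        // Old form: concatenation, relying on Path to collapse the "//".
        Path concat = new Path(trashRoot + "/" + deleted.toUri().getPath());
        // New form: explicit splice of the path components.
        Path merged = Path.mergePaths(trashRoot, deleted);

        System.out.println(concat.equals(merged)); // true for POSIX paths
      }
    }
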
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java Thu Mar  7 02:57:40 2013
@@ -19,8 +19,10 @@ package org.apache.hadoop.fs.shell;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
 import java.io.File;
+import java.io.IOException;
 import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
@@ -59,7 +61,7 @@ public class TestPathData {
     fs.close();
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testWithDirStringAndConf() throws Exception {
     String dirString = "d1";
     PathData item = new PathData(dirString, conf);
@@ -72,7 +74,7 @@ public class TestPathData {
     checkPathData(dirString, item);
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testUnqualifiedUriContents() throws Exception {
     String dirString = "d1";
     PathData item = new PathData(dirString, conf);
@@ -83,7 +85,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testQualifiedUriContents() throws Exception {
     String dirString = fs.makeQualified(new Path("d1")).toString();
     PathData item = new PathData(dirString, conf);
@@ -94,7 +96,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testCwdContents() throws Exception {
     String dirString = Path.CUR_DIR;
     PathData item = new PathData(dirString, conf);
@@ -105,7 +107,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testToFile() throws Exception {
     PathData item = new PathData(".", conf);
     assertEquals(new File(testDir.toString()), item.toFile());
@@ -115,7 +117,56 @@ public class TestPathData {
     assertEquals(new File(testDir + "/d1/f1"), item.toFile());
   }
 
-  @Test
+  @Test (timeout = 5000)
+  public void testToFileRawWindowsPaths() throws Exception {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    // Can we handle raw Windows paths? The files need not exist for
+    // these tests to succeed.
+    String[] winPaths = {
+        "n:\\",
+        "N:\\",
+        "N:\\foo",
+        "N:\\foo\\bar",
+        "N:/",
+        "N:/foo",
+        "N:/foo/bar"
+    };
+
+    PathData item;
+
+    for (String path : winPaths) {
+      item = new PathData(path, conf);
+      assertEquals(new File(path), item.toFile());
+    }
+
+    item = new PathData("foo\\bar", conf);
+    assertEquals(new File(testDir + "\\foo\\bar"), item.toFile());
+  }
+
+  @Test (timeout = 5000)
+  public void testInvalidWindowsPath() throws Exception {
+    if (!Path.WINDOWS) {
+      return;
+    }
+
+    // Verify that the following invalid paths are rejected.
+    String[] winPaths = {
+        "N:\\foo/bar"
+    };
+
+    for (String path : winPaths) {
+      try {
+        new PathData(path, conf);
+        fail("Did not throw for invalid path " + path);
+      } catch (IOException ioe) {
+        // expected
+      }
+    }
+  }
+
+  @Test (timeout = 30000)
   public void testAbsoluteGlob() throws Exception {
     PathData[] items = PathData.expandAsGlob(testDir+"/d1/f1*", conf);
     assertEquals(
@@ -124,7 +175,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRelativeGlob() throws Exception {
     PathData[] items = PathData.expandAsGlob("d1/f1*", conf);
     assertEquals(
@@ -133,7 +184,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testRelativeGlobBack() throws Exception {
     fs.setWorkingDirectory(new Path("d1"));
     PathData[] items = PathData.expandAsGlob("../d2/*", conf);
@@ -143,7 +194,7 @@ public class TestPathData {
     );
   }
 
-  @Test
+  @Test (timeout = 30000)
   public void testWithStringAndConfForBuggyPath() throws Exception {
     String dirString = "file:///tmp";
     Path tmpDir = new Path(dirString);

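Several of the new tests exercise PathData's string handling directly. A sketch of two entry points they cover, expandAsGlob and toFile; the glob pattern is hypothetical and need not match anything:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.shell.PathData;

    public class PathDataSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Expands a shell-style glob against the default file system.
        for (PathData item : PathData.expandAsGlob("/tmp/d1/f1*", conf)) {
          // toString preserves the original string form; toFile maps the
          // item onto a java.io.File for local file systems.
          System.out.println(item + " -> " + item.toFile());
        }
      }
    }
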
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestTextCommand.java Thu Mar  7 02:57:40 2013
@@ -26,9 +26,11 @@ import java.io.InputStream;
 import java.io.IOException;
 import java.io.StringWriter;
 import java.lang.reflect.Method;
+import java.net.URI;
 
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
 import org.junit.Test;
 
 /**
@@ -38,12 +40,13 @@ import org.junit.Test;
 public class TestTextCommand {
   private static final String TEST_ROOT_DIR =
     System.getProperty("test.build.data", "build/test/data/") + "/testText";
-  private static final String AVRO_FILENAME = TEST_ROOT_DIR + "/weather.avro";
+  private static final String AVRO_FILENAME =
+    new Path(TEST_ROOT_DIR, "weather.avro").toUri().getPath();
 
   /**
    * Tests whether binary Avro data files are displayed correctly.
    */
-  @Test
+  @Test (timeout = 30000)
   public void testDisplayForAvroFiles() throws Exception {
     // Create a small Avro data file on the local file system.
     createAvroFile(generateWeatherAvroBinaryData());
@@ -51,7 +54,7 @@ public class TestTextCommand {
     // Prepare and call the Text command's protected getInputStream method
     // using reflection.
     Configuration conf = new Configuration();
-    File localPath = new File(AVRO_FILENAME);
+    URI localPath = new URI(AVRO_FILENAME);
     PathData pathData = new PathData(localPath, conf);
     Display.Text text = new Display.Text();
     text.setConf(conf);

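The TestTextCommand change feeds PathData a java.net.URI instead of a java.io.File, sidestepping File's platform-specific parsing of strings like "/C:/dir/weather.avro". A sketch of that constructor in use, assuming the PathData(URI, Configuration) overload this patch targets; the path is hypothetical:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.shell.PathData;

    public class UriPathDataSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        URI localPath = new URI("/tmp/testText/weather.avro"); // hypothetical
        PathData pathData = new PathData(localPath, conf);
        System.out.println(pathData.path);
      }
    }
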
Modified: hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java?rev=1453669&r1=1453668&r2=1453669&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Thu Mar  7 02:57:40 2013
@@ -61,6 +61,7 @@ import org.apache.hadoop.io.compress.zli
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionLevel;
 import org.apache.hadoop.io.compress.zlib.ZlibCompressor.CompressionStrategy;
 import org.apache.hadoop.io.compress.zlib.ZlibFactory;
+import org.apache.hadoop.io.compress.bzip2.Bzip2Factory;
 import org.apache.hadoop.util.LineReader;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.ReflectionUtils;
@@ -94,12 +95,33 @@ public class TestCodec {
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.GzipCodec");
   }
 
-  @Test
+  @Test(timeout=20000)
   public void testBZip2Codec() throws IOException {
+    Configuration conf = new Configuration();
+    conf.set("io.compression.codec.bzip2.library", "java-builtin");
     codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
     codecTest(conf, seed, count, "org.apache.hadoop.io.compress.BZip2Codec");
   }
   
+  @Test(timeout=20000)
+  public void testBZip2NativeCodec() throws IOException {
+    Configuration conf = new Configuration();
+    conf.set("io.compression.codec.bzip2.library", "system-native");
+    if (NativeCodeLoader.isNativeCodeLoaded()) {
+      if (Bzip2Factory.isNativeBzip2Loaded(conf)) {
+        codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
+        codecTest(conf, seed, count, 
+                  "org.apache.hadoop.io.compress.BZip2Codec");
+        conf.set("io.compression.codec.bzip2.library", "java-builtin");
+        codecTest(conf, seed, 0, "org.apache.hadoop.io.compress.BZip2Codec");
+        codecTest(conf, seed, count, 
+                  "org.apache.hadoop.io.compress.BZip2Codec");
+      } else {
+        LOG.warn("Native hadoop library available but native bzip2 is not");
+      }
+    }
+  }
+  
   @Test
   public void testSnappyCodec() throws IOException {
     if (SnappyCodec.isNativeCodeLoaded()) {
@@ -457,14 +479,37 @@ public class TestCodec {
     sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.DefaultCodec", 1000000);
   }
 
-  @Test
+  @Test(timeout=20000)
   public void testSequenceFileBZip2Codec() throws IOException, ClassNotFoundException,
       InstantiationException, IllegalAccessException {
+    Configuration conf = new Configuration();
+    conf.set("io.compression.codec.bzip2.library", "java-builtin");
     sequenceFileCodecTest(conf, 0, "org.apache.hadoop.io.compress.BZip2Codec", 100);
     sequenceFileCodecTest(conf, 100, "org.apache.hadoop.io.compress.BZip2Codec", 100);
     sequenceFileCodecTest(conf, 200000, "org.apache.hadoop.io.compress.BZip2Codec", 1000000);
   }
 
+  @Test(timeout=20000)
+  public void testSequenceFileBZip2NativeCodec() throws IOException, 
+                        ClassNotFoundException, InstantiationException, 
+                        IllegalAccessException {
+    Configuration conf = new Configuration();
+    conf.set("io.compression.codec.bzip2.library", "system-native");
+    if (NativeCodeLoader.isNativeCodeLoaded()) {
+      if (Bzip2Factory.isNativeBzip2Loaded(conf)) {
+        sequenceFileCodecTest(conf, 0, 
+                              "org.apache.hadoop.io.compress.BZip2Codec", 100);
+        sequenceFileCodecTest(conf, 100, 
+                              "org.apache.hadoop.io.compress.BZip2Codec", 100);
+        sequenceFileCodecTest(conf, 200000, 
+                              "org.apache.hadoop.io.compress.BZip2Codec", 
+                              1000000);
+      } else {
+        LOG.warn("Native hadoop library available but native bzip2 is not");
+      }
+    }
+  }
+
   @Test
   public void testSequenceFileDeflateCodec() throws IOException, ClassNotFoundException,
       InstantiationException, IllegalAccessException {