You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by su...@apache.org on 2012/12/12 20:15:45 UTC
svn commit: r1420921 [2/2] - in
/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common:
./ src/main/bin/ src/main/java/org/apache/hadoop/fs/
src/main/java/org/apache/hadoop/fs/shell/
src/main/java/org/apache/hadoop/io/ src/main/ja...
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Wed Dec 12 19:15:26 2012
@@ -16,12 +16,15 @@
* limitations under the License.
*/
-#include <dlfcn.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#ifdef UNIX
+#include <dlfcn.h>
#include "config.h"
+#endif
+
#include "org_apache_hadoop_io_compress_zlib.h"
#include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"
@@ -35,48 +38,124 @@ static jfieldID ZlibCompressor_directBuf
static jfieldID ZlibCompressor_finish;
static jfieldID ZlibCompressor_finished;
+#ifdef UNIX
static int (*dlsym_deflateInit2_)(z_streamp, int, int, int, int, int, const char *, int);
static int (*dlsym_deflate)(z_streamp, int);
static int (*dlsym_deflateSetDictionary)(z_streamp, const Bytef *, uInt);
static int (*dlsym_deflateReset)(z_streamp);
static int (*dlsym_deflateEnd)(z_streamp);
+#endif
+
+#ifdef WINDOWS
+#include <Strsafe.h>
+typedef int (__cdecl *__dlsym_deflateInit2_) (z_streamp, int, int, int, int, int, const char *, int);
+typedef int (__cdecl *__dlsym_deflate) (z_streamp, int);
+typedef int (__cdecl *__dlsym_deflateSetDictionary) (z_streamp, const Bytef *, uInt);
+typedef int (__cdecl *__dlsym_deflateReset) (z_streamp);
+typedef int (__cdecl *__dlsym_deflateEnd) (z_streamp);
+static __dlsym_deflateInit2_ dlsym_deflateInit2_;
+static __dlsym_deflate dlsym_deflate;
+static __dlsym_deflateSetDictionary dlsym_deflateSetDictionary;
+static __dlsym_deflateReset dlsym_deflateReset;
+static __dlsym_deflateEnd dlsym_deflateEnd;
+
+// Try to load zlib.dll from the dir where hadoop.dll is located.
+HANDLE LoadZlibTryHadoopNativeDir() {
+ HMODULE libz = NULL;
+ PCWSTR HADOOP_DLL = L"hadoop.dll";
+ size_t HADOOP_DLL_LEN = 10;
+ WCHAR path[MAX_PATH] = { 0 };
+ BOOL isPathValid = FALSE;
+
+ // Get hadoop.dll full path
+ HMODULE hModule = GetModuleHandle(HADOOP_DLL);
+ if (hModule != NULL) {
+ if (GetModuleFileName(hModule, path, MAX_PATH) > 0) {
+ size_t size = 0;
+ if (StringCchLength(path, MAX_PATH, &size) == S_OK) {
+
+ // Update path variable to have the full path to the zlib.dll
+ size = size - HADOOP_DLL_LEN;
+ if (size >= 0) {
+ path[size] = L'\0';
+ if (StringCchCat(path, MAX_PATH, HADOOP_ZLIB_LIBRARY) == S_OK) {
+ isPathValid = TRUE;
+ }
+ }
+ }
+ }
+ }
+
+ if (isPathValid) {
+ libz = LoadLibrary(path);
+ }
+
+ // fallback to system paths
+ if (!libz) {
+ libz = LoadLibrary(HADOOP_ZLIB_LIBRARY);
+ }
+
+ return libz;
+}
+#endif
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_initIDs(
JNIEnv *env, jclass class
) {
+#ifdef UNIX
// Load libz.so
void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
- if (!libz) {
+ if (!libz) {
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
return;
}
+#endif
+
+#ifdef WINDOWS
+ HMODULE libz = LoadZlibTryHadoopNativeDir();
+
+ if (!libz) {
+ THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
+ return;
+ }
+#endif
+#ifdef UNIX
// Locate the requisite symbols from libz.so
dlerror(); // Clear any existing error
- LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
- LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
- LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
- LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
- LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
+#endif
+
+#ifdef WINDOWS
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflateInit2_, dlsym_deflateInit2_, env, libz, "deflateInit2_");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflate, dlsym_deflate, env, libz, "deflate");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflateSetDictionary, dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflateReset, dlsym_deflateReset, env, libz, "deflateReset");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflateEnd, dlsym_deflateEnd, env, libz, "deflateEnd");
+#endif
// Initialize the requisite fieldIds
- ZlibCompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
+ ZlibCompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
"Ljava/lang/Class;");
ZlibCompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
ZlibCompressor_finish = (*env)->GetFieldID(env, class, "finish", "Z");
ZlibCompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
- ZlibCompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
- "uncompressedDirectBuf",
+ ZlibCompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
+ "uncompressedDirectBuf",
"Ljava/nio/Buffer;");
- ZlibCompressor_uncompressedDirectBufOff = (*env)->GetFieldID(env, class,
+ ZlibCompressor_uncompressedDirectBufOff = (*env)->GetFieldID(env, class,
"uncompressedDirectBufOff", "I");
- ZlibCompressor_uncompressedDirectBufLen = (*env)->GetFieldID(env, class,
+ ZlibCompressor_uncompressedDirectBufLen = (*env)->GetFieldID(env, class,
"uncompressedDirectBufLen", "I");
- ZlibCompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
- "compressedDirectBuf",
+ ZlibCompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
+ "compressedDirectBuf",
"Ljava/nio/Buffer;");
- ZlibCompressor_directBufferSize = (*env)->GetFieldID(env, class,
+ ZlibCompressor_directBufferSize = (*env)->GetFieldID(env, class,
"directBufferSize", "I");
}
@@ -84,7 +163,9 @@ JNIEXPORT jlong JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
JNIEnv *env, jclass class, jint level, jint strategy, jint windowBits
) {
- // Create a z_stream
+ int rv = 0;
+ static const int memLevel = 8; // See zconf.h
+ // Create a z_stream
z_stream *stream = malloc(sizeof(z_stream));
if (!stream) {
THROW(env, "java/lang/OutOfMemoryError", NULL);
@@ -93,17 +174,16 @@ Java_org_apache_hadoop_io_compress_zlib_
memset((void*)stream, 0, sizeof(z_stream));
// Initialize stream
- static const int memLevel = 8; // See zconf.h
- int rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
+ rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
memLevel, strategy, ZLIB_VERSION, sizeof(z_stream));
-
+
if (rv != Z_OK) {
// Contingency - Report error by throwing appropriate exceptions
free(stream);
stream = NULL;
-
+
switch (rv) {
- case Z_MEM_ERROR:
+ case Z_MEM_ERROR:
{
THROW(env, "java/lang/OutOfMemoryError", NULL);
}
@@ -120,27 +200,28 @@ Java_org_apache_hadoop_io_compress_zlib_
break;
}
}
-
+
return JLONG(stream);
}
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_setDictionary(
- JNIEnv *env, jclass class, jlong stream,
+ JNIEnv *env, jclass class, jlong stream,
jarray b, jint off, jint len
) {
+ int rv = 0;
Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
if (!buf) {
return;
}
- int rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
+ rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
(*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
-
+
if (rv != Z_OK) {
// Contingency - Report error by throwing appropriate exceptions
switch (rv) {
case Z_STREAM_ERROR:
- {
+ {
THROW(env, "java/lang/IllegalArgumentException", NULL);
}
break;
@@ -157,75 +238,85 @@ JNIEXPORT jint JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
JNIEnv *env, jobject this
) {
+ jobject clazz = NULL;
+ jobject uncompressed_direct_buf = NULL;
+ jint uncompressed_direct_buf_off = 0;
+ jint uncompressed_direct_buf_len = 0;
+ jobject compressed_direct_buf = NULL;
+ jint compressed_direct_buf_len = 0;
+ jboolean finish;
+ Bytef* uncompressed_bytes = NULL;
+ Bytef* compressed_bytes = NULL;
+ int rv = 0;
+ jint no_compressed_bytes = 0;
// Get members of ZlibCompressor
z_stream *stream = ZSTREAM(
- (*env)->GetLongField(env, this,
+ (*env)->GetLongField(env, this,
ZlibCompressor_stream)
);
if (!stream) {
THROW(env, "java/lang/NullPointerException", NULL);
return (jint)0;
- }
+ }
// Get members of ZlibCompressor
- jobject clazz = (*env)->GetStaticObjectField(env, this,
+ clazz = (*env)->GetStaticObjectField(env, this,
ZlibCompressor_clazz);
- jobject uncompressed_direct_buf = (*env)->GetObjectField(env, this,
+ uncompressed_direct_buf = (*env)->GetObjectField(env, this,
ZlibCompressor_uncompressedDirectBuf);
- jint uncompressed_direct_buf_off = (*env)->GetIntField(env, this,
+ uncompressed_direct_buf_off = (*env)->GetIntField(env, this,
ZlibCompressor_uncompressedDirectBufOff);
- jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
+ uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
ZlibCompressor_uncompressedDirectBufLen);
- jobject compressed_direct_buf = (*env)->GetObjectField(env, this,
+ compressed_direct_buf = (*env)->GetObjectField(env, this,
ZlibCompressor_compressedDirectBuf);
- jint compressed_direct_buf_len = (*env)->GetIntField(env, this,
+ compressed_direct_buf_len = (*env)->GetIntField(env, this,
ZlibCompressor_directBufferSize);
- jboolean finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
+ finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
// Get the input direct buffer
LOCK_CLASS(env, clazz, "ZlibCompressor");
- Bytef* uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
+ uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
uncompressed_direct_buf);
UNLOCK_CLASS(env, clazz, "ZlibCompressor");
-
+
if (uncompressed_bytes == 0) {
return (jint)0;
}
-
+
// Get the output direct buffer
LOCK_CLASS(env, clazz, "ZlibCompressor");
- Bytef* compressed_bytes = (*env)->GetDirectBufferAddress(env,
+ compressed_bytes = (*env)->GetDirectBufferAddress(env,
compressed_direct_buf);
UNLOCK_CLASS(env, clazz, "ZlibCompressor");
if (compressed_bytes == 0) {
return (jint)0;
}
-
+
// Re-calibrate the z_stream
stream->next_in = uncompressed_bytes + uncompressed_direct_buf_off;
stream->next_out = compressed_bytes;
stream->avail_in = uncompressed_direct_buf_len;
- stream->avail_out = compressed_direct_buf_len;
-
+ stream->avail_out = compressed_direct_buf_len;
+
// Compress
- int rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
+ rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
- jint no_compressed_bytes = 0;
switch (rv) {
// Contingency? - Report error by throwing appropriate exceptions
case Z_STREAM_END:
{
(*env)->SetBooleanField(env, this, ZlibCompressor_finished, JNI_TRUE);
} // cascade
- case Z_OK:
+ case Z_OK:
{
uncompressed_direct_buf_off += uncompressed_direct_buf_len - stream->avail_in;
- (*env)->SetIntField(env, this,
+ (*env)->SetIntField(env, this,
ZlibCompressor_uncompressedDirectBufOff, uncompressed_direct_buf_off);
- (*env)->SetIntField(env, this,
+ (*env)->SetIntField(env, this,
ZlibCompressor_uncompressedDirectBufLen, stream->avail_in);
no_compressed_bytes = compressed_direct_buf_len - stream->avail_out;
}
@@ -238,7 +329,7 @@ Java_org_apache_hadoop_io_compress_zlib_
}
break;
}
-
+
return no_compressed_bytes;
}
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c Wed Dec 12 19:15:26 2012
@@ -16,12 +16,15 @@
* limitations under the License.
*/
-#include <dlfcn.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#ifdef UNIX
+#include <dlfcn.h>
#include "config.h"
+#endif
+
#include "org_apache_hadoop_io_compress_zlib.h"
#include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"
@@ -35,48 +38,88 @@ static jfieldID ZlibDecompressor_directB
static jfieldID ZlibDecompressor_needDict;
static jfieldID ZlibDecompressor_finished;
+#ifdef UNIX
static int (*dlsym_inflateInit2_)(z_streamp, int, const char *, int);
static int (*dlsym_inflate)(z_streamp, int);
static int (*dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
static int (*dlsym_inflateReset)(z_streamp);
static int (*dlsym_inflateEnd)(z_streamp);
+#endif
+
+#ifdef WINDOWS
+#include <Strsafe.h>
+typedef int (__cdecl *__dlsym_inflateInit2_)(z_streamp, int, const char *, int);
+typedef int (__cdecl *__dlsym_inflate)(z_streamp, int);
+typedef int (__cdecl *__dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
+typedef int (__cdecl *__dlsym_inflateReset)(z_streamp);
+typedef int (__cdecl *__dlsym_inflateEnd)(z_streamp);
+static __dlsym_inflateInit2_ dlsym_inflateInit2_;
+static __dlsym_inflate dlsym_inflate;
+static __dlsym_inflateSetDictionary dlsym_inflateSetDictionary;
+static __dlsym_inflateReset dlsym_inflateReset;
+static __dlsym_inflateEnd dlsym_inflateEnd;
+extern HANDLE LoadZlibTryHadoopNativeDir();
+#endif
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_initIDs(
- JNIEnv *env, jclass class
+JNIEnv *env, jclass class
) {
// Load libz.so
- void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
+#ifdef UNIX
+ void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
if (!libz) {
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
return;
- }
+ }
+#endif
+
+#ifdef WINDOWS
+ HMODULE libz = LoadZlibTryHadoopNativeDir();
+
+ if (!libz) {
+ THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
+ return;
+ }
+#endif
+
// Locate the requisite symbols from libz.so
+#ifdef UNIX
dlerror(); // Clear any existing error
LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
LOAD_DYNAMIC_SYMBOL(dlsym_inflate, env, libz, "inflate");
LOAD_DYNAMIC_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
LOAD_DYNAMIC_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
LOAD_DYNAMIC_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
+#endif
+
+#ifdef WINDOWS
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflateInit2_, dlsym_inflateInit2_, env, libz, "inflateInit2_");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflate, dlsym_inflate, env, libz, "inflate");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflateSetDictionary, dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflateReset, dlsym_inflateReset, env, libz, "inflateReset");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflateEnd, dlsym_inflateEnd, env, libz, "inflateEnd");
+#endif
+
- // Initialize the requisite fieldIds
- ZlibDecompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
+ // Initialize the requisite fieldIds
+ ZlibDecompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
"Ljava/lang/Class;");
ZlibDecompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
ZlibDecompressor_needDict = (*env)->GetFieldID(env, class, "needDict", "Z");
ZlibDecompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
- ZlibDecompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
- "compressedDirectBuf",
+ ZlibDecompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
+ "compressedDirectBuf",
"Ljava/nio/Buffer;");
- ZlibDecompressor_compressedDirectBufOff = (*env)->GetFieldID(env, class,
+ ZlibDecompressor_compressedDirectBufOff = (*env)->GetFieldID(env, class,
"compressedDirectBufOff", "I");
- ZlibDecompressor_compressedDirectBufLen = (*env)->GetFieldID(env, class,
+ ZlibDecompressor_compressedDirectBufLen = (*env)->GetFieldID(env, class,
"compressedDirectBufLen", "I");
- ZlibDecompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
- "uncompressedDirectBuf",
+ ZlibDecompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
+ "uncompressedDirectBuf",
"Ljava/nio/Buffer;");
- ZlibDecompressor_directBufferSize = (*env)->GetFieldID(env, class,
+ ZlibDecompressor_directBufferSize = (*env)->GetFieldID(env, class,
"directBufferSize", "I");
}
@@ -84,21 +127,22 @@ JNIEXPORT jlong JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_init(
JNIEnv *env, jclass cls, jint windowBits
) {
+ int rv = 0;
z_stream *stream = malloc(sizeof(z_stream));
memset((void*)stream, 0, sizeof(z_stream));
if (stream == 0) {
THROW(env, "java/lang/OutOfMemoryError", NULL);
return (jlong)0;
- }
-
- int rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
+ }
+
+ rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
if (rv != Z_OK) {
// Contingency - Report error by throwing appropriate exceptions
free(stream);
stream = NULL;
-
+
switch (rv) {
case Z_MEM_ERROR:
{
@@ -112,7 +156,7 @@ Java_org_apache_hadoop_io_compress_zlib_
break;
}
}
-
+
return JLONG(stream);
}
@@ -121,21 +165,22 @@ Java_org_apache_hadoop_io_compress_zlib_
JNIEnv *env, jclass cls, jlong stream,
jarray b, jint off, jint len
) {
+ int rv = 0;
Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
if (!buf) {
THROW(env, "java/lang/InternalError", NULL);
return;
}
- int rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
+ rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
(*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
-
+
if (rv != Z_OK) {
// Contingency - Report error by throwing appropriate exceptions
switch (rv) {
case Z_STREAM_ERROR:
case Z_DATA_ERROR:
{
- THROW(env, "java/lang/IllegalArgumentException",
+ THROW(env, "java/lang/IllegalArgumentException",
(ZSTREAM(stream))->msg);
}
break;
@@ -152,62 +197,71 @@ JNIEXPORT jint JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
JNIEnv *env, jobject this
) {
+ jobject clazz = NULL;
+ jarray compressed_direct_buf = NULL;
+ jint compressed_direct_buf_off = 0;
+ jint compressed_direct_buf_len = 0;
+ jarray uncompressed_direct_buf = NULL;
+ jint uncompressed_direct_buf_len = 0;
+ Bytef *compressed_bytes = NULL;
+ Bytef *uncompressed_bytes = NULL;
+ int rv = 0;
+ int no_decompressed_bytes = 0;
// Get members of ZlibDecompressor
z_stream *stream = ZSTREAM(
- (*env)->GetLongField(env, this,
+ (*env)->GetLongField(env, this,
ZlibDecompressor_stream)
);
if (!stream) {
THROW(env, "java/lang/NullPointerException", NULL);
return (jint)0;
- }
+ }
// Get members of ZlibDecompressor
- jobject clazz = (*env)->GetStaticObjectField(env, this,
+ clazz = (*env)->GetStaticObjectField(env, this,
ZlibDecompressor_clazz);
- jarray compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
+ compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
ZlibDecompressor_compressedDirectBuf);
- jint compressed_direct_buf_off = (*env)->GetIntField(env, this,
+ compressed_direct_buf_off = (*env)->GetIntField(env, this,
ZlibDecompressor_compressedDirectBufOff);
- jint compressed_direct_buf_len = (*env)->GetIntField(env, this,
+ compressed_direct_buf_len = (*env)->GetIntField(env, this,
ZlibDecompressor_compressedDirectBufLen);
- jarray uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
+ uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
ZlibDecompressor_uncompressedDirectBuf);
- jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
+ uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
ZlibDecompressor_directBufferSize);
// Get the input direct buffer
LOCK_CLASS(env, clazz, "ZlibDecompressor");
- Bytef *compressed_bytes = (*env)->GetDirectBufferAddress(env,
+ compressed_bytes = (*env)->GetDirectBufferAddress(env,
compressed_direct_buf);
UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
-
+
if (!compressed_bytes) {
return (jint)0;
}
-
+
// Get the output direct buffer
LOCK_CLASS(env, clazz, "ZlibDecompressor");
- Bytef *uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
+ uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
uncompressed_direct_buf);
UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
if (!uncompressed_bytes) {
return (jint)0;
}
-
+
// Re-calibrate the z_stream
stream->next_in = compressed_bytes + compressed_direct_buf_off;
stream->next_out = uncompressed_bytes;
stream->avail_in = compressed_direct_buf_len;
stream->avail_out = uncompressed_direct_buf_len;
-
+
// Decompress
- int rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
+ rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
// Contingency? - Report error by throwing appropriate exceptions
- int no_decompressed_bytes = 0;
switch (rv) {
case Z_STREAM_END:
{
@@ -216,9 +270,9 @@ Java_org_apache_hadoop_io_compress_zlib_
case Z_OK:
{
compressed_direct_buf_off += compressed_direct_buf_len - stream->avail_in;
- (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
+ (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
compressed_direct_buf_off);
- (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
+ (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
stream->avail_in);
no_decompressed_bytes = uncompressed_direct_buf_len - stream->avail_out;
}
@@ -227,9 +281,9 @@ Java_org_apache_hadoop_io_compress_zlib_
{
(*env)->SetBooleanField(env, this, ZlibDecompressor_needDict, JNI_TRUE);
compressed_direct_buf_off += compressed_direct_buf_len - stream->avail_in;
- (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
+ (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
compressed_direct_buf_off);
- (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
+ (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
stream->avail_in);
}
break;
@@ -251,7 +305,7 @@ Java_org_apache_hadoop_io_compress_zlib_
}
break;
}
-
+
return no_decompressed_bytes;
}
@@ -299,4 +353,3 @@ Java_org_apache_hadoop_io_compress_zlib_
/**
* vim: sw=2: ts=2: et:
*/
-
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h Wed Dec 12 19:15:26 2012
@@ -19,14 +19,23 @@
#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
#define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
-#include <dlfcn.h>
-#include <jni.h>
+#include "org_apache_hadoop.h"
+
+#ifdef UNIX
+#include <config.h>
#include <stddef.h>
-#include <zconf.h>
#include <zlib.h>
+#include <zconf.h>
+#include <dlfcn.h>
+#include <jni.h>
+#endif
-#include "config.h"
-#include "org_apache_hadoop.h"
+#ifdef WINDOWS
+#include <jni.h>
+#define HADOOP_ZLIB_LIBRARY L"zlib1.dll"
+#include <zlib.h>
+#include <zconf.h>
+#endif
/* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
#define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Wed Dec 12 19:15:26 2012
@@ -18,6 +18,10 @@
#define _GNU_SOURCE
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_io_nativeio_NativeIO.h"
+
+#ifdef UNIX
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
@@ -30,14 +34,19 @@
#include <sys/types.h>
#include <sys/syscall.h>
#include <unistd.h>
-
#include "config.h"
-#include "org_apache_hadoop.h"
-#include "org_apache_hadoop_io_nativeio_NativeIO.h"
+#endif
+
+#ifdef WINDOWS
+#include <assert.h>
+#include <Windows.h>
+#include "winutils.h"
+#endif
+
#include "file_descriptor.h"
#include "errno_enum.h"
-// the NativeIO$Stat inner class and its constructor
+// the NativeIO$POSIX$Stat inner class and its constructor
static jclass stat_clazz;
static jmethodID stat_ctor;
@@ -52,26 +61,32 @@ static jobject pw_lock_object;
// Internal functions
static void throw_ioe(JNIEnv* env, int errnum);
+#ifdef UNIX
static ssize_t get_pw_buflen();
+#endif
/**
* Returns non-zero if the user has specified that the system
* has non-threadsafe implementations of getpwuid_r or getgrgid_r.
**/
static int workaround_non_threadsafe_calls(JNIEnv *env, jclass clazz) {
- jfieldID needs_workaround_field = (*env)->GetStaticFieldID(env, clazz,
- "workaroundNonThreadSafePasswdCalls", "Z");
+ jboolean result;
+ jfieldID needs_workaround_field = (*env)->GetStaticFieldID(
+ env, clazz,
+ "workaroundNonThreadSafePasswdCalls",
+ "Z");
PASS_EXCEPTIONS_RET(env, 0);
assert(needs_workaround_field);
- jboolean result = (*env)->GetStaticBooleanField(
+ result = (*env)->GetStaticBooleanField(
env, clazz, needs_workaround_field);
return result;
}
+#ifdef UNIX
static void stat_init(JNIEnv *env, jclass nativeio_class) {
// Init Stat
- jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
+ jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat");
if (!clazz) {
return; // exception has been raised
}
@@ -84,6 +99,7 @@ static void stat_init(JNIEnv *env, jclas
if (!stat_ctor) {
return; // exception has been raised
}
+
jclass obj_class = (*env)->FindClass(env, "java/lang/Object");
if (!obj_class) {
return; // exception has been raised
@@ -98,6 +114,7 @@ static void stat_init(JNIEnv *env, jclas
pw_lock_object = (*env)->NewObject(env, obj_class, obj_ctor);
PASS_EXCEPTIONS(env);
pw_lock_object = (*env)->NewGlobalRef(env, pw_lock_object);
+
PASS_EXCEPTIONS(env);
}
}
@@ -112,6 +129,7 @@ static void stat_deinit(JNIEnv *env) {
pw_lock_object = NULL;
}
}
+#endif
static void nioe_init(JNIEnv *env) {
// Init NativeIOException
@@ -120,8 +138,15 @@ static void nioe_init(JNIEnv *env) {
PASS_EXCEPTIONS(env);
nioe_clazz = (*env)->NewGlobalRef(env, nioe_clazz);
+#ifdef UNIX
nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
"(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V");
+#endif
+
+#ifdef WINDOWS
+ nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
+ "(Ljava/lang/String;I)V");
+#endif
}
static void nioe_deinit(JNIEnv *env) {
@@ -142,32 +167,46 @@ static void nioe_deinit(JNIEnv *env) {
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_nativeio_NativeIO_initNative(
JNIEnv *env, jclass clazz) {
-
+#ifdef UNIX
stat_init(env, clazz);
PASS_EXCEPTIONS_GOTO(env, error);
+#endif
nioe_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
fd_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
+#ifdef UNIX
errno_enum_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
+#endif
return;
error:
// these are all idempodent and safe to call even if the
// class wasn't initted yet
+#ifdef UNIX
stat_deinit(env);
+#endif
nioe_deinit(env);
fd_deinit(env);
+#ifdef UNIX
errno_enum_deinit(env);
+#endif
}
/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_POSIX
+ * Method: fstat
+ * Signature: (Ljava/io/FileDescriptor;)Lorg/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat;
* public static native Stat fstat(FileDescriptor fd);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT jobject JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_fstat(
JNIEnv *env, jclass clazz, jobject fd_object)
{
+#ifdef UNIX
jobject ret = NULL;
int fd = fd_get(env, fd_object);
@@ -186,14 +225,26 @@ Java_org_apache_hadoop_io_nativeio_Nativ
cleanup:
return ret;
+#endif
+
+#ifdef WINDOWS
+ THROW(env, "java/io/IOException",
+ "The function POSIX.fstat() is not supported on Windows");
+ return NULL;
+#endif
}
+
+
/**
* public static native void posix_fadvise(
* FileDescriptor fd, long offset, long len, int flags);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_posix_1fadvise(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_posix_1fadvise(
JNIEnv *env, jclass clazz,
jobject fd_object, jlong offset, jlong len, jint flags)
{
@@ -239,9 +290,12 @@ static int manual_sync_file_range (int f
/**
* public static native void sync_file_range(
* FileDescriptor fd, long offset, long len, int flags);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_sync_1file_1range(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_sync_1file_1range(
JNIEnv *env, jclass clazz,
jobject fd_object, jlong offset, jlong len, jint flags)
{
@@ -283,13 +337,20 @@ static int toFreeBSDFlags(int flags)
#endif
/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_POSIX
+ * Method: open
+ * Signature: (Ljava/lang/String;II)Ljava/io/FileDescriptor;
* public static native FileDescriptor open(String path, int flags, int mode);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT jobject JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_open(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_open(
JNIEnv *env, jclass clazz, jstring j_path,
jint flags, jint mode)
{
+#ifdef UNIX
#ifdef __FreeBSD__
flags = toFreeBSDFlags(flags);
#endif
@@ -317,16 +378,90 @@ cleanup:
(*env)->ReleaseStringUTFChars(env, j_path, path);
}
return ret;
+#endif
+
+#ifdef WINDOWS
+ THROW(env, "java/io/IOException",
+ "The function POSIX.open() is not supported on Windows");
+ return NULL;
+#endif
}
-/**
- * public static native void chmod(String path, int mode) throws IOException;
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method: createFile
+ * Signature: (Ljava/lang/String;JJJ)Ljava/io/FileDescriptor;
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
-JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_chmod(
- JNIEnv *env, jclass clazz, jstring j_path,
- jint mode)
+JNIEXPORT jobject JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_createFile
+ (JNIEnv *env, jclass clazz, jstring j_path,
+ jlong desiredAccess, jlong shareMode, jlong creationDisposition)
+{
+#ifdef UNIX
+ THROW(env, "java/io/IOException",
+ "The function Windows.createFile() is not supported on Unix");
+ return NULL;
+#endif
+
+#ifdef WINDOWS
+ DWORD dwRtnCode = ERROR_SUCCESS;
+ BOOL isSymlink = FALSE;
+ BOOL isJunction = FALSE;
+ DWORD dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL | FILE_FLAG_BACKUP_SEMANTICS;
+ jobject ret = (jobject) NULL;
+ HANDLE hFile = INVALID_HANDLE_VALUE;
+ WCHAR *path = (WCHAR *) (*env)->GetStringChars(env, j_path, (jboolean*)NULL);
+ if (path == NULL) goto cleanup;
+
+  // Set the flag for a symbolic link or a junction point only when it exists.
+ // According to MSDN if the call to CreateFile() function creates a file,
+ // there is no change in behavior. So we do not throw if no file is found.
+ //
+ dwRtnCode = SymbolicLinkCheck(path, &isSymlink);
+ if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
+ throw_ioe(env, dwRtnCode);
+ goto cleanup;
+ }
+ dwRtnCode = JunctionPointCheck(path, &isJunction);
+ if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
+ throw_ioe(env, dwRtnCode);
+ goto cleanup;
+ }
+ if (isSymlink || isJunction)
+ dwFlagsAndAttributes |= FILE_FLAG_OPEN_REPARSE_POINT;
+
+ hFile = CreateFile(path,
+ (DWORD) desiredAccess,
+ (DWORD) shareMode,
+ (LPSECURITY_ATTRIBUTES ) NULL,
+ (DWORD) creationDisposition,
+ dwFlagsAndAttributes,
+ NULL);
+ if (hFile == INVALID_HANDLE_VALUE) {
+ throw_ioe(env, GetLastError());
+ goto cleanup;
+ }
+
+ ret = fd_create(env, (long) hFile);
+cleanup:
+ if (path != NULL) {
+ (*env)->ReleaseStringChars(env, j_path, (const jchar*)path);
+ }
+ return (jobject) ret;
+#endif
+}
+
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_POSIX
+ * Method: chmod
+ * Signature: (Ljava/lang/String;I)V
+ */
+JNIEXPORT void JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_chmodImpl
+ (JNIEnv *env, jclass clazz, jstring j_path, jint mode)
{
+#ifdef UNIX
const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
if (path == NULL) return; // JVM throws Exception for us
@@ -335,15 +470,30 @@ Java_org_apache_hadoop_io_nativeio_Nativ
}
(*env)->ReleaseStringUTFChars(env, j_path, path);
+#endif
+
+#ifdef WINDOWS
+ DWORD dwRtnCode = ERROR_SUCCESS;
+ LPCWSTR path = (LPCWSTR) (*env)->GetStringChars(env, j_path, NULL);
+ if (path == NULL) return; // JVM throws Exception for us
+
+ if ((dwRtnCode = ChangeFileModeByMask((LPCWSTR) path, mode)) != ERROR_SUCCESS)
+ {
+ throw_ioe(env, dwRtnCode);
+ }
+
+ (*env)->ReleaseStringChars(env, j_path, (const jchar*) path);
+#endif
}
/*
* static native String getUserName(int uid);
*/
JNIEXPORT jstring JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_getUserName(JNIEnv *env,
-jclass clazz, jint uid)
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getUserName(
+ JNIEnv *env, jclass clazz, jint uid)
{
+#ifdef UNIX
int pw_lock_locked = 0;
if (pw_lock_object != NULL) {
if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
@@ -395,15 +545,26 @@ cleanup:
}
if (pw_buf != NULL) free(pw_buf);
return jstr_username;
+#endif // UNIX
+
+#ifdef WINDOWS
+ THROW(env, "java/io/IOException",
+ "The function POSIX.getUserName() is not supported on Windows");
+ return NULL;
+#endif
}
/*
* static native String getGroupName(int gid);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT jstring JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_getGroupName(JNIEnv *env,
-jclass clazz, jint gid)
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getGroupName(
+ JNIEnv *env, jclass clazz, jint gid)
{
+#ifdef UNIX
int pw_lock_locked = 0;
if (pw_lock_object != NULL) {
@@ -457,14 +618,21 @@ cleanup:
}
if (pw_buf != NULL) free(pw_buf);
return jstr_groupname;
-}
+#endif // UNIX
+#ifdef WINDOWS
+ THROW(env, "java/io/IOException",
+    "The function POSIX.getGroupName() is not supported on Windows");
+ return NULL;
+#endif
+}
/*
* Throw a java.IO.IOException, generating the message from errno.
*/
static void throw_ioe(JNIEnv* env, int errnum)
{
+#ifdef UNIX
char message[80];
jstring jstr_message;
@@ -489,9 +657,51 @@ static void throw_ioe(JNIEnv* env, int e
err:
if (jstr_message != NULL)
(*env)->ReleaseStringUTFChars(env, jstr_message, message);
-}
+#endif
+
+#ifdef WINDOWS
+ DWORD len = 0;
+ LPWSTR buffer = NULL;
+ const jchar* message = NULL;
+ jstring jstr_message = NULL;
+ jthrowable obj = NULL;
+
+ len = FormatMessageW(
+ FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
+ NULL, *(DWORD*) (&errnum), // reinterpret cast
+ MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
+ (LPWSTR) &buffer, 0, NULL);
+
+ if (len > 0)
+ {
+ message = (const jchar*) buffer;
+ }
+ else
+ {
+ message = (const jchar*) L"Unknown error.";
+ }
+
+ if ((jstr_message = (*env)->NewString(env, message, len)) == NULL)
+ goto err;
+ LocalFree(buffer);
+ buffer = NULL; // Set buffer to NULL to avoid double free
+
+ obj = (jthrowable)(*env)->NewObject(env, nioe_clazz, nioe_ctor,
+ jstr_message, errnum);
+ if (obj == NULL) goto err;
+ (*env)->Throw(env, obj);
+ return;
+err:
+  // jstr_message was created with NewString (not GetStringChars), so there
+  // are no string chars to release; only the FormatMessage buffer needs freeing.
+  LocalFree(buffer);
+  return;
+#endif
+}
+
+#ifdef UNIX
/*
* Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
*/
@@ -502,6 +712,145 @@ ssize_t get_pw_buflen() {
#endif
return (ret > 512) ? ret : 512;
}
+#endif
+
+
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method: getOwner
+ * Signature: (Ljava/io/FileDescriptor;)Ljava/lang/String;
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT jstring JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_getOwner
+ (JNIEnv *env, jclass clazz, jobject fd_object)
+{
+#ifdef UNIX
+ THROW(env, "java/io/IOException",
+ "The function Windows.getOwner() is not supported on Unix");
+ return NULL;
+#endif
+
+#ifdef WINDOWS
+ PSID pSidOwner = NULL;
+ PSECURITY_DESCRIPTOR pSD = NULL;
+ LPWSTR ownerName = (LPWSTR)NULL;
+ DWORD dwRtnCode = ERROR_SUCCESS;
+ jstring jstr_username = NULL;
+ HANDLE hFile = (HANDLE) fd_get(env, fd_object);
+ PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+ dwRtnCode = GetSecurityInfo(
+ hFile,
+ SE_FILE_OBJECT,
+ OWNER_SECURITY_INFORMATION,
+ &pSidOwner,
+ NULL,
+ NULL,
+ NULL,
+ &pSD);
+ if (dwRtnCode != ERROR_SUCCESS) {
+ throw_ioe(env, dwRtnCode);
+ goto cleanup;
+ }
+
+ dwRtnCode = GetAccntNameFromSid(pSidOwner, &ownerName);
+ if (dwRtnCode != ERROR_SUCCESS) {
+ throw_ioe(env, dwRtnCode);
+ goto cleanup;
+ }
+
+ jstr_username = (*env)->NewString(env, ownerName, (jsize) wcslen(ownerName));
+ if (jstr_username == NULL) goto cleanup;
+
+cleanup:
+ LocalFree(ownerName);
+ LocalFree(pSD);
+ return jstr_username;
+#endif
+}
+
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method: setFilePointer
+ * Signature: (Ljava/io/FileDescriptor;JJ)J
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_setFilePointer
+ (JNIEnv *env, jclass clazz, jobject fd_object, jlong distanceToMove, jlong moveMethod)
+{
+#ifdef UNIX
+ THROW(env, "java/io/IOException",
+ "The function setFilePointer(FileDescriptor) is not supported on Unix");
+  return -1; // function returns jlong, so an integer (not NULL) must be returned
+#endif
+
+#ifdef WINDOWS
+ DWORD distanceToMoveLow = (DWORD) distanceToMove;
+ LONG distanceToMoveHigh = (LONG) (distanceToMove >> 32);
+ DWORD distanceMovedLow = 0;
+ HANDLE hFile = (HANDLE) fd_get(env, fd_object);
+ PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+ distanceMovedLow = SetFilePointer(hFile,
+ distanceToMoveLow, &distanceToMoveHigh, (DWORD) moveMethod);
+
+  // INVALID_SET_FILE_POINTER is a valid low dword when the high dword is in
+  // use, so per MSDN the error must be confirmed with GetLastError().
+  if (distanceMovedLow == INVALID_SET_FILE_POINTER &&
+      GetLastError() != NO_ERROR) {
+    throw_ioe(env, GetLastError());
+    return -1;
+  }
+
+cleanup:
+
+ return ((jlong) distanceToMoveHigh << 32) | (jlong) distanceMovedLow;
+#endif
+}
+
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method: getLengthFollowSymlink
+ * Signature: (Ljava/lang/String;)J
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_getLengthFollowSymlink
+ (JNIEnv *env, jclass clazz, jstring j_path)
+{
+#ifdef UNIX
+ THROW(env, "java/io/IOException",
+ "The function getLengthFollowSymlink(String) is not supported on Unix");
+ return 0;
+#endif
+
+#ifdef WINDOWS
+ DWORD dwRtnCode = ERROR_SUCCESS;
+ BY_HANDLE_FILE_INFORMATION fileInfo = { 0 };
+ LARGE_INTEGER fileSize = { 0 };
+
+ const wchar_t *path = (const wchar_t*) (*env)->GetStringChars(env, j_path, NULL);
+ if (path == NULL) return 0; // JVM throws Exception for us
+
+ dwRtnCode = GetFileInformationByName(path, TRUE, &fileInfo);
+  if (dwRtnCode != ERROR_SUCCESS) {
+    throw_ioe(env, dwRtnCode);
+    (*env)->ReleaseStringChars(env, j_path, path);
+    return -1;
+  }
+
+ (*env)->ReleaseStringChars(env, j_path, path);
+
+ fileSize.HighPart = fileInfo.nFileSizeHigh;
+ fileSize.LowPart = fileInfo.nFileSizeLow;
+
+ return (jlong)(fileSize.QuadPart);
+#endif
+}
+
/**
* vim: sw=2: ts=2: et:
*/
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c Wed Dec 12 19:15:26 2012
@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
+
#include <jni.h>
#include "file_descriptor.h"
#include "org_apache_hadoop.h"
@@ -26,6 +26,10 @@ static jfieldID fd_descriptor;
// the no-argument constructor
static jmethodID fd_constructor;
+#ifdef WINDOWS
+// the internal field for the long handle
+static jfieldID fd_handle;
+#endif
void fd_init(JNIEnv* env)
{
@@ -37,6 +41,12 @@ void fd_init(JNIEnv* env)
fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
PASS_EXCEPTIONS(env);
+
+#ifdef WINDOWS
+ fd_handle = (*env)->GetFieldID(env, fd_class, "handle", "J");
+ PASS_EXCEPTIONS(env);
+#endif
+
fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
}
@@ -46,9 +56,13 @@ void fd_deinit(JNIEnv *env) {
fd_class = NULL;
}
fd_descriptor = NULL;
+#ifdef WINDOWS
+ fd_handle = NULL;
+#endif
fd_constructor = NULL;
}
+#ifdef UNIX
/*
* Given an instance 'obj' of java.io.FileDescriptor, return the
* underlying fd, or throw if unavailable
@@ -71,4 +85,31 @@ jobject fd_create(JNIEnv *env, int fd) {
(*env)->SetIntField(env, obj, fd_descriptor, fd);
return obj;
-}
+}
+#endif
+
+#ifdef WINDOWS
+/*
+ * Given an instance 'obj' of java.io.FileDescriptor, return the
+ * underlying fd, or throw if unavailable
+ */
+long fd_get(JNIEnv* env, jobject obj) {
+ if (obj == NULL) {
+ THROW(env, "java/lang/NullPointerException",
+ "FileDescriptor object is null");
+ return -1;
+ }
+ return (long) (*env)->GetLongField(env, obj, fd_handle);
+}
+
+/*
+ * Create a FileDescriptor object corresponding to the given int fd
+ */
+jobject fd_create(JNIEnv *env, long fd) {
+ jobject obj = (*env)->NewObject(env, fd_class, fd_constructor);
+ PASS_EXCEPTIONS_RET(env, (jobject) NULL);
+
+ (*env)->SetLongField(env, obj, fd_handle, fd);
+ return obj;
+}
+#endif
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h Wed Dec 12 19:15:26 2012
@@ -18,11 +18,19 @@
#define FILE_DESCRIPTOR_H
#include <jni.h>
+#include "org_apache_hadoop.h"
void fd_init(JNIEnv *env);
void fd_deinit(JNIEnv *env);
+#ifdef UNIX
int fd_get(JNIEnv* env, jobject obj);
jobject fd_create(JNIEnv *env, int fd);
+#endif
+
+#ifdef WINDOWS
+long fd_get(JNIEnv* env, jobject obj);
+jobject fd_create(JNIEnv *env, long fd);
+#endif
#endif
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c Wed Dec 12 19:15:26 2012
@@ -16,18 +16,22 @@
* limitations under the License.
*/
-#include <arpa/inet.h>
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_util_NativeCrc32.h"
+
#include <assert.h>
-#include <inttypes.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
-#include <unistd.h>
+#ifdef UNIX
+#include <inttypes.h>
+#include <arpa/inet.h>
+#include <unistd.h>
#include "config.h"
-#include "org_apache_hadoop.h"
-#include "org_apache_hadoop_util_NativeCrc32.h"
#include "gcc_optimizations.h"
+#endif // UNIX
+
#include "bulk_crc32.h"
static void throw_checksum_exception(JNIEnv *env,
@@ -36,6 +40,9 @@ static void throw_checksum_exception(JNI
char message[1024];
jstring jstr_message;
char *filename;
+ jclass checksum_exception_clazz;
+ jmethodID checksum_exception_ctor;
+ jthrowable obj;
// Get filename as C string, or "null" if not provided
if (j_filename == NULL) {
@@ -50,28 +57,38 @@ static void throw_checksum_exception(JNI
}
// Format error message
+#ifdef WINDOWS
+ _snprintf_s(
+ message,
+ sizeof(message),
+ _TRUNCATE,
+ "Checksum error: %s at %I64d exp: %d got: %d",
+ filename, pos, expected_crc, got_crc);
+#else
snprintf(message, sizeof(message),
"Checksum error: %s at %"PRId64" exp: %"PRId32" got: %"PRId32,
filename, pos, expected_crc, got_crc);
+#endif // WINDOWS
+
if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL) {
goto cleanup;
}
// Throw exception
- jclass checksum_exception_clazz = (*env)->FindClass(
+ checksum_exception_clazz = (*env)->FindClass(
env, "org/apache/hadoop/fs/ChecksumException");
if (checksum_exception_clazz == NULL) {
goto cleanup;
}
- jmethodID checksum_exception_ctor = (*env)->GetMethodID(env,
+ checksum_exception_ctor = (*env)->GetMethodID(env,
checksum_exception_clazz, "<init>",
"(Ljava/lang/String;J)V");
if (checksum_exception_ctor == NULL) {
goto cleanup;
}
- jthrowable obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
+ obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
checksum_exception_ctor, jstr_message, pos);
if (obj == NULL) goto cleanup;
@@ -103,6 +120,14 @@ JNIEXPORT void JNICALL Java_org_apache_h
jobject j_data, jint data_offset, jint data_len,
jstring j_filename, jlong base_pos)
{
+ uint8_t *sums_addr;
+ uint8_t *data_addr;
+ uint32_t *sums;
+ uint8_t *data;
+ int crc_type;
+ crc32_error_t error_data;
+ int ret;
+
if (unlikely(!j_sums || !j_data)) {
THROW(env, "java/lang/NullPointerException",
"input ByteBuffers must not be null");
@@ -110,8 +135,8 @@ JNIEXPORT void JNICALL Java_org_apache_h
}
// Convert direct byte buffers to C pointers
- uint8_t *sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
- uint8_t *data_addr = (*env)->GetDirectBufferAddress(env, j_data);
+ sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
+ data_addr = (*env)->GetDirectBufferAddress(env, j_data);
if (unlikely(!sums_addr || !data_addr)) {
THROW(env, "java/lang/IllegalArgumentException",
@@ -129,16 +154,15 @@ JNIEXPORT void JNICALL Java_org_apache_h
return;
}
- uint32_t *sums = (uint32_t *)(sums_addr + sums_offset);
- uint8_t *data = data_addr + data_offset;
+ sums = (uint32_t *)(sums_addr + sums_offset);
+ data = data_addr + data_offset;
// Convert to correct internal C constant for CRC type
- int crc_type = convert_java_crc_type(env, j_crc_type);
+ crc_type = convert_java_crc_type(env, j_crc_type);
if (crc_type == -1) return; // exception already thrown
// Setup complete. Actually verify checksums.
- crc32_error_t error_data;
- int ret = bulk_verify_crc(data, data_len, sums, crc_type,
+ ret = bulk_verify_crc(data, data_len, sums, crc_type,
bytes_per_checksum, &error_data);
if (likely(ret == CHECKSUMS_VALID)) {
return;
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c Wed Dec 12 19:15:26 2012
@@ -21,25 +21,31 @@
* All rights reserved. Use of this source code is governed by a
* BSD-style license that can be found in the LICENSE file.
*/
+
+#include "org_apache_hadoop.h"
+
#include <assert.h>
-#include <arpa/inet.h>
#include <errno.h>
#include <stdint.h>
+
+#ifdef UNIX
+#include <arpa/inet.h>
#include <unistd.h>
+#endif // UNIX
#include "crc32_zlib_polynomial_tables.h"
#include "crc32c_tables.h"
#include "bulk_crc32.h"
#include "gcc_optimizations.h"
-#ifndef __FreeBSD__
+#if (!defined(__FreeBSD__) && !defined(WINDOWS))
#define USE_PIPELINED
#endif
#define CRC_INITIAL_VAL 0xffffffff
typedef uint32_t (*crc_update_func_t)(uint32_t, const uint8_t *, size_t);
-static inline uint32_t crc_val(uint32_t crc);
+static uint32_t crc_val(uint32_t crc);
static uint32_t crc32_zlib_sb8(uint32_t crc, const uint8_t *buf, size_t length);
static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length);
@@ -187,7 +193,7 @@ return_crc_error:
/**
* Extract the final result of a CRC
*/
-static inline uint32_t crc_val(uint32_t crc) {
+uint32_t crc_val(uint32_t crc) {
return ~crc;
}
@@ -200,11 +206,13 @@ static uint32_t crc32c_sb8(uint32_t crc,
uint32_t end_bytes = length - running_length;
int li;
for (li=0; li < running_length/8; li++) {
+ uint32_t term1;
+ uint32_t term2;
crc ^= *(uint32_t *)buf;
buf += 4;
- uint32_t term1 = CRC32C_T8_7[crc & 0x000000FF] ^
+ term1 = CRC32C_T8_7[crc & 0x000000FF] ^
CRC32C_T8_6[(crc >> 8) & 0x000000FF];
- uint32_t term2 = crc >> 16;
+ term2 = crc >> 16;
crc = term1 ^
CRC32C_T8_5[term2 & 0x000000FF] ^
CRC32C_T8_4[(term2 >> 8) & 0x000000FF];
@@ -234,11 +242,13 @@ static uint32_t crc32_zlib_sb8(
uint32_t end_bytes = length - running_length;
int li;
for (li=0; li < running_length/8; li++) {
+ uint32_t term1;
+ uint32_t term2;
crc ^= *(uint32_t *)buf;
buf += 4;
- uint32_t term1 = CRC32_T8_7[crc & 0x000000FF] ^
+ term1 = CRC32_T8_7[crc & 0x000000FF] ^
CRC32_T8_6[(crc >> 8) & 0x000000FF];
- uint32_t term2 = crc >> 16;
+ term2 = crc >> 16;
crc = term1 ^
CRC32_T8_5[term2 & 0x000000FF] ^
CRC32_T8_4[(term2 >> 8) & 0x000000FF];
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h Wed Dec 12 19:15:26 2012
@@ -19,7 +19,10 @@
#define BULK_CRC32_H_INCLUDED
#include <stdint.h>
+
+#ifdef UNIX
#include <unistd.h> /* for size_t */
+#endif // UNIX
// Constants for different CRC algorithms
#define CRC32C_POLYNOMIAL 1
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h Wed Dec 12 19:15:26 2012
@@ -17,19 +17,22 @@
*/
/**
- * This file includes some common utilities
+ * This file includes some common utilities
* for all native code used in hadoop.
*/
-
+
#if !defined ORG_APACHE_HADOOP_H
#define ORG_APACHE_HADOOP_H
-#include <dlfcn.h>
-#include <jni.h>
-
-#include "config.h"
+#if defined(_WIN32)
+#undef UNIX
+#define WINDOWS
+#else
+#undef WINDOWS
+#define UNIX
+#endif
-/* A helper macro to 'throw' a java exception. */
+/* A helper macro to 'throw' a java exception. */
#define THROW(env, exception_name, message) \
{ \
jclass ecls = (*env)->FindClass(env, exception_name); \
@@ -55,13 +58,21 @@
if ((*env)->ExceptionCheck(env)) return (ret); \
}
-/**
- * A helper function to dlsym a 'symbol' from a given library-handle.
- *
+/**
+ * Unix definitions
+ */
+#ifdef UNIX
+#include <config.h>
+#include <dlfcn.h>
+#include <jni.h>
+
+/**
+ * A helper function to dlsym a 'symbol' from a given library-handle.
+ *
* @param env jni handle to report contingencies.
* @param handle handle to the dlopen'ed library.
* @param symbol symbol to load.
- * @return returns the address where the symbol is loaded in memory,
+ * @return returns the address where the symbol is loaded in memory,
* <code>NULL</code> on error.
*/
static __attribute__ ((unused))
@@ -84,6 +95,76 @@ void *do_dlsym(JNIEnv *env, void *handle
if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
return; \
}
+#endif
+// Unix part end
+
+
+/**
+ * Windows definitions
+ */
+#ifdef WINDOWS
+
+/* Force using Unicode throughout the code */
+#ifndef UNICODE
+#define UNICODE
+#endif
+
+/* Microsoft C Compiler does not support the C99 inline keyword */
+#ifndef __cplusplus
+#define inline __inline
+#endif // __cplusplus
+
+/* Optimization macros supported by GCC but for which there is no
+ direct equivalent in the Microsoft C compiler */
+#define likely(_c) (_c)
+#define unlikely(_c) (_c)
+
+/* Disable certain warnings in the native CRC32 code. */
+#pragma warning(disable:4018) // Signed/unsigned mismatch.
+#pragma warning(disable:4244) // Possible loss of data in conversion.
+#pragma warning(disable:4267) // Possible loss of data.
+#pragma warning(disable:4996) // Use of deprecated function.
+
+#include <Windows.h>
+#include <stdio.h>
+#include <jni.h>
+
+#define snprintf(str, size, fmt, ...) \
+  _snprintf_s((str), (size), _TRUNCATE, (fmt), __VA_ARGS__)
+
+/* A helper macro to dlsym the requisite dynamic symbol and bail-out on error. */
+#define LOAD_DYNAMIC_SYMBOL(func_type, func_ptr, env, handle, symbol) \
+ if ((func_ptr = (func_type) do_dlsym(env, handle, symbol)) == NULL) { \
+ return; \
+ }
+
+/**
+ * A helper function to dynamic load a 'symbol' from a given library-handle.
+ *
+ * @param env jni handle to report contingencies.
+ * @param handle handle to the dynamic library.
+ * @param symbol symbol to load.
+ * @return returns the address where the symbol is loaded in memory,
+ * <code>NULL</code> on error.
+ */
+static FARPROC WINAPI do_dlsym(JNIEnv *env, HMODULE handle, LPCSTR symbol) {
+ DWORD dwErrorCode = ERROR_SUCCESS;
+ FARPROC func_ptr = NULL;
+
+ if (!env || !handle || !symbol) {
+ THROW(env, "java/lang/InternalError", NULL);
+ return NULL;
+ }
+
+ func_ptr = GetProcAddress(handle, symbol);
+ if (func_ptr == NULL)
+ {
+ THROW(env, "java/lang/UnsatisfiedLinkError", symbol);
+ }
+ return func_ptr;
+}
+#endif
+// Windows part end
+
#define LOCK_CLASS(env, clazz, classname) \
if ((*env)->MonitorEnter(env, clazz) != 0) { \
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c Wed Dec 12 19:15:26 2012
@@ -16,6 +16,8 @@
* limitations under the License.
*/
+#include "org_apache_hadoop.h"
+
#include "bulk_crc32.h"
#include <stdint.h>
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.c Wed Dec 12 19:15:26 2012
@@ -145,7 +145,7 @@ static BOOL IsPrefixedAlready(__in PCWST
{
static const PCWSTR LongPathPrefix = L"\\\\?\\";
int Prefixlen = (int)wcslen(LongPathPrefix);
- int i = 0;
+ size_t i = 0;
if (path == NULL || wcslen(path) < Prefixlen)
{
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.vcxproj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.vcxproj?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.vcxproj (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/libwinutils.vcxproj Wed Dec 12 19:15:26 2012
@@ -70,12 +70,16 @@
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<LinkIncremental>true</LinkIncremental>
+ <OutDir />
+ <IntDir>..\..\..\target\winutils\$(Configuration)\</IntDir>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<LinkIncremental>false</LinkIncremental>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<LinkIncremental>false</LinkIncremental>
+ <OutDir>..\..\..\target\bin\</OutDir>
+ <IntDir>..\..\..\target\winutils\$(Platform)\$(Configuration)\</IntDir>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
@@ -146,4 +150,4 @@
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
<ImportGroup Label="ExtensionTargets">
</ImportGroup>
-</Project>
+</Project>
\ No newline at end of file
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/winutils.sln Wed Dec 12 19:15:26 2012
@@ -1,39 +1,39 @@
-
-Microsoft Visual Studio Solution File, Format Version 11.00
-# Visual Studio 2010
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winutils", "winutils.vcxproj", "{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}"
- ProjectSection(ProjectDependencies) = postProject
- {12131AA7-902E-4A6D-9CE3-043261D22A12} = {12131AA7-902E-4A6D-9CE3-043261D22A12}
- EndProjectSection
-EndProject
-Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libwinutils", "libwinutils.vcxproj", "{12131AA7-902E-4A6D-9CE3-043261D22A12}"
-EndProject
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- Debug|Win32 = Debug|Win32
- Debug|x64 = Debug|x64
- Release|Win32 = Release|Win32
- Release|x64 = Release|x64
- EndGlobalSection
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
- {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.ActiveCfg = Debug|x64
- {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.Build.0 = Debug|x64
- {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.ActiveCfg = Debug|x64
- {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.Build.0 = Debug|x64
- {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.ActiveCfg = Release|Win32
- {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.Build.0 = Release|Win32
- {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.ActiveCfg = Release|x64
- {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.Build.0 = Release|x64
- {12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.ActiveCfg = Debug|x64
- {12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.Build.0 = Debug|x64
- {12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.ActiveCfg = Debug|x64
- {12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.Build.0 = Debug|x64
- {12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.ActiveCfg = Release|Win32
- {12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.Build.0 = Release|Win32
- {12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.ActiveCfg = Release|x64
- {12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.Build.0 = Release|x64
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
-EndGlobal
+
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual Studio 2010
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "winutils", "winutils.vcxproj", "{D94B3BD7-39CC-47A0-AE9A-353FDE506F33}"
+ ProjectSection(ProjectDependencies) = postProject
+ {12131AA7-902E-4A6D-9CE3-043261D22A12} = {12131AA7-902E-4A6D-9CE3-043261D22A12}
+ EndProjectSection
+EndProject
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "libwinutils", "libwinutils.vcxproj", "{12131AA7-902E-4A6D-9CE3-043261D22A12}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Win32 = Debug|Win32
+ Debug|x64 = Debug|x64
+ Release|Win32 = Release|Win32
+ Release|x64 = Release|x64
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.ActiveCfg = Debug|x64
+ {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|Win32.Build.0 = Debug|x64
+ {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.ActiveCfg = Debug|x64
+ {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Debug|x64.Build.0 = Debug|x64
+ {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.ActiveCfg = Release|Win32
+ {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|Win32.Build.0 = Release|Win32
+ {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.ActiveCfg = Release|x64
+ {D94B3BD7-39CC-47A0-AE9A-353FDE506F33}.Release|x64.Build.0 = Release|x64
+ {12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.ActiveCfg = Debug|x64
+ {12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|Win32.Build.0 = Debug|x64
+ {12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.ActiveCfg = Debug|x64
+ {12131AA7-902E-4A6D-9CE3-043261D22A12}.Debug|x64.Build.0 = Debug|x64
+ {12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.ActiveCfg = Release|Win32
+ {12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|Win32.Build.0 = Release|Win32
+ {12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.ActiveCfg = Release|x64
+ {12131AA7-902E-4A6D-9CE3-043261D22A12}.Release|x64.Build.0 = Release|x64
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/main/winutils/winutils.vcxproj Wed Dec 12 19:15:26 2012
@@ -70,12 +70,16 @@
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<LinkIncremental>true</LinkIncremental>
+ <OutDir />
+ <IntDir>..\..\..\target\winutils\$(Configuration)\</IntDir>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
<LinkIncremental>false</LinkIncremental>
</PropertyGroup>
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<LinkIncremental>false</LinkIncremental>
+ <IntDir>..\..\..\target\winutils\$(Platform)\$(Configuration)\</IntDir>
+ <OutDir>..\..\..\target\bin\</OutDir>
</PropertyGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
<ClCompile>
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java Wed Dec 12 19:15:26 2012
@@ -29,6 +29,8 @@ import org.apache.hadoop.util.Shell;
import junit.framework.TestCase;
+import static org.junit.Assert.fail;
+
public class TestPath extends TestCase {
public void testToString() {
toStringTest("/");
@@ -165,6 +167,37 @@ public class TestPath extends TestCase {
assertEquals(new Path("foo/bar/baz","../../../../..").toString(), "../..");
}
+ /** Test that Windows paths are correctly handled */
+ public void testWindowsPaths() throws URISyntaxException, IOException {
+ if (!Path.WINDOWS) {
+ return;
+ }
+
+ assertEquals(new Path("c:\\foo\\bar").toString(), "c:/foo/bar");
+ assertEquals(new Path("c:/foo/bar").toString(), "c:/foo/bar");
+ assertEquals(new Path("/c:/foo/bar").toString(), "c:/foo/bar");
+ assertEquals(new Path("file://c:/foo/bar").toString(), "file://c:/foo/bar");
+ }
+
+ /** Test invalid paths on Windows are correctly rejected */
+ public void testInvalidWindowsPaths() throws URISyntaxException, IOException {
+ if (!Path.WINDOWS) {
+ return;
+ }
+
+ String [] invalidPaths = {
+ "hdfs:\\\\\\tmp"
+ };
+
+ for (String path : invalidPaths) {
+ try {
+ Path item = new Path(path);
+ fail("Did not throw for invalid path " + path);
+ } catch (IllegalArgumentException iae) {
+ }
+ }
+ }
+
/** Test Path objects created from other Path objects */
public void testChildParentResolution() throws URISyntaxException, IOException {
Path parent = new Path("foo1://bar1/baz1");
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java Wed Dec 12 19:15:26 2012
@@ -19,8 +19,10 @@ package org.apache.hadoop.fs.shell;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import java.io.File;
+import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
@@ -122,18 +124,46 @@ public class TestPathData {
}
// Can we handle raw Windows paths? The files need not exist for
- // the tests to succeed.
+ // these tests to succeed.
String[] winPaths = {
"n:\\",
"N:\\",
"N:\\foo",
- "N:\\foo\\bar"
+ "N:\\foo\\bar",
+ "N:/",
+ "N:/foo",
+ "N:/foo/bar"
};
+ PathData item;
+
for (String path : winPaths) {
- PathData item = new PathData(path, conf);
+ item = new PathData(path, conf);
assertEquals(new File(path), item.toFile());
}
+
+ item = new PathData("foo\\bar", conf);
+ assertEquals(new File(testDir + "\\foo\\bar"), item.toFile());
+ }
+
+ @Test
+ public void testInvalidWindowsPath() throws Exception {
+ if (!Path.WINDOWS) {
+ return;
+ }
+
+ // Verify that the following invalid paths are rejected.
+ String [] winPaths = {
+ "N:\\foo/bar"
+ };
+
+ for (String path : winPaths) {
+ try {
+ PathData item = new PathData(path, conf);
+ fail("Did not throw for invalid path " + path);
+ } catch (IOException ioe) {
+ }
+ }
}
@Test
Modified: hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1420921&r1=1420920&r2=1420921&view=diff
==============================================================================
--- hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java (original)
+++ hadoop/common/branches/branch-trunk-win/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java Wed Dec 12 19:15:26 2012
@@ -21,6 +21,8 @@ import java.io.File;
import java.io.FileDescriptor;
import java.io.FileInputStream;
import java.io.FileOutputStream;
+import java.io.FileReader;
+import java.io.FileWriter;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicReference;
import java.util.ArrayList;
@@ -59,9 +61,13 @@ public class TestNativeIO {
@Test
public void testFstat() throws Exception {
+ if (Path.WINDOWS) {
+ return;
+ }
+
FileOutputStream fos = new FileOutputStream(
new File(TEST_DIR, "testfstat"));
- NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
+ NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
fos.close();
LOG.info("Stat: " + String.valueOf(stat));
@@ -69,7 +75,8 @@ public class TestNativeIO {
assertNotNull(stat.getGroup());
assertTrue(!stat.getGroup().isEmpty());
assertEquals("Stat mode field should indicate a regular file",
- NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
+ NativeIO.POSIX.Stat.S_IFREG,
+ stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
}
/**
@@ -80,6 +87,10 @@ public class TestNativeIO {
*/
@Test
public void testMultiThreadedFstat() throws Exception {
+ if (Path.WINDOWS) {
+ return;
+ }
+
final FileOutputStream fos = new FileOutputStream(
new File(TEST_DIR, "testfstat"));
@@ -93,12 +104,13 @@ public class TestNativeIO {
long et = Time.now() + 5000;
while (Time.now() < et) {
try {
- NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
+ NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
assertEquals(System.getProperty("user.name"), stat.getOwner());
assertNotNull(stat.getGroup());
assertTrue(!stat.getGroup().isEmpty());
assertEquals("Stat mode field should indicate a regular file",
- NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
+ NativeIO.POSIX.Stat.S_IFREG,
+ stat.getMode() & NativeIO.POSIX.Stat.S_IFMT);
} catch (Throwable t) {
thrown.set(t);
}
@@ -121,11 +133,15 @@ public class TestNativeIO {
@Test
public void testFstatClosedFd() throws Exception {
+ if (Path.WINDOWS) {
+ return;
+ }
+
FileOutputStream fos = new FileOutputStream(
new File(TEST_DIR, "testfstat2"));
fos.close();
try {
- NativeIO.Stat stat = NativeIO.getFstat(fos.getFD());
+ NativeIO.POSIX.Stat stat = NativeIO.POSIX.getFstat(fos.getFD());
} catch (NativeIOException nioe) {
LOG.info("Got expected exception", nioe);
assertEquals(Errno.EBADF, nioe.getErrno());
@@ -133,12 +149,105 @@ public class TestNativeIO {
}
@Test
+ public void testSetFilePointer() throws Exception {
+ if (!Path.WINDOWS) {
+ return;
+ }
+
+ LOG.info("Set a file pointer on Windows");
+ try {
+ File testfile = new File(TEST_DIR, "testSetFilePointer");
+ assertTrue("Create test subject",
+ testfile.exists() || testfile.createNewFile());
+ FileWriter writer = new FileWriter(testfile);
+ try {
+ for (int i = 0; i < 200; i++)
+ if (i < 100)
+ writer.write('a');
+ else
+ writer.write('b');
+ writer.flush();
+ } catch (Exception writerException) {
+ fail("Got unexpected exception: " + writerException.getMessage());
+ } finally {
+ writer.close();
+ }
+
+ FileDescriptor fd = NativeIO.Windows.createFile(
+ testfile.getCanonicalPath(),
+ NativeIO.Windows.GENERIC_READ,
+ NativeIO.Windows.FILE_SHARE_READ |
+ NativeIO.Windows.FILE_SHARE_WRITE |
+ NativeIO.Windows.FILE_SHARE_DELETE,
+ NativeIO.Windows.OPEN_EXISTING);
+ NativeIO.Windows.setFilePointer(fd, 120, NativeIO.Windows.FILE_BEGIN);
+ FileReader reader = new FileReader(fd);
+ try {
+ int c = reader.read();
+ assertTrue("Unexpected character: " + c, c == 'b');
+ } catch (Exception readerException) {
+ fail("Got unexpected exception: " + readerException.getMessage());
+ } finally {
+ reader.close();
+ }
+ } catch (Exception e) {
+ fail("Got unexpected exception: " + e.getMessage());
+ }
+ }
+
+ @Test
+ public void testCreateFile() throws Exception {
+ if (!Path.WINDOWS) {
+ return;
+ }
+
+ LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
+ try {
+ File testfile = new File(TEST_DIR, "testCreateFile");
+ assertTrue("Create test subject",
+ testfile.exists() || testfile.createNewFile());
+
+ FileDescriptor fd = NativeIO.Windows.createFile(
+ testfile.getCanonicalPath(),
+ NativeIO.Windows.GENERIC_READ,
+ NativeIO.Windows.FILE_SHARE_READ |
+ NativeIO.Windows.FILE_SHARE_WRITE |
+ NativeIO.Windows.FILE_SHARE_DELETE,
+ NativeIO.Windows.OPEN_EXISTING);
+
+ FileInputStream fin = new FileInputStream(fd);
+ try {
+ fin.read();
+
+ File newfile = new File(TEST_DIR, "testRenamedFile");
+
+ boolean renamed = testfile.renameTo(newfile);
+ assertTrue("Rename failed.", renamed);
+
+ fin.read();
+ } catch (Exception e) {
+ fail("Got unexpected exception: " + e.getMessage());
+ }
+ finally {
+ fin.close();
+ }
+ } catch (Exception e) {
+ fail("Got unexpected exception: " + e.getMessage());
+ }
+
+ }
+
+ @Test
public void testOpenMissingWithoutCreate() throws Exception {
+ if (Path.WINDOWS) {
+ return;
+ }
+
LOG.info("Open a missing file without O_CREAT and it should fail");
try {
- FileDescriptor fd = NativeIO.open(
+ FileDescriptor fd = NativeIO.POSIX.open(
new File(TEST_DIR, "doesntexist").getAbsolutePath(),
- NativeIO.O_WRONLY, 0700);
+ NativeIO.POSIX.O_WRONLY, 0700);
fail("Able to open a new file without O_CREAT");
} catch (NativeIOException nioe) {
LOG.info("Got expected exception", nioe);
@@ -148,10 +257,14 @@ public class TestNativeIO {
@Test
public void testOpenWithCreate() throws Exception {
+ if (Path.WINDOWS) {
+ return;
+ }
+
LOG.info("Test creating a file with O_CREAT");
- FileDescriptor fd = NativeIO.open(
+ FileDescriptor fd = NativeIO.POSIX.open(
new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
- NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+ NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
assertNotNull(true);
assertTrue(fd.valid());
FileOutputStream fos = new FileOutputStream(fd);
@@ -162,9 +275,9 @@ public class TestNativeIO {
LOG.info("Test exclusive create");
try {
- fd = NativeIO.open(
+ fd = NativeIO.POSIX.open(
new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
- NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0700);
+ NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT | NativeIO.POSIX.O_EXCL, 0700);
fail("Was able to create existing file with O_EXCL");
} catch (NativeIOException nioe) {
LOG.info("Got expected exception for failed exclusive create", nioe);
@@ -178,10 +291,14 @@ public class TestNativeIO {
*/
@Test
public void testFDDoesntLeak() throws IOException {
+ if (Path.WINDOWS) {
+ return;
+ }
+
for (int i = 0; i < 10000; i++) {
- FileDescriptor fd = NativeIO.open(
+ FileDescriptor fd = NativeIO.POSIX.open(
new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
- NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+ NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0700);
assertNotNull(true);
assertTrue(fd.valid());
FileOutputStream fos = new FileOutputStream(fd);
@@ -195,8 +312,12 @@ public class TestNativeIO {
*/
@Test
public void testChmod() throws Exception {
+ if (Path.WINDOWS) {
+ return;
+ }
+
try {
- NativeIO.chmod("/this/file/doesnt/exist", 777);
+ NativeIO.POSIX.chmod("/this/file/doesnt/exist", 777);
fail("Chmod of non-existent file didn't fail");
} catch (NativeIOException nioe) {
assertEquals(Errno.ENOENT, nioe.getErrno());
@@ -205,21 +326,26 @@ public class TestNativeIO {
File toChmod = new File(TEST_DIR, "testChmod");
assertTrue("Create test subject",
toChmod.exists() || toChmod.mkdir());
- NativeIO.chmod(toChmod.getAbsolutePath(), 0777);
+ NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0777);
assertPermissions(toChmod, 0777);
- NativeIO.chmod(toChmod.getAbsolutePath(), 0000);
+ NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0000);
assertPermissions(toChmod, 0000);
- NativeIO.chmod(toChmod.getAbsolutePath(), 0644);
+ NativeIO.POSIX.chmod(toChmod.getAbsolutePath(), 0644);
assertPermissions(toChmod, 0644);
}
@Test
public void testPosixFadvise() throws Exception {
+ if (Path.WINDOWS) {
+ return;
+ }
+
FileInputStream fis = new FileInputStream("/dev/zero");
try {
- NativeIO.posix_fadvise(fis.getFD(), 0, 0,
- NativeIO.POSIX_FADV_SEQUENTIAL);
+ NativeIO.POSIX.posix_fadvise(
+ fis.getFD(), 0, 0,
+ NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
} catch (UnsupportedOperationException uoe) {
// we should just skip the unit test on machines where we don't
// have fadvise support
@@ -232,8 +358,9 @@ public class TestNativeIO {
}
try {
- NativeIO.posix_fadvise(fis.getFD(), 0, 1024,
- NativeIO.POSIX_FADV_SEQUENTIAL);
+ NativeIO.POSIX.posix_fadvise(
+ fis.getFD(), 0, 1024,
+ NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
fail("Did not throw on bad file");
} catch (NativeIOException nioe) {
@@ -241,8 +368,9 @@ public class TestNativeIO {
}
try {
- NativeIO.posix_fadvise(null, 0, 1024,
- NativeIO.POSIX_FADV_SEQUENTIAL);
+ NativeIO.POSIX.posix_fadvise(
+ null, 0, 1024,
+ NativeIO.POSIX.POSIX_FADV_SEQUENTIAL);
fail("Did not throw on null file");
} catch (NullPointerException npe) {
@@ -256,8 +384,9 @@ public class TestNativeIO {
new File(TEST_DIR, "testSyncFileRange"));
try {
fos.write("foo".getBytes());
- NativeIO.sync_file_range(fos.getFD(), 0, 1024,
- NativeIO.SYNC_FILE_RANGE_WRITE);
+ NativeIO.POSIX.sync_file_range(
+ fos.getFD(), 0, 1024,
+ NativeIO.POSIX.SYNC_FILE_RANGE_WRITE);
// no way to verify that this actually has synced,
// but if it doesn't throw, we can assume it worked
} catch (UnsupportedOperationException uoe) {
@@ -268,8 +397,9 @@ public class TestNativeIO {
fos.close();
}
try {
- NativeIO.sync_file_range(fos.getFD(), 0, 1024,
- NativeIO.SYNC_FILE_RANGE_WRITE);
+ NativeIO.POSIX.sync_file_range(
+ fos.getFD(), 0, 1024,
+ NativeIO.POSIX.SYNC_FILE_RANGE_WRITE);
fail("Did not throw on bad file");
} catch (NativeIOException nioe) {
assertEquals(Errno.EBADF, nioe.getErrno());
@@ -285,12 +415,20 @@ public class TestNativeIO {
@Test
public void testGetUserName() throws IOException {
- assertFalse(NativeIO.getUserName(0).isEmpty());
+ if (Path.WINDOWS) {
+ return;
+ }
+
+ assertFalse(NativeIO.POSIX.getUserName(0).isEmpty());
}
@Test
public void testGetGroupName() throws IOException {
- assertFalse(NativeIO.getGroupName(0).isEmpty());
+ if (Path.WINDOWS) {
+ return;
+ }
+
+ assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty());
}
}