You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/08 05:39:50 UTC
svn commit: r1079148 [3/3] - in /hadoop/common/branches/yahoo-merge: ./
src/java/org/apache/hadoop/io/ src/java/org/apache/hadoop/io/nativeio/
src/native/ src/native/lib/ src/native/src/
src/native/src/org/apache/hadoop/io/nativeio/ src/test/core/org/a...
Modified: hadoop/common/branches/yahoo-merge/src/native/configure.ac
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/native/configure.ac?rev=1079148&r1=1079147&r2=1079148&view=diff
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/native/configure.ac (original)
+++ hadoop/common/branches/yahoo-merge/src/native/configure.ac Tue Mar 8 04:39:49 2011
@@ -38,6 +38,7 @@ AC_INIT(src/org_apache_hadoop.h)
AC_CONFIG_SRCDIR([src/org_apache_hadoop.h])
AC_CONFIG_AUX_DIR(config)
AC_CONFIG_HEADER([config.h])
+AC_SYS_LARGEFILE
AM_INIT_AUTOMAKE(hadoop,1.0.0)
@@ -95,6 +96,9 @@ AC_C_CONST
# Checks for library functions.
AC_CHECK_FUNCS([memset])
+# Check for nonstandard STRERROR_R
+AC_FUNC_STRERROR_R
+
AC_CONFIG_FILES([Makefile])
AC_OUTPUT
Modified: hadoop/common/branches/yahoo-merge/src/native/lib/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/native/lib/Makefile.am?rev=1079148&r1=1079147&r2=1079148&view=diff
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/native/lib/Makefile.am (original)
+++ hadoop/common/branches/yahoo-merge/src/native/lib/Makefile.am Tue Mar 8 04:39:49 2011
@@ -36,7 +36,7 @@ AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_
lib_LTLIBRARIES = libhadoop.la
libhadoop_la_SOURCES =
-libhadoop_la_LDFLAGS = -version-info 1:0:0
+libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
libhadoop_la_LIBADD = $(HADOOP_OBJS) -ldl -ljvm
#
Added: hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1079148&view=auto
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (added)
+++ hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Tue Mar 8 04:39:49 2011
@@ -0,0 +1,277 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+// get the autoconf settings
+#include "config.h"
+
+#include <assert.h>
+#include <errno.h>
+#include <fcntl.h>
+#include <grp.h>
+#include <jni.h>
+#include <pwd.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/stat.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_io_nativeio_NativeIO.h"
+#include "file_descriptor.h"
+#include "errno_enum.h"
+
+// the NativeIO$Stat inner class and its constructor
+static jclass stat_clazz;
+static jmethodID stat_ctor;
+
+// the NativeIOException class and its constructor
+static jclass nioe_clazz;
+static jmethodID nioe_ctor;
+
+// Internal functions
+static void throw_ioe(JNIEnv* env, int errnum);
+static ssize_t get_pw_buflen();
+
+
+// Resolve and cache a JNI global ref to the NativeIO$Stat inner class and
+// its (String owner, String group, int mode) constructor. Called once from
+// initNative(); relies on that caller for synchronization.
+// NOTE(review): NewGlobalRef and GetMethodID results are not checked here;
+// caller-side PASS_EXCEPTIONS after this call is what catches failures.
+static void stat_init(JNIEnv *env) {
+ // Init Stat
+ jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
+ PASS_EXCEPTIONS(env);
+ stat_clazz = (*env)->NewGlobalRef(env, clazz);
+ stat_ctor = (*env)->GetMethodID(env, stat_clazz, "<init>",
+ "(Ljava/lang/String;Ljava/lang/String;I)V");
+}
+
+// Release the cached Stat global ref. Idempotent: safe to call even if
+// stat_init never ran or already failed (stat_clazz stays NULL).
+static void stat_deinit(JNIEnv *env) {
+ if (stat_clazz != NULL) {
+ (*env)->DeleteGlobalRef(env, stat_clazz);
+ stat_clazz = NULL;
+ }
+}
+
+// Resolve and cache a global ref to NativeIOException and its
+// (String message, Errno errno) constructor.
+// NOTE(review): the GetMethodID result (nioe_ctor) is not checked; if the
+// constructor signature ever changes, throw_ioe would call NewObject with a
+// NULL methodID — undefined behavior. Confirm against the Java class.
+static void nioe_init(JNIEnv *env) {
+ // Init NativeIOException
+ nioe_clazz = (*env)->FindClass(
+ env, "org/apache/hadoop/io/nativeio/NativeIOException");
+ PASS_EXCEPTIONS(env);
+
+ nioe_clazz = (*env)->NewGlobalRef(env, nioe_clazz);
+ nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
+ "(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V");
+}
+
+// Release the cached NativeIOException global ref and clear the cached
+// constructor ID. Idempotent, like the other *_deinit helpers.
+static void nioe_deinit(JNIEnv *env) {
+ if (nioe_clazz != NULL) {
+ (*env)->DeleteGlobalRef(env, nioe_clazz);
+ nioe_clazz = NULL;
+ }
+ nioe_ctor = NULL;
+}
+
+/*
+ * private static native void initNative();
+ *
+ * We rely on this function rather than lazy initialization because
+ * the lazy approach may have a race if multiple callers try to
+ * init at the same time.
+ *
+ * Initializes all cached class/method/field IDs used by this library.
+ * On any failure, all sub-initializers are torn down so a later retry
+ * starts from a clean state, and the pending Java exception is left set.
+ */
+JNIEXPORT void JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_initNative(
+ JNIEnv *env, jclass clazz) {
+
+ stat_init(env);
+ PASS_EXCEPTIONS_GOTO(env, error);
+ nioe_init(env);
+ PASS_EXCEPTIONS_GOTO(env, error);
+ fd_init(env);
+ PASS_EXCEPTIONS_GOTO(env, error);
+ errno_enum_init(env);
+ PASS_EXCEPTIONS_GOTO(env, error);
+ return;
+error:
+ // these are all idempotent and safe to call even if the
+ // class wasn't initted yet
+ stat_deinit(env);
+ nioe_deinit(env);
+ fd_deinit(env);
+ errno_enum_deinit(env);
+}
+
+/*
+ * public static native Stat fstat(FileDescriptor fd);
+ *
+ * fstat(2)s the given open descriptor and returns a NativeIO$Stat holding
+ * the owner name, group name, and mode. Throws NativeIOException (with the
+ * mapped Errno) on syscall failure, or OutOfMemoryError if the pw/group
+ * lookup buffer cannot be allocated. Returns NULL with an exception pending
+ * on any error path.
+ *
+ * The getpwuid_r/getgrgid_r loops retry with a doubled buffer on ERANGE,
+ * per the POSIX reentrant-lookup convention.
+ *
+ * NOTE(review): if the file's uid/gid has no passwd/group entry, the
+ * lookup returns 0 with a NULL result pointer; the assert below then fires
+ * (or, in an NDEBUG build, pwd.pw_name is read uninitialized). Consider
+ * handling the not-found case explicitly — confirm desired behavior.
+ */
+JNIEXPORT jobject JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
+ JNIEnv *env, jclass clazz, jobject fd_object)
+{
+ jobject ret = NULL;
+ char *pw_buf = NULL;
+
+ int fd = fd_get(env, fd_object);
+ PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+ struct stat s;
+ int rc = fstat(fd, &s);
+ if (rc != 0) {
+ throw_ioe(env, errno);
+ goto cleanup;
+ }
+
+ size_t pw_buflen = get_pw_buflen();
+ if ((pw_buf = malloc(pw_buflen)) == NULL) {
+ THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+ goto cleanup;
+ }
+
+ // Grab username
+ struct passwd pwd, *pwdp;
+ while ((rc = getpwuid_r(s.st_uid, &pwd, pw_buf, pw_buflen, &pwdp)) != 0) {
+ if (rc != ERANGE) {
+ throw_ioe(env, rc);
+ goto cleanup;
+ }
+ free(pw_buf);
+ pw_buflen *= 2;
+ if ((pw_buf = malloc(pw_buflen)) == NULL) {
+ THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+ goto cleanup;
+ }
+ }
+ assert(pwdp == &pwd);
+
+ jstring jstr_username = (*env)->NewStringUTF(env, pwd.pw_name);
+ if (jstr_username == NULL) goto cleanup;
+
+ // Grab group
+ struct group grp, *grpp;
+ while ((rc = getgrgid_r(s.st_gid, &grp, pw_buf, pw_buflen, &grpp)) != 0) {
+ if (rc != ERANGE) {
+ throw_ioe(env, rc);
+ goto cleanup;
+ }
+ free(pw_buf);
+ pw_buflen *= 2;
+ if ((pw_buf = malloc(pw_buflen)) == NULL) {
+ THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for pw buffer");
+ goto cleanup;
+ }
+ }
+ assert(grpp == &grp);
+
+ jstring jstr_groupname = (*env)->NewStringUTF(env, grp.gr_name);
+ PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+ // Construct result
+ ret = (*env)->NewObject(env, stat_clazz, stat_ctor,
+ jstr_username, jstr_groupname, s.st_mode);
+
+cleanup:
+ if (pw_buf != NULL) free(pw_buf);
+ return ret;
+}
+
+
+/*
+ * public static native FileDescriptor open(String path, int flags, int mode);
+ *
+ * Wraps open(2): the three-argument form is used only when O_CREAT is set
+ * (mode is meaningless otherwise). On success returns a java.io.FileDescriptor
+ * wrapping the new fd; on failure throws NativeIOException carrying errno
+ * and returns NULL. The UTF chars for the path are always released.
+ *
+ * NOTE(review): the Java-side flag constants (NativeIO.O_*) are assumed to
+ * match this platform's <fcntl.h> values — confirm they are kept in sync.
+ */
+JNIEXPORT jobject JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_open(
+ JNIEnv *env, jclass clazz, jstring j_path,
+ jint flags, jint mode)
+{
+ jobject ret = NULL;
+
+ const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
+ if (path == NULL) goto cleanup; // JVM throws Exception for us
+
+ int fd;
+ if (flags & O_CREAT) {
+ fd = open(path, flags, mode);
+ } else {
+ fd = open(path, flags);
+ }
+
+ if (fd == -1) {
+ throw_ioe(env, errno);
+ goto cleanup;
+ }
+
+ ret = fd_create(env, fd);
+
+cleanup:
+ if (path != NULL) {
+ (*env)->ReleaseStringUTFChars(env, j_path, path);
+ }
+ return ret;
+}
+
+/*
+ * Throw a java.IO.IOException, generating the message from errno.
+ *
+ * Uses AC_FUNC_STRERROR_R's STRERROR_R_CHAR_P to select between the GNU
+ * strerror_r (returns char*, may ignore the buffer) and the POSIX variant
+ * (returns int, fills the buffer). Builds a NativeIOException with the
+ * message string and the errno_to_enum()-mapped Errno value, then throws it.
+ *
+ * NOTE(review): the err path calls ReleaseStringUTFChars on jstr_message,
+ * but `message` was never obtained via GetStringUTFChars on that string —
+ * this is invalid JNI usage and should simply be removed. Also, if the
+ * first NewStringUTF fails, control reaches err with jstr_message == NULL
+ * and no NativeIOException is thrown (only the pending OOM remains).
+ */
+static void throw_ioe(JNIEnv* env, int errnum)
+{
+ const char* message;
+ char buffer[80];
+ jstring jstr_message;
+
+ buffer[0] = 0;
+#ifdef STRERROR_R_CHAR_P
+ // GNU strerror_r
+ message = strerror_r(errnum, buffer, sizeof(buffer));
+ assert (message != NULL);
+#else
+ int ret = strerror_r(errnum, buffer, sizeof(buffer));
+ if (ret == 0) {
+ message = buffer;
+ } else {
+ message = "Unknown error";
+ }
+#endif
+ jobject errno_obj = errno_to_enum(env, errnum);
+
+ if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL)
+ goto err;
+
+ jthrowable obj = (jthrowable)(*env)->NewObject(env, nioe_clazz, nioe_ctor,
+ jstr_message, errno_obj);
+ if (obj == NULL) goto err;
+
+ (*env)->Throw(env, obj);
+ return;
+
+err:
+ if (jstr_message != NULL)
+ (*env)->ReleaseStringUTFChars(env, jstr_message, message);
+}
+
+
+/*
+ * Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
+ *
+ * Returns sysconf(_SC_GETPW_R_SIZE_MAX) when available, floored at 512.
+ *
+ * NOTE(review): sysconf may return -1 (unlimited/indeterminate); storing
+ * that in the unsigned size_t `ret` wraps to SIZE_MAX, which then passes
+ * the `> 512` check and makes the caller attempt a huge malloc. The -1
+ * case should be checked before the comparison. Also, the forward
+ * declaration above marks this function `static` but the definition does
+ * not — harmless (linkage carries over) but inconsistent.
+ */
+ssize_t get_pw_buflen() {
+ size_t ret = 0;
+ #ifdef _SC_GETPW_R_SIZE_MAX
+ ret = sysconf(_SC_GETPW_R_SIZE_MAX);
+ #endif
+ return (ret > 512) ? ret : 512;
+}
+/**
+ * vim: sw=2: ts=2: et:
+ */
+
Added: hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c?rev=1079148&view=auto
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c (added)
+++ hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.c Tue Mar 8 04:39:49 2011
@@ -0,0 +1,119 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+ #include <assert.h>
+ #include <errno.h>
+ #include <jni.h>
+
+#include "org_apache_hadoop.h"
+
+// One errno value paired with its symbolic name, e.g. {EPERM, "EPERM"}.
+// The string must match an enum constant name in the Java Errno enum,
+// since errno_to_enum() resolves it via Enum.valueOf().
+typedef struct errno_mapping {
+ int errno_val;
+ char *errno_str;
+} errno_mapping_t;
+
+// Macro to define structs like {FOO, "FOO"} for each errno value
+#define MAPPING(x) {x, #x}
+// Table of the classic POSIX errno values (E2BIG..ERANGE), terminated by a
+// {-1, NULL} sentinel that errno_to_string() uses to stop scanning.
+static errno_mapping_t ERRNO_MAPPINGS[] = {
+ MAPPING(EPERM),
+ MAPPING(ENOENT),
+ MAPPING(ESRCH),
+ MAPPING(EINTR),
+ MAPPING(EIO),
+ MAPPING(ENXIO),
+ MAPPING(E2BIG),
+ MAPPING(ENOEXEC),
+ MAPPING(EBADF),
+ MAPPING(ECHILD),
+ MAPPING(EAGAIN),
+ MAPPING(ENOMEM),
+ MAPPING(EACCES),
+ MAPPING(EFAULT),
+ MAPPING(ENOTBLK),
+ MAPPING(EBUSY),
+ MAPPING(EEXIST),
+ MAPPING(EXDEV),
+ MAPPING(ENODEV),
+ MAPPING(ENOTDIR),
+ MAPPING(EISDIR),
+ MAPPING(EINVAL),
+ MAPPING(ENFILE),
+ MAPPING(EMFILE),
+ MAPPING(ENOTTY),
+ MAPPING(ETXTBSY),
+ MAPPING(EFBIG),
+ MAPPING(ENOSPC),
+ MAPPING(ESPIPE),
+ MAPPING(EROFS),
+ MAPPING(EMLINK),
+ MAPPING(EPIPE),
+ MAPPING(EDOM),
+ MAPPING(ERANGE),
+ {-1, NULL}
+};
+
+static jclass enum_class;
+static jmethodID enum_valueOf;
+static jclass errno_class;
+
+// Cache global refs to java.lang.Enum (for the static valueOf lookup) and
+// the Hadoop Errno enum class. Early-returns if already initialized; the
+// guard is not itself thread-safe — initNative() is expected to serialize
+// calls.
+void errno_enum_init(JNIEnv *env) {
+ if (enum_class != NULL) return;
+
+ enum_class = (*env)->FindClass(env, "java/lang/Enum");
+ PASS_EXCEPTIONS(env);
+ enum_class = (*env)->NewGlobalRef(env, enum_class);
+ enum_valueOf = (*env)->GetStaticMethodID(env, enum_class,
+ "valueOf", "(Ljava/lang/Class;Ljava/lang/String;)Ljava/lang/Enum;");
+ PASS_EXCEPTIONS(env);
+
+ errno_class = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/Errno");
+ PASS_EXCEPTIONS(env);
+ errno_class = (*env)->NewGlobalRef(env, errno_class);
+}
+
+// Release the cached Enum/Errno global refs and clear the method ID.
+// Idempotent; safe to call after a partial or failed errno_enum_init.
+void errno_enum_deinit(JNIEnv *env) {
+ if (enum_class != NULL) {
+ (*env)->DeleteGlobalRef(env, enum_class);
+ enum_class = NULL;
+ }
+ if (errno_class != NULL) {
+ (*env)->DeleteGlobalRef(env, errno_class);
+ errno_class = NULL;
+ }
+ enum_valueOf = NULL;
+}
+
+
+// Linear-scan ERRNO_MAPPINGS for errnum's symbolic name. Returns the static
+// name string, or "UNKNOWN" for values not in the table (the Java Errno
+// enum is expected to define an UNKNOWN constant for valueOf to resolve).
+static char *errno_to_string(int errnum) {
+ int i;
+ for (i = 0; ERRNO_MAPPINGS[i].errno_str != NULL; i++) {
+ if (ERRNO_MAPPINGS[i].errno_val == errnum)
+ return ERRNO_MAPPINGS[i].errno_str;
+ }
+ return "UNKNOWN";
+}
+
+// Map a C errno value to the corresponding Java Errno enum constant by
+// calling Enum.valueOf(Errno.class, name). Returns NULL with an exception
+// pending if the name string cannot be created; valueOf itself may also
+// raise (e.g. IllegalArgumentException) if the Java enum lacks the constant.
+jobject errno_to_enum(JNIEnv *env, int errnum) {
+ char *str = errno_to_string(errnum);
+ assert(str != NULL);
+
+ jstring jstr = (*env)->NewStringUTF(env, str);
+ PASS_EXCEPTIONS_RET(env, NULL);
+
+ return (*env)->CallStaticObjectMethod(
+ env, enum_class, enum_valueOf, errno_class, jstr);
+}
Added: hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h?rev=1079148&view=auto
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h (added)
+++ hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/errno_enum.h Tue Mar 8 04:39:49 2011
@@ -0,0 +1,27 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef ERRNO_ENUM_H
+#define ERRNO_ENUM_H
+
+#include <jni.h>
+
+void errno_enum_init(JNIEnv *env);
+void errno_enum_deinit(JNIEnv *env);
+jobject errno_to_enum(JNIEnv *env, int errnum);
+
+#endif
Added: hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c?rev=1079148&view=auto
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c (added)
+++ hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c Tue Mar 8 04:39:49 2011
@@ -0,0 +1,69 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <jni.h>
+#include "file_descriptor.h"
+#include "org_apache_hadoop.h"
+
+// class of java.io.FileDescriptor
+static jclass fd_class;
+// the internal field for the integer fd
+static jfieldID fd_descriptor;
+// the no-argument constructor
+static jmethodID fd_constructor;
+
+
+// Cache a global ref to java.io.FileDescriptor plus its private int "fd"
+// field and no-arg constructor. Early-returns if already initialized.
+// NOTE(review): the GetMethodID result for the constructor is not checked
+// with PASS_EXCEPTIONS; a failure surfaces only via the caller's check.
+void fd_init(JNIEnv* env)
+{
+ if (fd_class != NULL) return; // already initted
+
+ fd_class = (*env)->FindClass(env, "java/io/FileDescriptor");
+ PASS_EXCEPTIONS(env);
+ fd_class = (*env)->NewGlobalRef(env, fd_class);
+
+ fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
+ PASS_EXCEPTIONS(env);
+ fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
+}
+
+// Release the cached FileDescriptor global ref and clear the cached IDs.
+// Idempotent, matching the other *_deinit helpers.
+void fd_deinit(JNIEnv *env) {
+ if (fd_class != NULL) {
+ (*env)->DeleteGlobalRef(env, fd_class);
+ fd_class = NULL;
+ }
+ fd_descriptor = NULL;
+ fd_constructor = NULL;
+}
+
+/*
+ * Given an instance 'obj' of java.io.FileDescriptor, return the
+ * underlying fd, or throw if unavailable
+ *
+ * (Reads the private "fd" int field cached by fd_init; -1 indicates an
+ * invalid/closed descriptor on the Java side.)
+ */
+int fd_get(JNIEnv* env, jobject obj) {
+ return (*env)->GetIntField(env, obj, fd_descriptor);
+}
+
+/*
+ * Create a FileDescriptor object corresponding to the given int fd
+ *
+ * Constructs a fresh java.io.FileDescriptor via its no-arg constructor and
+ * pokes the raw fd into its private "fd" field. Returns NULL with an
+ * exception pending if construction fails; ownership of the OS-level fd
+ * passes to the returned Java object.
+ */
+jobject fd_create(JNIEnv *env, int fd) {
+ jobject obj = (*env)->NewObject(env, fd_class, fd_constructor);
+ PASS_EXCEPTIONS_RET(env, NULL);
+
+ (*env)->SetIntField(env, obj, fd_descriptor, fd);
+ return obj;
+}
Added: hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h?rev=1079148&view=auto
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h (added)
+++ hadoop/common/branches/yahoo-merge/src/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h Tue Mar 8 04:39:49 2011
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef FILE_DESCRIPTOR_H
+#define FILE_DESCRIPTOR_H
+
+#include <jni.h>
+
+void fd_init(JNIEnv *env);
+void fd_deinit(JNIEnv *env);
+
+int fd_get(JNIEnv* env, jobject obj);
+jobject fd_create(JNIEnv *env, int fd);
+
+#endif
Modified: hadoop/common/branches/yahoo-merge/src/native/src/org_apache_hadoop.h
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/native/src/org_apache_hadoop.h?rev=1079148&r1=1079147&r2=1079148&view=diff
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/native/src/org_apache_hadoop.h (original)
+++ hadoop/common/branches/yahoo-merge/src/native/src/org_apache_hadoop.h Tue Mar 8 04:39:49 2011
@@ -50,6 +50,22 @@
} \
}
+/* Helper macro to return if an exception is pending */
+#define PASS_EXCEPTIONS(env) \
+ { \
+ if ((*env)->ExceptionCheck(env)) return; \
+ }
+
+#define PASS_EXCEPTIONS_GOTO(env, target) \
+ { \
+ if ((*env)->ExceptionCheck(env)) goto target; \
+ }
+
+#define PASS_EXCEPTIONS_RET(env, ret) \
+ { \
+ if ((*env)->ExceptionCheck(env)) return (ret); \
+ }
+
/**
* A helper function to dlsym a 'symbol' from a given library-handle.
*
Added: hadoop/common/branches/yahoo-merge/src/test/core/org/apache/hadoop/io/TestSecureIOUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/test/core/org/apache/hadoop/io/TestSecureIOUtils.java?rev=1079148&view=auto
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/test/core/org/apache/hadoop/io/TestSecureIOUtils.java (added)
+++ hadoop/common/branches/yahoo-merge/src/test/core/org/apache/hadoop/io/TestSecureIOUtils.java Tue Mar 8 04:39:49 2011
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io;
+
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.io.nativeio.NativeIO;
+
+import org.junit.BeforeClass;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assume.*;
+import static org.junit.Assert.*;
+import java.io.IOException;
+import java.io.File;
+import java.io.FileInputStream;
+import java.io.FileOutputStream;
+
+/**
+ * Tests for SecureIOUtils: verifies that secure open-for-read enforces
+ * expected file ownership, and that secure create-for-write refuses to
+ * clobber an existing file. Uses the raw local filesystem to discover the
+ * file's real owner/group for the positive-ownership case.
+ */
+public class TestSecureIOUtils {
+ private static String realOwner, realGroup;
+ private static final File testFilePath =
+ new File(System.getProperty("test.build.data"), "TestSecureIOContext");
+
+ // Create the shared test file once and record its actual owner/group
+ // via the raw (non-checksummed) local filesystem.
+ @BeforeClass
+ public static void makeTestFile() throws Exception {
+ FileOutputStream fos = new FileOutputStream(testFilePath);
+ fos.write("hello".getBytes("UTF-8"));
+ fos.close();
+
+ Configuration conf = new Configuration();
+ FileSystem rawFS = FileSystem.getLocal(conf).getRaw();
+ FileStatus stat = rawFS.getFileStatus(
+ new Path(testFilePath.toString()));
+ realOwner = stat.getOwner();
+ realGroup = stat.getGroup();
+ }
+
+ // null owner/group means "no restriction" — open must succeed.
+ @Test
+ public void testReadUnrestricted() throws IOException {
+ SecureIOUtils.openForRead(testFilePath, null, null).close();
+ }
+
+ // Restricting to the file's true owner/group must succeed.
+ @Test
+ public void testReadCorrectlyRestrictedWithSecurity() throws IOException {
+ SecureIOUtils
+ .openForRead(testFilePath, realOwner, realGroup).close();
+ }
+
+ // NOTE(review): the fail() below is redundant with expected=IOException
+ // — if openForRead doesn't throw, fail()'s AssertionError is not an
+ // IOException, so the annotation alone already fails the test. (Also
+ // "expection" is a typo in the message; left as-is since it is a
+ // runtime string in the archived commit.)
+ @Test(expected=IOException.class)
+ public void testReadIncorrectlyRestrictedWithSecurity() throws IOException {
+ SecureIOUtils
+ .openForRead(testFilePath, "invalidUser", null).close();
+ fail("Didn't throw expection for wrong ownership!");
+ }
+
+ // createForWrite must refuse to overwrite the pre-existing test file.
+ @Test
+ public void testCreateForWrite() throws IOException {
+ try {
+ SecureIOUtils.createForWrite(testFilePath, 0777);
+ fail("Was able to create file at " + testFilePath);
+ } catch (SecureIOUtils.AlreadyExistsException aee) {
+ // expected
+ }
+ }
+}
Added: hadoop/common/branches/yahoo-merge/src/test/core/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/yahoo-merge/src/test/core/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1079148&view=auto
==============================================================================
--- hadoop/common/branches/yahoo-merge/src/test/core/org/apache/hadoop/io/nativeio/TestNativeIO.java (added)
+++ hadoop/common/branches/yahoo-merge/src/test/core/org/apache/hadoop/io/nativeio/TestNativeIO.java Tue Mar 8 04:39:49 2011
@@ -0,0 +1,137 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.nativeio;
+
+import java.io.File;
+import java.io.FileDescriptor;
+import java.io.FileOutputStream;
+import java.io.IOException;
+import org.junit.Before;
+import org.junit.Test;
+import static org.junit.Assume.*;
+import static org.junit.Assert.*;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.util.NativeCodeLoader;
+
+/**
+ * Tests for the NativeIO JNI wrappers (fstat/open). Skipped entirely when
+ * the native library is not loaded (assumeTrue in checkLoaded). Each test
+ * runs against a fresh directory under test.build.data.
+ */
+public class TestNativeIO {
+ static final Log LOG = LogFactory.getLog(TestNativeIO.class);
+
+ static final File TEST_DIR = new File(
+ System.getProperty("test.build.data"), "testnativeio");
+
+ @Before
+ public void checkLoaded() {
+ assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+ }
+
+ @Before
+ public void setupTestDir() throws IOException {
+ FileUtil.fullyDelete(TEST_DIR);
+ TEST_DIR.mkdirs();
+ }
+
+ // fstat on a live fd: owner matches the current user, group is non-empty,
+ // and the mode bits identify a regular file.
+ @Test
+ public void testFstat() throws Exception {
+ FileOutputStream fos = new FileOutputStream(
+ new File(TEST_DIR, "testfstat"));
+ NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
+ fos.close();
+ LOG.info("Stat: " + String.valueOf(stat));
+
+ assertEquals(System.getProperty("user.name"), stat.getOwner());
+ assertNotNull(stat.getGroup());
+ assertTrue(!"".equals(stat.getGroup()));
+ assertEquals("Stat mode field should indicate a regular file",
+ NativeIO.Stat.S_IFREG, stat.getMode() & NativeIO.Stat.S_IFMT);
+ }
+
+ // NOTE(review): this test silently passes if fstat on a closed fd throws
+ // nothing at all — a fail() after the fstat call would make the
+ // expectation explicit.
+ @Test
+ public void testFstatClosedFd() throws Exception {
+ FileOutputStream fos = new FileOutputStream(
+ new File(TEST_DIR, "testfstat2"));
+ fos.close();
+ try {
+ NativeIO.Stat stat = NativeIO.fstat(fos.getFD());
+ } catch (NativeIOException nioe) {
+ LOG.info("Got expected exception", nioe);
+ assertEquals(Errno.EBADF, nioe.getErrno());
+ }
+ }
+
+ @Test
+ public void testOpenMissingWithoutCreate() throws Exception {
+ LOG.info("Open a missing file without O_CREAT and it should fail");
+ try {
+ FileDescriptor fd = NativeIO.open(
+ new File(TEST_DIR, "doesntexist").getAbsolutePath(),
+ NativeIO.O_WRONLY, 0700);
+ fail("Able to open a new file without O_CREAT");
+ } catch (NativeIOException nioe) {
+ LOG.info("Got expected exception", nioe);
+ assertEquals(Errno.ENOENT, nioe.getErrno());
+ }
+ }
+
+ @Test
+ public void testOpenWithCreate() throws Exception {
+ LOG.info("Test creating a file with O_CREAT");
+ FileDescriptor fd = NativeIO.open(
+ new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
+ NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+ // NOTE(review): assertNotNull(true) is a tautology — presumably meant
+ // assertNotNull(fd).
+ assertNotNull(true);
+ assertTrue(fd.valid());
+ FileOutputStream fos = new FileOutputStream(fd);
+ fos.write("foo".getBytes());
+ fos.close();
+
+ assertFalse(fd.valid());
+
+ LOG.info("Test exclusive create");
+ try {
+ fd = NativeIO.open(
+ new File(TEST_DIR, "testWorkingOpen").getAbsolutePath(),
+ NativeIO.O_WRONLY | NativeIO.O_CREAT | NativeIO.O_EXCL, 0700);
+ fail("Was able to create existing file with O_EXCL");
+ } catch (NativeIOException nioe) {
+ LOG.info("Got expected exception for failed exclusive create", nioe);
+ assertEquals(Errno.EEXIST, nioe.getErrno());
+ }
+ }
+
+ /**
+ * Test that opens and closes a file 10000 times - this would crash with
+ * "Too many open files" if we leaked fds using this access pattern.
+ */
+ @Test
+ public void testFDDoesntLeak() throws IOException {
+ for (int i = 0; i < 10000; i++) {
+ FileDescriptor fd = NativeIO.open(
+ new File(TEST_DIR, "testNoFdLeak").getAbsolutePath(),
+ NativeIO.O_WRONLY | NativeIO.O_CREAT, 0700);
+ // NOTE(review): same tautology as above — presumably assertNotNull(fd).
+ assertNotNull(true);
+ assertTrue(fd.valid());
+ FileOutputStream fos = new FileOutputStream(fd);
+ fos.write("foo".getBytes());
+ fos.close();
+ }
+ }
+
+}