You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by su...@apache.org on 2013/03/06 20:15:22 UTC
svn commit: r1453486 [3/7] - in
/hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/main/bin/
src/main/conf/ src/main/docs/src/documentation/content/xdocs/
src/main/java/ src/main/java/org/apache/hadoop/fs/
src/main/java/org/apache/hadoop/...
Added: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.sln
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.sln?rev=1453486&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.sln (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.sln Wed Mar 6 19:15:18 2013
@@ -0,0 +1,48 @@
+
+Microsoft Visual Studio Solution File, Format Version 11.00
+# Visual Studio 2010
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "native", "native.vcxproj", "{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}"
+EndProject
+Global
+ GlobalSection(SolutionConfigurationPlatforms) = preSolution
+ Debug|Mixed Platforms = Debug|Mixed Platforms
+ Debug|Win32 = Debug|Win32
+ Debug|x64 = Debug|x64
+ Release|Mixed Platforms = Release|Mixed Platforms
+ Release|Win32 = Release|Win32
+ Release|x64 = Release|x64
+ EndGlobalSection
+ GlobalSection(ProjectConfigurationPlatforms) = postSolution
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed Platforms.ActiveCfg = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Mixed Platforms.Build.0 = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.ActiveCfg = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|Win32.Build.0 = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.ActiveCfg = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Debug|x64.Build.0 = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed Platforms.ActiveCfg = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Mixed Platforms.Build.0 = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.ActiveCfg = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|Win32.Build.0 = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.ActiveCfg = Release|x64
+ {4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}.Release|x64.Build.0 = Release|x64
+ EndGlobalSection
+ GlobalSection(SolutionProperties) = preSolution
+ HideSolutionNode = FALSE
+ EndGlobalSection
+EndGlobal
Added: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj?rev=1453486&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj Wed Mar 6 19:15:18 2013
@@ -0,0 +1,96 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup Label="ProjectConfigurations">
+ <ProjectConfiguration Include="Release|x64">
+ <Configuration>Release</Configuration>
+ <Platform>x64</Platform>
+ </ProjectConfiguration>
+ </ItemGroup>
+ <PropertyGroup Label="Globals">
+ <ProjectGuid>{4C0C12D2-3CB0-47F8-BCD0-55BD5732DFA7}</ProjectGuid>
+ <Keyword>Win32Proj</Keyword>
+ <RootNamespace>native</RootNamespace>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="Configuration">
+ <ConfigurationType>DynamicLibrary</ConfigurationType>
+ <UseDebugLibraries>false</UseDebugLibraries>
+ <WholeProgramOptimization>true</WholeProgramOptimization>
+ <CharacterSet>Unicode</CharacterSet>
+ </PropertyGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
+ <ImportGroup Label="ExtensionSettings">
+ </ImportGroup>
+ <ImportGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'" Label="PropertySheets">
+ <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
+ </ImportGroup>
+ <PropertyGroup Label="UserMacros" />
+ <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ <LinkIncremental>false</LinkIncremental>
+ <OutDir>..\..\..\target\bin\</OutDir>
+ <IntDir>..\..\..\target\native\$(Configuration)\</IntDir>
+ <TargetName>hadoop</TargetName>
+ </PropertyGroup>
+ <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
+ <ClCompile>
+ <WarningLevel>Level3</WarningLevel>
+ <PrecompiledHeader>NotUsing</PrecompiledHeader>
+ <Optimization>MaxSpeed</Optimization>
+ <FunctionLevelLinking>true</FunctionLevelLinking>
+ <IntrinsicFunctions>true</IntrinsicFunctions>
+ <PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;_USRDLL;NATIVE_EXPORTS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
+ <AdditionalIncludeDirectories>..\winutils\include;..\..\..\target\native\javah;%JAVA_HOME%\include;%JAVA_HOME%\include\win32;.\src;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
+ <CompileAs>CompileAsC</CompileAs>
+ <DisableSpecificWarnings>4244</DisableSpecificWarnings>
+ </ClCompile>
+ <Link>
+ <SubSystem>Windows</SubSystem>
+ <GenerateDebugInformation>true</GenerateDebugInformation>
+ <EnableCOMDATFolding>true</EnableCOMDATFolding>
+ <OptimizeReferences>true</OptimizeReferences>
+ <AdditionalDependencies>Ws2_32.lib;libwinutils.lib;%(AdditionalDependencies)</AdditionalDependencies>
+ <AdditionalLibraryDirectories>..\..\..\target\bin;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ </Link>
+ </ItemDefinitionGroup>
+ <ItemGroup>
+ <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4.c" />
+ <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Compressor.c" />
+ <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Decompressor.c" />
+ <ClCompile Include="src\org\apache\hadoop\io\nativeio\file_descriptor.c" />
+ <ClCompile Include="src\org\apache\hadoop\io\nativeio\NativeIO.c" />
+ <ClCompile Include="src\org\apache\hadoop\security\JniBasedUnixGroupsMappingWin.c" />
+ <ClCompile Include="src\org\apache\hadoop\util\bulk_crc32.c" />
+ <ClCompile Include="src\org\apache\hadoop\util\NativeCodeLoader.c" />
+ <ClCompile Include="src\org\apache\hadoop\util\NativeCrc32.c" />
+ </ItemGroup>
+ <ItemGroup>
+ <ClInclude Include="..\src\org\apache\hadoop\util\crc32c_tables.h" />
+ <ClInclude Include="..\src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h" />
+ <ClInclude Include="src\org\apache\hadoop\io\nativeio\file_descriptor.h" />
+ <ClInclude Include="src\org\apache\hadoop\util\bulk_crc32.h" />
+ <ClInclude Include="src\org\apache\hadoop\util\crc32c_tables.h" />
+ <ClInclude Include="src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h" />
+ <ClInclude Include="src\org_apache_hadoop.h" />
+ </ItemGroup>
+ <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
+ <ImportGroup Label="ExtensionTargets">
+ </ImportGroup>
+</Project>
Added: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj.filters
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj.filters?rev=1453486&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj.filters (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/native.vcxproj.filters Wed Mar 6 19:15:18 2013
@@ -0,0 +1,87 @@
+<?xml version="1.0" encoding="utf-8"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
+ <ItemGroup>
+ <Filter Include="Source Files">
+ <UniqueIdentifier>{4FC737F1-C7A5-4376-A066-2A32D752A2FF}</UniqueIdentifier>
+ <Extensions>cpp;c;cc;cxx;def;odl;idl;hpj;bat;asm;asmx</Extensions>
+ </Filter>
+ <Filter Include="Header Files">
+ <UniqueIdentifier>{93995380-89BD-4b04-88EB-625FBE52EBFB}</UniqueIdentifier>
+ <Extensions>h;hpp;hxx;hm;inl;inc;xsd</Extensions>
+ </Filter>
+ <Filter Include="Resource Files">
+ <UniqueIdentifier>{67DA6AB6-F800-4c08-8B7A-83BB121AAD01}</UniqueIdentifier>
+ <Extensions>rc;ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe;resx;tiff;tif;png;wav;mfcribbon-ms</Extensions>
+ </Filter>
+ </ItemGroup>
+ <ItemGroup>
+ <ClCompile Include="src\org\apache\hadoop\io\nativeio\NativeIO.c">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="src\org\apache\hadoop\io\nativeio\file_descriptor.c">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="src\org\apache\hadoop\util\bulk_crc32.c">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="src\org\apache\hadoop\util\NativeCrc32.c">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="src\org\apache\hadoop\util\NativeCodeLoader.c">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\lz4.c">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Compressor.c">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="src\org\apache\hadoop\io\compress\lz4\Lz4Decompressor.c">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="src\org\apache\hadoop\security\JniBasedUnixGroupsMappingWin.c">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ </ItemGroup>
+ <ItemGroup>
+ <ClInclude Include="..\src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h">
+ <Filter>Source Files</Filter>
+ </ClInclude>
+ <ClInclude Include="..\src\org\apache\hadoop\util\crc32c_tables.h">
+ <Filter>Source Files</Filter>
+ </ClInclude>
+ <ClInclude Include="src\org\apache\hadoop\io\nativeio\file_descriptor.h">
+ <Filter>Source Files</Filter>
+ </ClInclude>
+ <ClInclude Include="src\org\apache\hadoop\util\bulk_crc32.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="src\org\apache\hadoop\util\crc32_zlib_polynomial_tables.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="src\org\apache\hadoop\util\crc32c_tables.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="src\org_apache_hadoop.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ </ItemGroup>
+</Project>
\ No newline at end of file
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Compressor.c Wed Mar 6 19:15:18 2013
@@ -16,10 +16,14 @@
* limitations under the License.
*/
-#include "config.h"
+
#include "org_apache_hadoop.h"
#include "org_apache_hadoop_io_compress_lz4_Lz4Compressor.h"
+#ifdef UNIX
+#include "config.h"
+#endif // UNIX
+
//****************************
// Simple Functions
//****************************
@@ -61,6 +65,9 @@ JNIEXPORT void JNICALL Java_org_apache_h
JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Compressor_compressBytesDirect
(JNIEnv *env, jobject thisj){
+ const char* uncompressed_bytes;
+ char *compressed_bytes;
+
// Get members of Lz4Compressor
jobject clazz = (*env)->GetStaticObjectField(env, thisj, Lz4Compressor_clazz);
jobject uncompressed_direct_buf = (*env)->GetObjectField(env, thisj, Lz4Compressor_uncompressedDirectBuf);
@@ -70,7 +77,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
// Get the input direct buffer
LOCK_CLASS(env, clazz, "Lz4Compressor");
- const char* uncompressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
+ uncompressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
UNLOCK_CLASS(env, clazz, "Lz4Compressor");
if (uncompressed_bytes == 0) {
@@ -79,7 +86,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
// Get the output direct buffer
LOCK_CLASS(env, clazz, "Lz4Compressor");
- char* compressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
+ compressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
UNLOCK_CLASS(env, clazz, "Lz4Compressor");
if (compressed_bytes == 0) {
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.c Wed Mar 6 19:15:18 2013
@@ -16,10 +16,13 @@
* limitations under the License.
*/
-#include "config.h"
#include "org_apache_hadoop.h"
#include "org_apache_hadoop_io_compress_lz4_Lz4Decompressor.h"
+#ifdef UNIX
+#include "config.h"
+#endif // UNIX
+
int LZ4_uncompress_unknownOutputSize(const char* source, char* dest, int isize, int maxOutputSize);
/*
@@ -58,6 +61,9 @@ JNIEXPORT void JNICALL Java_org_apache_h
JNIEXPORT jint JNICALL Java_org_apache_hadoop_io_compress_lz4_Lz4Decompressor_decompressBytesDirect
(JNIEnv *env, jobject thisj){
+ const char *compressed_bytes;
+ char *uncompressed_bytes;
+
// Get members of Lz4Decompressor
jobject clazz = (*env)->GetStaticObjectField(env,thisj, Lz4Decompressor_clazz);
jobject compressed_direct_buf = (*env)->GetObjectField(env,thisj, Lz4Decompressor_compressedDirectBuf);
@@ -67,7 +73,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
// Get the input direct buffer
LOCK_CLASS(env, clazz, "Lz4Decompressor");
- const char* compressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
+ compressed_bytes = (const char*)(*env)->GetDirectBufferAddress(env, compressed_direct_buf);
UNLOCK_CLASS(env, clazz, "Lz4Decompressor");
if (compressed_bytes == 0) {
@@ -76,7 +82,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
// Get the output direct buffer
LOCK_CLASS(env, clazz, "Lz4Decompressor");
- char* uncompressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
+ uncompressed_bytes = (char *)(*env)->GetDirectBufferAddress(env, uncompressed_direct_buf);
UNLOCK_CLASS(env, clazz, "Lz4Decompressor");
if (uncompressed_bytes == 0) {
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyCompressor.c Wed Mar 6 19:15:18 2013
@@ -16,12 +16,18 @@
* limitations under the License.
*/
-#include <dlfcn.h>
+
+#if defined HADOOP_SNAPPY_LIBRARY
+
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#ifdef UNIX
+#include <dlfcn.h>
#include "config.h"
+#endif // UNIX
+
#include "org_apache_hadoop_io_compress_snappy.h"
#include "org_apache_hadoop_io_compress_snappy_SnappyCompressor.h"
@@ -81,7 +87,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
UNLOCK_CLASS(env, clazz, "SnappyCompressor");
if (uncompressed_bytes == 0) {
- return 0;
+ return (jint)0;
}
// Get the output direct buffer
@@ -90,7 +96,7 @@ JNIEXPORT jint JNICALL Java_org_apache_h
UNLOCK_CLASS(env, clazz, "SnappyCompressor");
if (compressed_bytes == 0) {
- return 0;
+ return (jint)0;
}
/* size_t should always be 4 bytes or larger. */
@@ -109,3 +115,5 @@ JNIEXPORT jint JNICALL Java_org_apache_h
(*env)->SetIntField(env, thisj, SnappyCompressor_uncompressedDirectBufLen, 0);
return (jint)buf_len;
}
+
+#endif // defined HADOOP_SNAPPY_LIBRARY
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.c Wed Mar 6 19:15:18 2013
@@ -16,12 +16,18 @@
* limitations under the License.
*/
-#include <dlfcn.h>
+
+#if defined HADOOP_SNAPPY_LIBRARY
+
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#ifdef UNIX
#include "config.h"
+#include <dlfcn.h>
+#endif
+
#include "org_apache_hadoop_io_compress_snappy.h"
#include "org_apache_hadoop_io_compress_snappy_SnappyDecompressor.h"
@@ -103,3 +109,5 @@ JNIEXPORT jint JNICALL Java_org_apache_h
return (jint)uncompressed_direct_buf_len;
}
+
+#endif // defined HADOOP_SNAPPY_LIBRARY
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c Wed Mar 6 19:15:18 2013
@@ -16,12 +16,15 @@
* limitations under the License.
*/
-#include <dlfcn.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#ifdef UNIX
+#include <dlfcn.h>
#include "config.h"
+#endif
+
#include "org_apache_hadoop_io_compress_zlib.h"
#include "org_apache_hadoop_io_compress_zlib_ZlibCompressor.h"
@@ -35,48 +38,124 @@ static jfieldID ZlibCompressor_directBuf
static jfieldID ZlibCompressor_finish;
static jfieldID ZlibCompressor_finished;
+#ifdef UNIX
static int (*dlsym_deflateInit2_)(z_streamp, int, int, int, int, int, const char *, int);
static int (*dlsym_deflate)(z_streamp, int);
static int (*dlsym_deflateSetDictionary)(z_streamp, const Bytef *, uInt);
static int (*dlsym_deflateReset)(z_streamp);
static int (*dlsym_deflateEnd)(z_streamp);
+#endif
+
+#ifdef WINDOWS
+#include <Strsafe.h>
+typedef int (__cdecl *__dlsym_deflateInit2_) (z_streamp, int, int, int, int, int, const char *, int);
+typedef int (__cdecl *__dlsym_deflate) (z_streamp, int);
+typedef int (__cdecl *__dlsym_deflateSetDictionary) (z_streamp, const Bytef *, uInt);
+typedef int (__cdecl *__dlsym_deflateReset) (z_streamp);
+typedef int (__cdecl *__dlsym_deflateEnd) (z_streamp);
+static __dlsym_deflateInit2_ dlsym_deflateInit2_;
+static __dlsym_deflate dlsym_deflate;
+static __dlsym_deflateSetDictionary dlsym_deflateSetDictionary;
+static __dlsym_deflateReset dlsym_deflateReset;
+static __dlsym_deflateEnd dlsym_deflateEnd;
+
+// Try to load zlib.dll from the dir where hadoop.dll is located.
+HANDLE LoadZlibTryHadoopNativeDir() {
+ HMODULE libz = NULL;
+ PCWSTR HADOOP_DLL = L"hadoop.dll";
+ size_t HADOOP_DLL_LEN = 10;
+ WCHAR path[MAX_PATH] = { 0 };
+ BOOL isPathValid = FALSE;
+
+ // Get hadoop.dll full path
+ HMODULE hModule = GetModuleHandle(HADOOP_DLL);
+ if (hModule != NULL) {
+ if (GetModuleFileName(hModule, path, MAX_PATH) > 0) {
+ size_t size = 0;
+ if (StringCchLength(path, MAX_PATH, &size) == S_OK) {
+
+ // Update path variable to have the full path to the zlib.dll
+ size = size - HADOOP_DLL_LEN;
+ if (size >= 0) {
+ path[size] = L'\0';
+ if (StringCchCat(path, MAX_PATH, HADOOP_ZLIB_LIBRARY) == S_OK) {
+ isPathValid = TRUE;
+ }
+ }
+ }
+ }
+ }
+
+ if (isPathValid) {
+ libz = LoadLibrary(path);
+ }
+
+ // fallback to system paths
+ if (!libz) {
+ libz = LoadLibrary(HADOOP_ZLIB_LIBRARY);
+ }
+
+ return libz;
+}
+#endif
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_initIDs(
JNIEnv *env, jclass class
) {
+#ifdef UNIX
// Load libz.so
void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
- if (!libz) {
+ if (!libz) {
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
return;
}
+#endif
+
+#ifdef WINDOWS
+ HMODULE libz = LoadZlibTryHadoopNativeDir();
+
+ if (!libz) {
+ THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
+ return;
+ }
+#endif
+#ifdef UNIX
// Locate the requisite symbols from libz.so
dlerror(); // Clear any existing error
- LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
- LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
- LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
- LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
- LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflateInit2_, env, libz, "deflateInit2_");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflate, env, libz, "deflate");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflateReset, env, libz, "deflateReset");
+ LOAD_DYNAMIC_SYMBOL(dlsym_deflateEnd, env, libz, "deflateEnd");
+#endif
+
+#ifdef WINDOWS
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflateInit2_, dlsym_deflateInit2_, env, libz, "deflateInit2_");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflate, dlsym_deflate, env, libz, "deflate");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflateSetDictionary, dlsym_deflateSetDictionary, env, libz, "deflateSetDictionary");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflateReset, dlsym_deflateReset, env, libz, "deflateReset");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_deflateEnd, dlsym_deflateEnd, env, libz, "deflateEnd");
+#endif
// Initialize the requisite fieldIds
- ZlibCompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
+ ZlibCompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
"Ljava/lang/Class;");
ZlibCompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
ZlibCompressor_finish = (*env)->GetFieldID(env, class, "finish", "Z");
ZlibCompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
- ZlibCompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
- "uncompressedDirectBuf",
+ ZlibCompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
+ "uncompressedDirectBuf",
"Ljava/nio/Buffer;");
- ZlibCompressor_uncompressedDirectBufOff = (*env)->GetFieldID(env, class,
+ ZlibCompressor_uncompressedDirectBufOff = (*env)->GetFieldID(env, class,
"uncompressedDirectBufOff", "I");
- ZlibCompressor_uncompressedDirectBufLen = (*env)->GetFieldID(env, class,
+ ZlibCompressor_uncompressedDirectBufLen = (*env)->GetFieldID(env, class,
"uncompressedDirectBufLen", "I");
- ZlibCompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
- "compressedDirectBuf",
+ ZlibCompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
+ "compressedDirectBuf",
"Ljava/nio/Buffer;");
- ZlibCompressor_directBufferSize = (*env)->GetFieldID(env, class,
+ ZlibCompressor_directBufferSize = (*env)->GetFieldID(env, class,
"directBufferSize", "I");
}
@@ -84,7 +163,9 @@ JNIEXPORT jlong JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_init(
JNIEnv *env, jclass class, jint level, jint strategy, jint windowBits
) {
- // Create a z_stream
+ int rv = 0;
+ static const int memLevel = 8; // See zconf.h
+ // Create a z_stream
z_stream *stream = malloc(sizeof(z_stream));
if (!stream) {
THROW(env, "java/lang/OutOfMemoryError", NULL);
@@ -93,17 +174,16 @@ Java_org_apache_hadoop_io_compress_zlib_
memset((void*)stream, 0, sizeof(z_stream));
// Initialize stream
- static const int memLevel = 8; // See zconf.h
- int rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
+ rv = (*dlsym_deflateInit2_)(stream, level, Z_DEFLATED, windowBits,
memLevel, strategy, ZLIB_VERSION, sizeof(z_stream));
-
+
if (rv != Z_OK) {
// Contingency - Report error by throwing appropriate exceptions
free(stream);
stream = NULL;
-
+
switch (rv) {
- case Z_MEM_ERROR:
+ case Z_MEM_ERROR:
{
THROW(env, "java/lang/OutOfMemoryError", NULL);
}
@@ -120,27 +200,28 @@ Java_org_apache_hadoop_io_compress_zlib_
break;
}
}
-
+
return JLONG(stream);
}
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_setDictionary(
- JNIEnv *env, jclass class, jlong stream,
+ JNIEnv *env, jclass class, jlong stream,
jarray b, jint off, jint len
) {
+ int rv = 0;
Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
if (!buf) {
return;
}
- int rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
+ rv = dlsym_deflateSetDictionary(ZSTREAM(stream), buf + off, len);
(*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
-
+
if (rv != Z_OK) {
// Contingency - Report error by throwing appropriate exceptions
switch (rv) {
case Z_STREAM_ERROR:
- {
+ {
THROW(env, "java/lang/IllegalArgumentException", NULL);
}
break;
@@ -157,75 +238,85 @@ JNIEXPORT jint JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibCompressor_deflateBytesDirect(
JNIEnv *env, jobject this
) {
+ jobject clazz = NULL;
+ jobject uncompressed_direct_buf = NULL;
+ jint uncompressed_direct_buf_off = 0;
+ jint uncompressed_direct_buf_len = 0;
+ jobject compressed_direct_buf = NULL;
+ jint compressed_direct_buf_len = 0;
+ jboolean finish;
+ Bytef* uncompressed_bytes = NULL;
+ Bytef* compressed_bytes = NULL;
+ int rv = 0;
+ jint no_compressed_bytes = 0;
// Get members of ZlibCompressor
z_stream *stream = ZSTREAM(
- (*env)->GetLongField(env, this,
+ (*env)->GetLongField(env, this,
ZlibCompressor_stream)
);
if (!stream) {
THROW(env, "java/lang/NullPointerException", NULL);
return (jint)0;
- }
+ }
// Get members of ZlibCompressor
- jobject clazz = (*env)->GetStaticObjectField(env, this,
+ clazz = (*env)->GetStaticObjectField(env, this,
ZlibCompressor_clazz);
- jobject uncompressed_direct_buf = (*env)->GetObjectField(env, this,
+ uncompressed_direct_buf = (*env)->GetObjectField(env, this,
ZlibCompressor_uncompressedDirectBuf);
- jint uncompressed_direct_buf_off = (*env)->GetIntField(env, this,
+ uncompressed_direct_buf_off = (*env)->GetIntField(env, this,
ZlibCompressor_uncompressedDirectBufOff);
- jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
+ uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
ZlibCompressor_uncompressedDirectBufLen);
- jobject compressed_direct_buf = (*env)->GetObjectField(env, this,
+ compressed_direct_buf = (*env)->GetObjectField(env, this,
ZlibCompressor_compressedDirectBuf);
- jint compressed_direct_buf_len = (*env)->GetIntField(env, this,
+ compressed_direct_buf_len = (*env)->GetIntField(env, this,
ZlibCompressor_directBufferSize);
- jboolean finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
+ finish = (*env)->GetBooleanField(env, this, ZlibCompressor_finish);
// Get the input direct buffer
LOCK_CLASS(env, clazz, "ZlibCompressor");
- Bytef* uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
+ uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
uncompressed_direct_buf);
UNLOCK_CLASS(env, clazz, "ZlibCompressor");
-
+
if (uncompressed_bytes == 0) {
return (jint)0;
}
-
+
// Get the output direct buffer
LOCK_CLASS(env, clazz, "ZlibCompressor");
- Bytef* compressed_bytes = (*env)->GetDirectBufferAddress(env,
+ compressed_bytes = (*env)->GetDirectBufferAddress(env,
compressed_direct_buf);
UNLOCK_CLASS(env, clazz, "ZlibCompressor");
if (compressed_bytes == 0) {
return (jint)0;
}
-
+
// Re-calibrate the z_stream
stream->next_in = uncompressed_bytes + uncompressed_direct_buf_off;
stream->next_out = compressed_bytes;
stream->avail_in = uncompressed_direct_buf_len;
- stream->avail_out = compressed_direct_buf_len;
-
+ stream->avail_out = compressed_direct_buf_len;
+
// Compress
- int rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
+ rv = dlsym_deflate(stream, finish ? Z_FINISH : Z_NO_FLUSH);
- jint no_compressed_bytes = 0;
switch (rv) {
// Contingency? - Report error by throwing appropriate exceptions
case Z_STREAM_END:
{
(*env)->SetBooleanField(env, this, ZlibCompressor_finished, JNI_TRUE);
} // cascade
- case Z_OK:
+ case Z_OK:
{
uncompressed_direct_buf_off += uncompressed_direct_buf_len - stream->avail_in;
- (*env)->SetIntField(env, this,
+ (*env)->SetIntField(env, this,
ZlibCompressor_uncompressedDirectBufOff, uncompressed_direct_buf_off);
- (*env)->SetIntField(env, this,
+ (*env)->SetIntField(env, this,
ZlibCompressor_uncompressedDirectBufLen, stream->avail_in);
no_compressed_bytes = compressed_direct_buf_len - stream->avail_out;
}
@@ -238,7 +329,7 @@ Java_org_apache_hadoop_io_compress_zlib_
}
break;
}
-
+
return no_compressed_bytes;
}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c Wed Mar 6 19:15:18 2013
@@ -16,12 +16,15 @@
* limitations under the License.
*/
-#include <dlfcn.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
+#ifdef UNIX
+#include <dlfcn.h>
#include "config.h"
+#endif
+
#include "org_apache_hadoop_io_compress_zlib.h"
#include "org_apache_hadoop_io_compress_zlib_ZlibDecompressor.h"
@@ -35,48 +38,88 @@ static jfieldID ZlibDecompressor_directB
static jfieldID ZlibDecompressor_needDict;
static jfieldID ZlibDecompressor_finished;
+#ifdef UNIX
static int (*dlsym_inflateInit2_)(z_streamp, int, const char *, int);
static int (*dlsym_inflate)(z_streamp, int);
static int (*dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
static int (*dlsym_inflateReset)(z_streamp);
static int (*dlsym_inflateEnd)(z_streamp);
+#endif
+
+#ifdef WINDOWS
+#include <Strsafe.h>
+typedef int (__cdecl *__dlsym_inflateInit2_)(z_streamp, int, const char *, int);
+typedef int (__cdecl *__dlsym_inflate)(z_streamp, int);
+typedef int (__cdecl *__dlsym_inflateSetDictionary)(z_streamp, const Bytef *, uInt);
+typedef int (__cdecl *__dlsym_inflateReset)(z_streamp);
+typedef int (__cdecl *__dlsym_inflateEnd)(z_streamp);
+static __dlsym_inflateInit2_ dlsym_inflateInit2_;
+static __dlsym_inflate dlsym_inflate;
+static __dlsym_inflateSetDictionary dlsym_inflateSetDictionary;
+static __dlsym_inflateReset dlsym_inflateReset;
+static __dlsym_inflateEnd dlsym_inflateEnd;
+extern HANDLE LoadZlibTryHadoopNativeDir();
+#endif
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_initIDs(
- JNIEnv *env, jclass class
+JNIEnv *env, jclass class
) {
// Load libz.so
- void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
+#ifdef UNIX
+ void *libz = dlopen(HADOOP_ZLIB_LIBRARY, RTLD_LAZY | RTLD_GLOBAL);
if (!libz) {
THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load libz.so");
return;
- }
+ }
+#endif
+
+#ifdef WINDOWS
+ HMODULE libz = LoadZlibTryHadoopNativeDir();
+
+ if (!libz) {
+ THROW(env, "java/lang/UnsatisfiedLinkError", "Cannot load zlib1.dll");
+ return;
+ }
+#endif
+
// Locate the requisite symbols from libz.so
+#ifdef UNIX
dlerror(); // Clear any existing error
LOAD_DYNAMIC_SYMBOL(dlsym_inflateInit2_, env, libz, "inflateInit2_");
LOAD_DYNAMIC_SYMBOL(dlsym_inflate, env, libz, "inflate");
LOAD_DYNAMIC_SYMBOL(dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
LOAD_DYNAMIC_SYMBOL(dlsym_inflateReset, env, libz, "inflateReset");
LOAD_DYNAMIC_SYMBOL(dlsym_inflateEnd, env, libz, "inflateEnd");
+#endif
+
+#ifdef WINDOWS
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflateInit2_, dlsym_inflateInit2_, env, libz, "inflateInit2_");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflate, dlsym_inflate, env, libz, "inflate");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflateSetDictionary, dlsym_inflateSetDictionary, env, libz, "inflateSetDictionary");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflateReset, dlsym_inflateReset, env, libz, "inflateReset");
+ LOAD_DYNAMIC_SYMBOL(__dlsym_inflateEnd, dlsym_inflateEnd, env, libz, "inflateEnd");
+#endif
+
- // Initialize the requisite fieldIds
- ZlibDecompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
+ // Initialize the requisite fieldIds
+ ZlibDecompressor_clazz = (*env)->GetStaticFieldID(env, class, "clazz",
"Ljava/lang/Class;");
ZlibDecompressor_stream = (*env)->GetFieldID(env, class, "stream", "J");
ZlibDecompressor_needDict = (*env)->GetFieldID(env, class, "needDict", "Z");
ZlibDecompressor_finished = (*env)->GetFieldID(env, class, "finished", "Z");
- ZlibDecompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
- "compressedDirectBuf",
+ ZlibDecompressor_compressedDirectBuf = (*env)->GetFieldID(env, class,
+ "compressedDirectBuf",
"Ljava/nio/Buffer;");
- ZlibDecompressor_compressedDirectBufOff = (*env)->GetFieldID(env, class,
+ ZlibDecompressor_compressedDirectBufOff = (*env)->GetFieldID(env, class,
"compressedDirectBufOff", "I");
- ZlibDecompressor_compressedDirectBufLen = (*env)->GetFieldID(env, class,
+ ZlibDecompressor_compressedDirectBufLen = (*env)->GetFieldID(env, class,
"compressedDirectBufLen", "I");
- ZlibDecompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
- "uncompressedDirectBuf",
+ ZlibDecompressor_uncompressedDirectBuf = (*env)->GetFieldID(env, class,
+ "uncompressedDirectBuf",
"Ljava/nio/Buffer;");
- ZlibDecompressor_directBufferSize = (*env)->GetFieldID(env, class,
+ ZlibDecompressor_directBufferSize = (*env)->GetFieldID(env, class,
"directBufferSize", "I");
}
@@ -84,21 +127,22 @@ JNIEXPORT jlong JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_init(
JNIEnv *env, jclass cls, jint windowBits
) {
+ int rv = 0;
z_stream *stream = malloc(sizeof(z_stream));
memset((void*)stream, 0, sizeof(z_stream));
if (stream == 0) {
THROW(env, "java/lang/OutOfMemoryError", NULL);
return (jlong)0;
- }
-
- int rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
+ }
+
+ rv = dlsym_inflateInit2_(stream, windowBits, ZLIB_VERSION, sizeof(z_stream));
if (rv != Z_OK) {
// Contingency - Report error by throwing appropriate exceptions
free(stream);
stream = NULL;
-
+
switch (rv) {
case Z_MEM_ERROR:
{
@@ -112,7 +156,7 @@ Java_org_apache_hadoop_io_compress_zlib_
break;
}
}
-
+
return JLONG(stream);
}
@@ -121,21 +165,22 @@ Java_org_apache_hadoop_io_compress_zlib_
JNIEnv *env, jclass cls, jlong stream,
jarray b, jint off, jint len
) {
+ int rv = 0;
Bytef *buf = (*env)->GetPrimitiveArrayCritical(env, b, 0);
if (!buf) {
THROW(env, "java/lang/InternalError", NULL);
return;
}
- int rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
+ rv = dlsym_inflateSetDictionary(ZSTREAM(stream), buf + off, len);
(*env)->ReleasePrimitiveArrayCritical(env, b, buf, 0);
-
+
if (rv != Z_OK) {
// Contingency - Report error by throwing appropriate exceptions
switch (rv) {
case Z_STREAM_ERROR:
case Z_DATA_ERROR:
{
- THROW(env, "java/lang/IllegalArgumentException",
+ THROW(env, "java/lang/IllegalArgumentException",
(ZSTREAM(stream))->msg);
}
break;
@@ -152,62 +197,71 @@ JNIEXPORT jint JNICALL
Java_org_apache_hadoop_io_compress_zlib_ZlibDecompressor_inflateBytesDirect(
JNIEnv *env, jobject this
) {
+ jobject clazz = NULL;
+ jarray compressed_direct_buf = NULL;
+ jint compressed_direct_buf_off = 0;
+ jint compressed_direct_buf_len = 0;
+ jarray uncompressed_direct_buf = NULL;
+ jint uncompressed_direct_buf_len = 0;
+ Bytef *compressed_bytes = NULL;
+ Bytef *uncompressed_bytes = NULL;
+ int rv = 0;
+ int no_decompressed_bytes = 0;
// Get members of ZlibDecompressor
z_stream *stream = ZSTREAM(
- (*env)->GetLongField(env, this,
+ (*env)->GetLongField(env, this,
ZlibDecompressor_stream)
);
if (!stream) {
THROW(env, "java/lang/NullPointerException", NULL);
return (jint)0;
- }
+ }
// Get members of ZlibDecompressor
- jobject clazz = (*env)->GetStaticObjectField(env, this,
+ clazz = (*env)->GetStaticObjectField(env, this,
ZlibDecompressor_clazz);
- jarray compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
+ compressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
ZlibDecompressor_compressedDirectBuf);
- jint compressed_direct_buf_off = (*env)->GetIntField(env, this,
+ compressed_direct_buf_off = (*env)->GetIntField(env, this,
ZlibDecompressor_compressedDirectBufOff);
- jint compressed_direct_buf_len = (*env)->GetIntField(env, this,
+ compressed_direct_buf_len = (*env)->GetIntField(env, this,
ZlibDecompressor_compressedDirectBufLen);
- jarray uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
+ uncompressed_direct_buf = (jarray)(*env)->GetObjectField(env, this,
ZlibDecompressor_uncompressedDirectBuf);
- jint uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
+ uncompressed_direct_buf_len = (*env)->GetIntField(env, this,
ZlibDecompressor_directBufferSize);
// Get the input direct buffer
LOCK_CLASS(env, clazz, "ZlibDecompressor");
- Bytef *compressed_bytes = (*env)->GetDirectBufferAddress(env,
+ compressed_bytes = (*env)->GetDirectBufferAddress(env,
compressed_direct_buf);
UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
-
+
if (!compressed_bytes) {
return (jint)0;
}
-
+
// Get the output direct buffer
LOCK_CLASS(env, clazz, "ZlibDecompressor");
- Bytef *uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
+ uncompressed_bytes = (*env)->GetDirectBufferAddress(env,
uncompressed_direct_buf);
UNLOCK_CLASS(env, clazz, "ZlibDecompressor");
if (!uncompressed_bytes) {
return (jint)0;
}
-
+
// Re-calibrate the z_stream
stream->next_in = compressed_bytes + compressed_direct_buf_off;
stream->next_out = uncompressed_bytes;
stream->avail_in = compressed_direct_buf_len;
stream->avail_out = uncompressed_direct_buf_len;
-
+
// Decompress
- int rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
+ rv = dlsym_inflate(stream, Z_PARTIAL_FLUSH);
// Contingency? - Report error by throwing appropriate exceptions
- int no_decompressed_bytes = 0;
switch (rv) {
case Z_STREAM_END:
{
@@ -216,9 +270,9 @@ Java_org_apache_hadoop_io_compress_zlib_
case Z_OK:
{
compressed_direct_buf_off += compressed_direct_buf_len - stream->avail_in;
- (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
+ (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
compressed_direct_buf_off);
- (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
+ (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
stream->avail_in);
no_decompressed_bytes = uncompressed_direct_buf_len - stream->avail_out;
}
@@ -227,9 +281,9 @@ Java_org_apache_hadoop_io_compress_zlib_
{
(*env)->SetBooleanField(env, this, ZlibDecompressor_needDict, JNI_TRUE);
compressed_direct_buf_off += compressed_direct_buf_len - stream->avail_in;
- (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
+ (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufOff,
compressed_direct_buf_off);
- (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
+ (*env)->SetIntField(env, this, ZlibDecompressor_compressedDirectBufLen,
stream->avail_in);
}
break;
@@ -251,7 +305,7 @@ Java_org_apache_hadoop_io_compress_zlib_
}
break;
}
-
+
return no_decompressed_bytes;
}
@@ -299,4 +353,3 @@ Java_org_apache_hadoop_io_compress_zlib_
/**
* vim: sw=2: ts=2: et:
*/
-
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/compress/zlib/org_apache_hadoop_io_compress_zlib.h Wed Mar 6 19:15:18 2013
@@ -19,14 +19,23 @@
#if !defined ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
#define ORG_APACHE_HADOOP_IO_COMPRESS_ZLIB_ZLIB_H
-#include <dlfcn.h>
-#include <jni.h>
+#include "org_apache_hadoop.h"
+
+#ifdef UNIX
+#include <config.h>
#include <stddef.h>
-#include <zconf.h>
#include <zlib.h>
+#include <zconf.h>
+#include <dlfcn.h>
+#include <jni.h>
+#endif
-#include "config.h"
-#include "org_apache_hadoop.h"
+#ifdef WINDOWS
+#include <jni.h>
+#define HADOOP_ZLIB_LIBRARY L"zlib1.dll"
+#include <zlib.h>
+#include <zconf.h>
+#endif
/* A helper macro to convert the java 'stream-handle' to a z_stream pointer. */
#define ZSTREAM(stream) ((z_stream*)((ptrdiff_t)(stream)))
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c Wed Mar 6 19:15:18 2013
@@ -18,6 +18,10 @@
#define _GNU_SOURCE
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_io_nativeio_NativeIO.h"
+
+#ifdef UNIX
#include <assert.h>
#include <errno.h>
#include <fcntl.h>
@@ -31,14 +35,19 @@
#include <sys/syscall.h>
#include <sys/types.h>
#include <unistd.h>
-
#include "config.h"
-#include "org_apache_hadoop.h"
-#include "org_apache_hadoop_io_nativeio_NativeIO.h"
+#endif
+
+#ifdef WINDOWS
+#include <assert.h>
+#include <Windows.h>
+#include "winutils.h"
+#endif
+
#include "file_descriptor.h"
#include "errno_enum.h"
-// the NativeIO$Stat inner class and its constructor
+// the NativeIO$POSIX$Stat inner class and its constructor
static jclass stat_clazz;
static jmethodID stat_ctor;
@@ -53,26 +62,32 @@ static jobject pw_lock_object;
// Internal functions
static void throw_ioe(JNIEnv* env, int errnum);
+#ifdef UNIX
static ssize_t get_pw_buflen();
+#endif
/**
* Returns non-zero if the user has specified that the system
* has non-threadsafe implementations of getpwuid_r or getgrgid_r.
**/
static int workaround_non_threadsafe_calls(JNIEnv *env, jclass clazz) {
- jfieldID needs_workaround_field = (*env)->GetStaticFieldID(env, clazz,
- "workaroundNonThreadSafePasswdCalls", "Z");
+ jboolean result;
+ jfieldID needs_workaround_field = (*env)->GetStaticFieldID(
+ env, clazz,
+ "workaroundNonThreadSafePasswdCalls",
+ "Z");
PASS_EXCEPTIONS_RET(env, 0);
assert(needs_workaround_field);
- jboolean result = (*env)->GetStaticBooleanField(
+ result = (*env)->GetStaticBooleanField(
env, clazz, needs_workaround_field);
return result;
}
+#ifdef UNIX
static void stat_init(JNIEnv *env, jclass nativeio_class) {
// Init Stat
- jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$Stat");
+ jclass clazz = (*env)->FindClass(env, "org/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat");
if (!clazz) {
return; // exception has been raised
}
@@ -85,6 +100,7 @@ static void stat_init(JNIEnv *env, jclas
if (!stat_ctor) {
return; // exception has been raised
}
+
jclass obj_class = (*env)->FindClass(env, "java/lang/Object");
if (!obj_class) {
return; // exception has been raised
@@ -99,6 +115,7 @@ static void stat_init(JNIEnv *env, jclas
pw_lock_object = (*env)->NewObject(env, obj_class, obj_ctor);
PASS_EXCEPTIONS(env);
pw_lock_object = (*env)->NewGlobalRef(env, pw_lock_object);
+
PASS_EXCEPTIONS(env);
}
}
@@ -113,6 +130,7 @@ static void stat_deinit(JNIEnv *env) {
pw_lock_object = NULL;
}
}
+#endif
static void nioe_init(JNIEnv *env) {
// Init NativeIOException
@@ -121,8 +139,15 @@ static void nioe_init(JNIEnv *env) {
PASS_EXCEPTIONS(env);
nioe_clazz = (*env)->NewGlobalRef(env, nioe_clazz);
+#ifdef UNIX
nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
"(Ljava/lang/String;Lorg/apache/hadoop/io/nativeio/Errno;)V");
+#endif
+
+#ifdef WINDOWS
+ nioe_ctor = (*env)->GetMethodID(env, nioe_clazz, "<init>",
+ "(Ljava/lang/String;I)V");
+#endif
}
static void nioe_deinit(JNIEnv *env) {
@@ -143,32 +168,46 @@ static void nioe_deinit(JNIEnv *env) {
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_nativeio_NativeIO_initNative(
JNIEnv *env, jclass clazz) {
-
+#ifdef UNIX
stat_init(env, clazz);
PASS_EXCEPTIONS_GOTO(env, error);
+#endif
nioe_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
fd_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
+#ifdef UNIX
errno_enum_init(env);
PASS_EXCEPTIONS_GOTO(env, error);
+#endif
return;
error:
// these are all idempodent and safe to call even if the
// class wasn't initted yet
+#ifdef UNIX
stat_deinit(env);
+#endif
nioe_deinit(env);
fd_deinit(env);
+#ifdef UNIX
errno_enum_deinit(env);
+#endif
}
/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_POSIX
+ * Method: fstat
+ * Signature: (Ljava/io/FileDescriptor;)Lorg/apache/hadoop/io/nativeio/NativeIO$POSIX$Stat;
* public static native Stat fstat(FileDescriptor fd);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT jobject JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_fstat(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_fstat(
JNIEnv *env, jclass clazz, jobject fd_object)
{
+#ifdef UNIX
jobject ret = NULL;
int fd = fd_get(env, fd_object);
@@ -187,14 +226,26 @@ Java_org_apache_hadoop_io_nativeio_Nativ
cleanup:
return ret;
+#endif
+
+#ifdef WINDOWS
+ THROW(env, "java/io/IOException",
+ "The function POSIX.fstat() is not supported on Windows");
+ return NULL;
+#endif
}
+
+
/**
* public static native void posix_fadvise(
* FileDescriptor fd, long offset, long len, int flags);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_posix_1fadvise(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_posix_1fadvise(
JNIEnv *env, jclass clazz,
jobject fd_object, jlong offset, jlong len, jint flags)
{
@@ -240,9 +291,12 @@ static int manual_sync_file_range (int f
/**
* public static native void sync_file_range(
* FileDescriptor fd, long offset, long len, int flags);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_sync_1file_1range(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_sync_1file_1range(
JNIEnv *env, jclass clazz,
jobject fd_object, jlong offset, jlong len, jint flags)
{
@@ -284,13 +338,20 @@ static int toFreeBSDFlags(int flags)
#endif
/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_POSIX
+ * Method: open
+ * Signature: (Ljava/lang/String;II)Ljava/io/FileDescriptor;
* public static native FileDescriptor open(String path, int flags, int mode);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT jobject JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_open(
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_open(
JNIEnv *env, jclass clazz, jstring j_path,
jint flags, jint mode)
{
+#ifdef UNIX
#ifdef __FreeBSD__
flags = toFreeBSDFlags(flags);
#endif
@@ -318,16 +379,90 @@ cleanup:
(*env)->ReleaseStringUTFChars(env, j_path, path);
}
return ret;
+#endif
+
+#ifdef WINDOWS
+ THROW(env, "java/io/IOException",
+ "The function POSIX.open() is not supported on Windows");
+ return NULL;
+#endif
}
-/**
- * public static native void chmod(String path, int mode) throws IOException;
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method: createFile
+ * Signature: (Ljava/lang/String;JJJ)Ljava/io/FileDescriptor;
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
-JNIEXPORT void JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_chmod(
- JNIEnv *env, jclass clazz, jstring j_path,
- jint mode)
+JNIEXPORT jobject JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_createFile
+ (JNIEnv *env, jclass clazz, jstring j_path,
+ jlong desiredAccess, jlong shareMode, jlong creationDisposition)
+{
+#ifdef UNIX
+ THROW(env, "java/io/IOException",
+ "The function Windows.createFile() is not supported on Unix");
+ return NULL;
+#endif
+
+#ifdef WINDOWS
+ DWORD dwRtnCode = ERROR_SUCCESS;
+ BOOL isSymlink = FALSE;
+ BOOL isJunction = FALSE;
+ DWORD dwFlagsAndAttributes = FILE_ATTRIBUTE_NORMAL | FILE_FLAG_BACKUP_SEMANTICS;
+ jobject ret = (jobject) NULL;
+ HANDLE hFile = INVALID_HANDLE_VALUE;
+ WCHAR *path = (WCHAR *) (*env)->GetStringChars(env, j_path, (jboolean*)NULL);
+ if (path == NULL) goto cleanup;
+
+ // Set the flag for a symbolic link or a junctions point only when it exists.
+ // According to MSDN if the call to CreateFile() function creates a file,
+ // there is no change in behavior. So we do not throw if no file is found.
+ //
+ dwRtnCode = SymbolicLinkCheck(path, &isSymlink);
+ if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
+ throw_ioe(env, dwRtnCode);
+ goto cleanup;
+ }
+ dwRtnCode = JunctionPointCheck(path, &isJunction);
+ if (dwRtnCode != ERROR_SUCCESS && dwRtnCode != ERROR_FILE_NOT_FOUND) {
+ throw_ioe(env, dwRtnCode);
+ goto cleanup;
+ }
+ if (isSymlink || isJunction)
+ dwFlagsAndAttributes |= FILE_FLAG_OPEN_REPARSE_POINT;
+
+ hFile = CreateFile(path,
+ (DWORD) desiredAccess,
+ (DWORD) shareMode,
+ (LPSECURITY_ATTRIBUTES ) NULL,
+ (DWORD) creationDisposition,
+ dwFlagsAndAttributes,
+ NULL);
+ if (hFile == INVALID_HANDLE_VALUE) {
+ throw_ioe(env, GetLastError());
+ goto cleanup;
+ }
+
+ ret = fd_create(env, (long) hFile);
+cleanup:
+ if (path != NULL) {
+ (*env)->ReleaseStringChars(env, j_path, (const jchar*)path);
+ }
+ return (jobject) ret;
+#endif
+}
+
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_POSIX
+ * Method: chmod
+ * Signature: (Ljava/lang/String;I)V
+ */
+JNIEXPORT void JNICALL Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_chmodImpl
+ (JNIEnv *env, jclass clazz, jstring j_path, jint mode)
{
+#ifdef UNIX
const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
if (path == NULL) return; // JVM throws Exception for us
@@ -336,15 +471,30 @@ Java_org_apache_hadoop_io_nativeio_Nativ
}
(*env)->ReleaseStringUTFChars(env, j_path, path);
+#endif
+
+#ifdef WINDOWS
+ DWORD dwRtnCode = ERROR_SUCCESS;
+ LPCWSTR path = (LPCWSTR) (*env)->GetStringChars(env, j_path, NULL);
+ if (path == NULL) return; // JVM throws Exception for us
+
+ if ((dwRtnCode = ChangeFileModeByMask((LPCWSTR) path, mode)) != ERROR_SUCCESS)
+ {
+ throw_ioe(env, dwRtnCode);
+ }
+
+ (*env)->ReleaseStringChars(env, j_path, (const jchar*) path);
+#endif
}
/*
* static native String getUserName(int uid);
*/
JNIEXPORT jstring JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_getUserName(JNIEnv *env,
-jclass clazz, jint uid)
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getUserName(
+ JNIEnv *env, jclass clazz, jint uid)
{
+#ifdef UNIX
int pw_lock_locked = 0;
if (pw_lock_object != NULL) {
if ((*env)->MonitorEnter(env, pw_lock_object) != JNI_OK) {
@@ -396,15 +546,26 @@ cleanup:
}
if (pw_buf != NULL) free(pw_buf);
return jstr_username;
+#endif // UNIX
+
+#ifdef WINDOWS
+ THROW(env, "java/io/IOException",
+ "The function POSIX.getUserName() is not supported on Windows");
+ return NULL;
+#endif
}
/*
* static native String getGroupName(int gid);
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
*/
JNIEXPORT jstring JNICALL
-Java_org_apache_hadoop_io_nativeio_NativeIO_getGroupName(JNIEnv *env,
-jclass clazz, jint gid)
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024POSIX_getGroupName(
+ JNIEnv *env, jclass clazz, jint gid)
{
+#ifdef UNIX
int pw_lock_locked = 0;
if (pw_lock_object != NULL) {
@@ -458,14 +619,21 @@ cleanup:
}
if (pw_buf != NULL) free(pw_buf);
return jstr_groupname;
-}
+#endif // UNIX
+#ifdef WINDOWS
+ THROW(env, "java/io/IOException",
+ "The function POSIX.getUserName() is not supported on Windows");
+ return NULL;
+#endif
+}
/*
* Throw a java.IO.IOException, generating the message from errno.
*/
static void throw_ioe(JNIEnv* env, int errnum)
{
+#ifdef UNIX
char message[80];
jstring jstr_message;
@@ -490,9 +658,51 @@ static void throw_ioe(JNIEnv* env, int e
err:
if (jstr_message != NULL)
(*env)->ReleaseStringUTFChars(env, jstr_message, message);
-}
+#endif
+#ifdef WINDOWS
+ DWORD len = 0;
+ LPWSTR buffer = NULL;
+ const jchar* message = NULL;
+ jstring jstr_message = NULL;
+ jthrowable obj = NULL;
+
+ len = FormatMessageW(
+ FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
+ NULL, *(DWORD*) (&errnum), // reinterpret cast
+ MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
+ (LPWSTR) &buffer, 0, NULL);
+
+ if (len > 0)
+ {
+ message = (const jchar*) buffer;
+ }
+ else
+ {
+ message = (const jchar*) L"Unknown error.";
+ }
+
+ if ((jstr_message = (*env)->NewString(env, message, len)) == NULL)
+ goto err;
+ LocalFree(buffer);
+ buffer = NULL; // Set buffer to NULL to avoid double free
+ obj = (jthrowable)(*env)->NewObject(env, nioe_clazz, nioe_ctor,
+ jstr_message, errnum);
+ if (obj == NULL) goto err;
+
+ (*env)->Throw(env, obj);
+ return;
+
+err:
+ if (jstr_message != NULL)
+ (*env)->ReleaseStringChars(env, jstr_message, message);
+ LocalFree(buffer);
+ return;
+#endif
+}
+
+#ifdef UNIX
/*
* Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
*/
@@ -503,6 +713,104 @@ ssize_t get_pw_buflen() {
#endif
return (ret > 512) ? ret : 512;
}
+#endif
+
+
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method: getOwnerOnWindows
+ * Signature: (Ljava/io/FileDescriptor;)Ljava/lang/String;
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT jstring JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_getOwner
+ (JNIEnv *env, jclass clazz, jobject fd_object)
+{
+#ifdef UNIX
+ THROW(env, "java/io/IOException",
+ "The function Windows.getOwner() is not supported on Unix");
+ return NULL;
+#endif
+
+#ifdef WINDOWS
+ PSID pSidOwner = NULL;
+ PSECURITY_DESCRIPTOR pSD = NULL;
+ LPWSTR ownerName = (LPWSTR)NULL;
+ DWORD dwRtnCode = ERROR_SUCCESS;
+ jstring jstr_username = NULL;
+ HANDLE hFile = (HANDLE) fd_get(env, fd_object);
+ PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+ dwRtnCode = GetSecurityInfo(
+ hFile,
+ SE_FILE_OBJECT,
+ OWNER_SECURITY_INFORMATION,
+ &pSidOwner,
+ NULL,
+ NULL,
+ NULL,
+ &pSD);
+ if (dwRtnCode != ERROR_SUCCESS) {
+ throw_ioe(env, dwRtnCode);
+ goto cleanup;
+ }
+
+ dwRtnCode = GetAccntNameFromSid(pSidOwner, &ownerName);
+ if (dwRtnCode != ERROR_SUCCESS) {
+ throw_ioe(env, dwRtnCode);
+ goto cleanup;
+ }
+
+ jstr_username = (*env)->NewString(env, ownerName, (jsize) wcslen(ownerName));
+ if (jstr_username == NULL) goto cleanup;
+
+cleanup:
+ LocalFree(ownerName);
+ LocalFree(pSD);
+ return jstr_username;
+#endif
+}
+
+/*
+ * Class: org_apache_hadoop_io_nativeio_NativeIO_Windows
+ * Method: setFilePointer
+ * Signature: (Ljava/io/FileDescriptor;JJ)J
+ *
+ * The "00024" in the function name is an artifact of how JNI encodes
+ * special characters. U+0024 is '$'.
+ */
+JNIEXPORT jlong JNICALL
+Java_org_apache_hadoop_io_nativeio_NativeIO_00024Windows_setFilePointer
+ (JNIEnv *env, jclass clazz, jobject fd_object, jlong distanceToMove, jlong moveMethod)
+{
+#ifdef UNIX
+ THROW(env, "java/io/IOException",
+ "The function setFilePointer(FileDescriptor) is not supported on Unix");
+ return NULL;
+#endif
+
+#ifdef WINDOWS
+ DWORD distanceToMoveLow = (DWORD) distanceToMove;
+ LONG distanceToMoveHigh = (LONG) (distanceToMove >> 32);
+ DWORD distanceMovedLow = 0;
+ HANDLE hFile = (HANDLE) fd_get(env, fd_object);
+ PASS_EXCEPTIONS_GOTO(env, cleanup);
+
+ distanceMovedLow = SetFilePointer(hFile,
+ distanceToMoveLow, &distanceToMoveHigh, (DWORD) moveMethod);
+
+ if (distanceMovedLow == INVALID_SET_FILE_POINTER) {
+ throw_ioe(env, GetLastError());
+ return -1;
+ }
+
+cleanup:
+
+ return ((jlong) distanceToMoveHigh << 32) | (jlong) distanceMovedLow;
+#endif
+}
JNIEXPORT void JNICALL
Java_org_apache_hadoop_io_nativeio_NativeIO_renameTo0(JNIEnv *env,
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.c Wed Mar 6 19:15:18 2013
@@ -14,7 +14,7 @@
* See the License for the specific language governing permissions and
* limitations under the License.
*/
-
+
#include <jni.h>
#include "file_descriptor.h"
#include "org_apache_hadoop.h"
@@ -26,6 +26,10 @@ static jfieldID fd_descriptor;
// the no-argument constructor
static jmethodID fd_constructor;
+#ifdef WINDOWS
+// the internal field for the long handle
+static jfieldID fd_handle;
+#endif
void fd_init(JNIEnv* env)
{
@@ -37,6 +41,12 @@ void fd_init(JNIEnv* env)
fd_descriptor = (*env)->GetFieldID(env, fd_class, "fd", "I");
PASS_EXCEPTIONS(env);
+
+#ifdef WINDOWS
+ fd_handle = (*env)->GetFieldID(env, fd_class, "handle", "J");
+ PASS_EXCEPTIONS(env);
+#endif
+
fd_constructor = (*env)->GetMethodID(env, fd_class, "<init>", "()V");
}
@@ -46,9 +56,13 @@ void fd_deinit(JNIEnv *env) {
fd_class = NULL;
}
fd_descriptor = NULL;
+#ifdef WINDOWS
+ fd_handle = NULL;
+#endif
fd_constructor = NULL;
}
+#ifdef UNIX
/*
* Given an instance 'obj' of java.io.FileDescriptor, return the
* underlying fd, or throw if unavailable
@@ -71,4 +85,31 @@ jobject fd_create(JNIEnv *env, int fd) {
(*env)->SetIntField(env, obj, fd_descriptor, fd);
return obj;
-}
+}
+#endif
+
+#ifdef WINDOWS
+/*
+ * Given an instance 'obj' of java.io.FileDescriptor, return the
+ * underlying fd, or throw if unavailable
+ */
+long fd_get(JNIEnv* env, jobject obj) {
+ if (obj == NULL) {
+ THROW(env, "java/lang/NullPointerException",
+ "FileDescriptor object is null");
+ return -1;
+ }
+ return (long) (*env)->GetLongField(env, obj, fd_handle);
+}
+
+/*
+ * Create a FileDescriptor object corresponding to the given int fd
+ */
+jobject fd_create(JNIEnv *env, long fd) {
+ jobject obj = (*env)->NewObject(env, fd_class, fd_constructor);
+ PASS_EXCEPTIONS_RET(env, (jobject) NULL);
+
+ (*env)->SetLongField(env, obj, fd_handle, fd);
+ return obj;
+}
+#endif
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/file_descriptor.h Wed Mar 6 19:15:18 2013
@@ -18,11 +18,19 @@
#define FILE_DESCRIPTOR_H
#include <jni.h>
+#include "org_apache_hadoop.h"
void fd_init(JNIEnv *env);
void fd_deinit(JNIEnv *env);
+#ifdef UNIX
int fd_get(JNIEnv* env, jobject obj);
jobject fd_create(JNIEnv *env, int fd);
+#endif
+
+#ifdef WINDOWS
+long fd_get(JNIEnv* env, jobject obj);
+jobject fd_create(JNIEnv *env, long fd);
+#endif
#endif
Added: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c?rev=1453486&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c (added)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsMappingWin.c Wed Mar 6 19:15:18 2013
@@ -0,0 +1,131 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#include <jni.h>
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_security_JniBasedUnixGroupsMapping.h"
+
+#include <assert.h>
+#include <Windows.h>
+#include "winutils.h"
+
+static jobjectArray emptyGroups = NULL;
+
+/*
+ * Throw a java.io.IOException whose message is generated from the given
+ * Win32 error code (e.g. a GetLastError()/NetAPI result), not from errno.
+ */
+static void throw_ioexception(JNIEnv* env, DWORD errnum)
+{
+ DWORD len = 0;
+ LPSTR buffer = NULL;
+ const char* message = NULL;
+
+ // FORMAT_MESSAGE_ALLOCATE_BUFFER makes the system allocate 'buffer';
+ // it must be released with LocalFree below.
+ len = FormatMessageA(
+ FORMAT_MESSAGE_ALLOCATE_BUFFER | FORMAT_MESSAGE_FROM_SYSTEM,
+ NULL, errnum,
+ MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
+ (LPSTR*)&buffer, 0, NULL);
+
+ if (len > 0)
+ {
+ message = buffer;
+ }
+ else
+ {
+ // FormatMessageA failed (unknown code); fall back to a fixed message.
+ message = "Unknown error.";
+ }
+
+ THROW(env, "java/io/IOException", message);
+
+ // LocalFree(NULL) is a no-op, so this is safe when FormatMessageA failed.
+ LocalFree(buffer);
+
+ return;
+}
+
+/*
+ * JNI entry point: return the Windows local groups of user 'juser' as a
+ * Java String[]. On failure an exception is raised and the cached empty
+ * array is returned instead of NULL.
+ */
+JNIEXPORT jobjectArray JNICALL
+Java_org_apache_hadoop_security_JniBasedUnixGroupsMapping_getGroupForUser
+(JNIEnv *env, jobject jobj, jstring juser) {
+ const WCHAR *user = NULL;
+ jobjectArray jgroups = NULL;
+ DWORD dwRtnCode = ERROR_SUCCESS;
+
+ LPLOCALGROUP_USERS_INFO_0 groups = NULL;
+ LPLOCALGROUP_USERS_INFO_0 tmpGroups = NULL;
+ DWORD ngroups = 0;
+
+ // NOTE(review): 'i' is signed but compared against the DWORD 'ngroups'
+ // below; warning C4018 is suppressed file-wide in org_apache_hadoop.h.
+ int i;
+
+ // Lazily create a process-wide global ref to an empty String[], used as
+ // the error-path return value.
+ if (emptyGroups == NULL) {
+ jobjectArray lEmptyGroups = (jobjectArray)(*env)->NewObjectArray(env, 0,
+ (*env)->FindClass(env, "java/lang/String"), NULL);
+ if (lEmptyGroups == NULL) {
+ goto cleanup;
+ }
+ emptyGroups = (*env)->NewGlobalRef(env, lEmptyGroups);
+ if (emptyGroups == NULL) {
+ goto cleanup;
+ }
+ }
+ user = (*env)->GetStringChars(env, juser, NULL);
+ if (user == NULL) {
+ THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for user buffer");
+ goto cleanup;
+ }
+
+ // winutils helper; allocates 'groups' via NetAPI, freed in cleanup.
+ dwRtnCode = GetLocalGroupsForUser(user, &groups, &ngroups);
+ if (dwRtnCode != ERROR_SUCCESS) {
+ throw_ioexception(env, dwRtnCode);
+ goto cleanup;
+ }
+
+ jgroups = (jobjectArray)(*env)->NewObjectArray(env, ngroups,
+ (*env)->FindClass(env, "java/lang/String"), NULL);
+ if (jgroups == NULL) {
+ THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for group buffer");
+ goto cleanup;
+ }
+
+ // use a tmp pointer to iterate over groups and keep the original pointer
+ // for memory deallocation
+ tmpGroups = groups;
+
+ // fill the output string array
+ // NOTE(review): if NewString fails mid-loop, dwRtnCode is still
+ // ERROR_SUCCESS, so the partially-filled 'jgroups' is returned with the
+ // OutOfMemoryError pending — confirm callers tolerate this. Per-iteration
+ // 'jgrp' local refs are not deleted; fine for small group counts, but a
+ // DeleteLocalRef per iteration would be safer for very large ones.
+ for (i = 0; i < ngroups; i++) {
+ jsize groupStringLen = (jsize)wcslen(tmpGroups->lgrui0_name);
+ jstring jgrp = (*env)->NewString(env, tmpGroups->lgrui0_name, groupStringLen);
+ if (jgrp == NULL) {
+ THROW(env, "java/lang/OutOfMemoryError", "Couldn't allocate memory for groups buffer");
+ goto cleanup;
+ }
+ (*env)->SetObjectArrayElement(env, jgroups, i, jgrp);
+ // move on to the next group
+ tmpGroups++;
+ }
+
+cleanup:
+ if (groups != NULL) NetApiBufferFree(groups);
+
+ if (user != NULL) {
+ (*env)->ReleaseStringChars(env, juser, user);
+ }
+
+ if (dwRtnCode == ERROR_SUCCESS) {
+ return jgroups;
+ } else {
+ return emptyGroups;
+ }
+}
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCodeLoader.c Wed Mar 6 19:15:18 2013
@@ -16,7 +16,11 @@
* limitations under the License.
*/
+#include "org_apache_hadoop.h"
+
+#ifdef UNIX
#include "config.h"
+#endif // UNIX
#include <jni.h>
@@ -28,4 +32,4 @@ JNIEXPORT jboolean JNICALL Java_org_apac
#else
return JNI_FALSE;
#endif
-}
+}
\ No newline at end of file
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c Wed Mar 6 19:15:18 2013
@@ -16,18 +16,22 @@
* limitations under the License.
*/
-#include <arpa/inet.h>
+#include "org_apache_hadoop.h"
+#include "org_apache_hadoop_util_NativeCrc32.h"
+
#include <assert.h>
-#include <inttypes.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
-#include <unistd.h>
+#ifdef UNIX
+#include <inttypes.h>
+#include <arpa/inet.h>
+#include <unistd.h>
#include "config.h"
-#include "org_apache_hadoop.h"
-#include "org_apache_hadoop_util_NativeCrc32.h"
#include "gcc_optimizations.h"
+#endif // UNIX
+
#include "bulk_crc32.h"
static void throw_checksum_exception(JNIEnv *env,
@@ -36,6 +40,9 @@ static void throw_checksum_exception(JNI
char message[1024];
jstring jstr_message;
char *filename;
+ jclass checksum_exception_clazz;
+ jmethodID checksum_exception_ctor;
+ jthrowable obj;
// Get filename as C string, or "null" if not provided
if (j_filename == NULL) {
@@ -50,28 +57,38 @@ static void throw_checksum_exception(JNI
}
// Format error message
+#ifdef WINDOWS
+ _snprintf_s(
+ message,
+ sizeof(message),
+ _TRUNCATE,
+ "Checksum error: %s at %I64d exp: %d got: %d",
+ filename, pos, expected_crc, got_crc);
+#else
snprintf(message, sizeof(message),
"Checksum error: %s at %"PRId64" exp: %"PRId32" got: %"PRId32,
filename, pos, expected_crc, got_crc);
+#endif // WINDOWS
+
if ((jstr_message = (*env)->NewStringUTF(env, message)) == NULL) {
goto cleanup;
}
// Throw exception
- jclass checksum_exception_clazz = (*env)->FindClass(
+ checksum_exception_clazz = (*env)->FindClass(
env, "org/apache/hadoop/fs/ChecksumException");
if (checksum_exception_clazz == NULL) {
goto cleanup;
}
- jmethodID checksum_exception_ctor = (*env)->GetMethodID(env,
+ checksum_exception_ctor = (*env)->GetMethodID(env,
checksum_exception_clazz, "<init>",
"(Ljava/lang/String;J)V");
if (checksum_exception_ctor == NULL) {
goto cleanup;
}
- jthrowable obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
+ obj = (jthrowable)(*env)->NewObject(env, checksum_exception_clazz,
checksum_exception_ctor, jstr_message, pos);
if (obj == NULL) goto cleanup;
@@ -103,6 +120,14 @@ JNIEXPORT void JNICALL Java_org_apache_h
jobject j_data, jint data_offset, jint data_len,
jstring j_filename, jlong base_pos)
{
+ uint8_t *sums_addr;
+ uint8_t *data_addr;
+ uint32_t *sums;
+ uint8_t *data;
+ int crc_type;
+ crc32_error_t error_data;
+ int ret;
+
if (unlikely(!j_sums || !j_data)) {
THROW(env, "java/lang/NullPointerException",
"input ByteBuffers must not be null");
@@ -110,8 +135,8 @@ JNIEXPORT void JNICALL Java_org_apache_h
}
// Convert direct byte buffers to C pointers
- uint8_t *sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
- uint8_t *data_addr = (*env)->GetDirectBufferAddress(env, j_data);
+ sums_addr = (*env)->GetDirectBufferAddress(env, j_sums);
+ data_addr = (*env)->GetDirectBufferAddress(env, j_data);
if (unlikely(!sums_addr || !data_addr)) {
THROW(env, "java/lang/IllegalArgumentException",
@@ -129,16 +154,15 @@ JNIEXPORT void JNICALL Java_org_apache_h
return;
}
- uint32_t *sums = (uint32_t *)(sums_addr + sums_offset);
- uint8_t *data = data_addr + data_offset;
+ sums = (uint32_t *)(sums_addr + sums_offset);
+ data = data_addr + data_offset;
// Convert to correct internal C constant for CRC type
- int crc_type = convert_java_crc_type(env, j_crc_type);
+ crc_type = convert_java_crc_type(env, j_crc_type);
if (crc_type == -1) return; // exception already thrown
// Setup complete. Actually verify checksums.
- crc32_error_t error_data;
- int ret = bulk_verify_crc(data, data_len, sums, crc_type,
+ ret = bulk_verify_crc(data, data_len, sums, crc_type,
bytes_per_checksum, &error_data);
if (likely(ret == CHECKSUMS_VALID)) {
return;
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c Wed Mar 6 19:15:18 2013
@@ -21,25 +21,31 @@
* All rights reserved. Use of this source code is governed by a
* BSD-style license that can be found in the LICENSE file.
*/
+
+#include "org_apache_hadoop.h"
+
#include <assert.h>
-#include <arpa/inet.h>
#include <errno.h>
#include <stdint.h>
+
+#ifdef UNIX
+#include <arpa/inet.h>
#include <unistd.h>
+#endif // UNIX
#include "crc32_zlib_polynomial_tables.h"
#include "crc32c_tables.h"
#include "bulk_crc32.h"
#include "gcc_optimizations.h"
-#ifndef __FreeBSD__
+#if (!defined(__FreeBSD__) && !defined(WINDOWS))
#define USE_PIPELINED
#endif
#define CRC_INITIAL_VAL 0xffffffff
typedef uint32_t (*crc_update_func_t)(uint32_t, const uint8_t *, size_t);
-static inline uint32_t crc_val(uint32_t crc);
+static uint32_t crc_val(uint32_t crc);
static uint32_t crc32_zlib_sb8(uint32_t crc, const uint8_t *buf, size_t length);
static uint32_t crc32c_sb8(uint32_t crc, const uint8_t *buf, size_t length);
@@ -187,7 +193,7 @@ return_crc_error:
/**
* Extract the final result of a CRC
*/
-static inline uint32_t crc_val(uint32_t crc) {
+uint32_t crc_val(uint32_t crc) {
return ~crc;
}
@@ -200,11 +206,13 @@ static uint32_t crc32c_sb8(uint32_t crc,
uint32_t end_bytes = length - running_length;
int li;
for (li=0; li < running_length/8; li++) {
+ uint32_t term1;
+ uint32_t term2;
crc ^= *(uint32_t *)buf;
buf += 4;
- uint32_t term1 = CRC32C_T8_7[crc & 0x000000FF] ^
+ term1 = CRC32C_T8_7[crc & 0x000000FF] ^
CRC32C_T8_6[(crc >> 8) & 0x000000FF];
- uint32_t term2 = crc >> 16;
+ term2 = crc >> 16;
crc = term1 ^
CRC32C_T8_5[term2 & 0x000000FF] ^
CRC32C_T8_4[(term2 >> 8) & 0x000000FF];
@@ -234,11 +242,13 @@ static uint32_t crc32_zlib_sb8(
uint32_t end_bytes = length - running_length;
int li;
for (li=0; li < running_length/8; li++) {
+ uint32_t term1;
+ uint32_t term2;
crc ^= *(uint32_t *)buf;
buf += 4;
- uint32_t term1 = CRC32_T8_7[crc & 0x000000FF] ^
+ term1 = CRC32_T8_7[crc & 0x000000FF] ^
CRC32_T8_6[(crc >> 8) & 0x000000FF];
- uint32_t term2 = crc >> 16;
+ term2 = crc >> 16;
crc = term1 ^
CRC32_T8_5[term2 & 0x000000FF] ^
CRC32_T8_4[(term2 >> 8) & 0x000000FF];
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h Wed Mar 6 19:15:18 2013
@@ -19,7 +19,10 @@
#define BULK_CRC32_H_INCLUDED
#include <stdint.h>
+
+#ifdef UNIX
#include <unistd.h> /* for size_t */
+#endif // UNIX
// Constants for different CRC algorithms
#define CRC32C_POLYNOMIAL 1
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org_apache_hadoop.h Wed Mar 6 19:15:18 2013
@@ -17,19 +17,22 @@
*/
/**
- * This file includes some common utilities
+ * This file includes some common utilities
* for all native code used in hadoop.
*/
-
+
#if !defined ORG_APACHE_HADOOP_H
#define ORG_APACHE_HADOOP_H
-#include <dlfcn.h>
-#include <jni.h>
-
-#include "config.h"
+#if defined(_WIN32)
+#undef UNIX
+#define WINDOWS
+#else
+#undef WINDOWS
+#define UNIX
+#endif
-/* A helper macro to 'throw' a java exception. */
+/* A helper macro to 'throw' a java exception. */
#define THROW(env, exception_name, message) \
{ \
jclass ecls = (*env)->FindClass(env, exception_name); \
@@ -55,13 +58,21 @@
if ((*env)->ExceptionCheck(env)) return (ret); \
}
-/**
- * A helper function to dlsym a 'symbol' from a given library-handle.
- *
+/**
+ * Unix definitions
+ */
+#ifdef UNIX
+#include <config.h>
+#include <dlfcn.h>
+#include <jni.h>
+
+/**
+ * A helper function to dlsym a 'symbol' from a given library-handle.
+ *
* @param env jni handle to report contingencies.
* @param handle handle to the dlopen'ed library.
* @param symbol symbol to load.
- * @return returns the address where the symbol is loaded in memory,
+ * @return returns the address where the symbol is loaded in memory,
* <code>NULL</code> on error.
*/
static __attribute__ ((unused))
@@ -84,6 +95,76 @@ void *do_dlsym(JNIEnv *env, void *handle
if ((func_ptr = do_dlsym(env, handle, symbol)) == NULL) { \
return; \
}
+#endif
+// Unix part end
+
+
+/**
+ * Windows definitions
+ */
+#ifdef WINDOWS
+
+/* Force using Unicode throughout the code */
+#ifndef UNICODE
+#define UNICODE
+#endif
+
+/* Microsoft C Compiler does not support the C99 inline keyword.
+   No trailing semicolon: 'inline' must expand to a bare keyword, otherwise
+   every 'static inline' declaration becomes a syntax error. */
+#ifndef __cplusplus
+#define inline __inline
+#endif // __cplusplus
+
+/* Optimization macros supported by GCC but for which there is no
+   direct equivalent in the Microsoft C compiler */
+#define likely(_c)  (_c)
+#define unlikely(_c) (_c)
+
+/* Disable certain warnings in the native CRC32 code. */
+#pragma warning(disable:4018) // Signed/unsigned mismatch.
+#pragma warning(disable:4244) // Possible loss of data in conversion.
+#pragma warning(disable:4267) // Possible loss of data.
+#pragma warning(disable:4996) // Use of deprecated function.
+
+#include <Windows.h>
+#include <stdio.h>
+#include <jni.h>
+
+/* Variadic so that format strings with any number of arguments work;
+   MSVC swallows the trailing comma when __VA_ARGS__ is empty. */
+#define snprintf(str, size, format, ...) \
+  _snprintf_s((str), (size), _TRUNCATE, (format), __VA_ARGS__)
+
+/* A helper macro to dlsym the requisite dynamic symbol and bail-out on error. */
+#define LOAD_DYNAMIC_SYMBOL(func_type, func_ptr, env, handle, symbol) \
+  if ((func_ptr = (func_type) do_dlsym(env, handle, symbol)) == NULL) { \
+    return; \
+  }
+
+/**
+ * A helper function to dynamic load a 'symbol' from a given library-handle.
+ *
+ * @param env jni handle to report contingencies.
+ * @param handle handle to the dynamic library.
+ * @param symbol symbol to load.
+ * @return returns the address where the symbol is loaded in memory,
+ *         <code>NULL</code> on error.
+ */
+static FARPROC WINAPI do_dlsym(JNIEnv *env, HMODULE handle, LPCSTR symbol) {
+  FARPROC func_ptr = NULL;
+
+  // Cannot use THROW without a valid JNIEnv: it dereferences 'env'.
+  if (env == NULL) {
+    return NULL;
+  }
+  if (handle == NULL || symbol == NULL) {
+    // Pass a real message; NewStringUTF(NULL) inside THROW is unsafe.
+    THROW(env, "java/lang/InternalError", "null handle or symbol");
+    return NULL;
+  }
+
+  func_ptr = GetProcAddress(handle, symbol);
+  if (func_ptr == NULL)
+  {
+    THROW(env, "java/lang/UnsatisfiedLinkError", symbol);
+  }
+  return func_ptr;
+}
+#endif
+// Windows part end
+
#define LOCK_CLASS(env, clazz, classname) \
if ((*env)->MonitorEnter(env, clazz) != 0) { \
Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c?rev=1453486&r1=1453485&r2=1453486&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/test/org/apache/hadoop/util/test_bulk_crc32.c Wed Mar 6 19:15:18 2013
@@ -16,6 +16,8 @@
* limitations under the License.
*/
+#include "org_apache_hadoop.h"
+
#include "bulk_crc32.h"
#include <stdint.h>