hadoop-common-commits mailing list archives

Site index · List index
Message view « Date » · « Thread »
Top « Date » · « Thread »
From bo...@apache.org
Subject svn commit: r1401321 - in /hadoop/common/trunk/hadoop-common-project/hadoop-common: ./ src/ src/main/java/org/apache/hadoop/fs/ src/main/native/src/org/apache/hadoop/io/nativeio/ src/main/native/src/org/apache/hadoop/security/ src/main/native/src/org/a...
Date Tue, 23 Oct 2012 15:32:26 GMT
Author: bobby
Date: Tue Oct 23 15:32:25 2012
New Revision: 1401321

URL: http://svn.apache.org/viewvc?rev=1401321&view=rev
Log:
HADOOP-8811. Compile hadoop native library in FreeBSD (Radim Kolar via bobby)

Modified:
    hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
    hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt?rev=1401321&r1=1401320&r2=1401321&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/CHANGES.txt Tue Oct 23 15:32:25
2012
@@ -1094,6 +1094,9 @@ Release 0.23.5 - UNRELEASED
     HADOOP-8906. paths with multiple globs are unreliable. (Daryn Sharp via
     jlowe)
 
+    HADOOP-8811. Compile hadoop native library in FreeBSD (Radim Kolar via
+    bobby)
+
 Release 0.23.4 - UNRELEASED
 
   INCOMPATIBLE CHANGES

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt?rev=1401321&r1=1401320&r2=1401321&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt (original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/CMakeLists.txt Tue Oct 23
15:32:25 2012
@@ -67,6 +67,9 @@ macro(set_find_shared_library_version LV
     IF(${CMAKE_SYSTEM_NAME} MATCHES "Darwin")
         # Mac OS uses .dylib
         SET(CMAKE_FIND_LIBRARY_SUFFIXES ".${LVERS}.dylib")
+    ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "FreeBSD")
+        # FreeBSD always has .so installed.
+        SET(CMAKE_FIND_LIBRARY_SUFFIXES ".so")
     ELSEIF(${CMAKE_SYSTEM_NAME} MATCHES "Windows")
         # Windows doesn't support finding shared libraries by version.
     ELSE()
@@ -95,8 +98,10 @@ GET_FILENAME_COMPONENT(HADOOP_ZLIB_LIBRA
 
 INCLUDE(CheckFunctionExists)
 INCLUDE(CheckCSourceCompiles)
+INCLUDE(CheckLibraryExists)
 CHECK_FUNCTION_EXISTS(sync_file_range HAVE_SYNC_FILE_RANGE)
 CHECK_FUNCTION_EXISTS(posix_fadvise HAVE_POSIX_FADVISE)
+CHECK_LIBRARY_EXISTS(dl dlopen "" NEED_LINK_DL)
 
 SET(STORED_CMAKE_FIND_LIBRARY_SUFFIXES CMAKE_FIND_LIBRARY_SUFFIXES)
 set_find_shared_library_version("1")
@@ -159,6 +164,9 @@ add_dual_library(hadoop
     ${D}/util/NativeCrc32.c
     ${D}/util/bulk_crc32.c
 )
+if (NEED_LINK_DL)
+   set(LIB_DL dl)
+endif (NEED_LINK_DL)
 
 IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux")
     #
@@ -171,7 +179,7 @@ IF (${CMAKE_SYSTEM_NAME} MATCHES "Linux"
 ENDIF()
 
 target_link_dual_libraries(hadoop
-    dl
+    ${LIB_DL}
     ${JAVA_JVM_LIBRARY}
 )
 SET(LIBHADOOP_VERSION "1.0.0")

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java?rev=1401321&r1=1401320&r2=1401321&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
Tue Oct 23 15:32:25 2012
@@ -43,7 +43,8 @@ public class HardLink { 
     OS_TYPE_UNIX,
     OS_TYPE_WINXP,
     OS_TYPE_SOLARIS,
-    OS_TYPE_MAC
+    OS_TYPE_MAC,
+    OS_TYPE_FREEBSD
   }
   
   public static OSType osType;
@@ -63,7 +64,7 @@ public class HardLink { 
       getHardLinkCommand = new HardLinkCGUnix();
       //override getLinkCountCommand for the particular Unix variant
       //Linux is already set as the default - {"stat","-c%h", null}
-      if (osType == OSType.OS_TYPE_MAC) {
+      if (osType == OSType.OS_TYPE_MAC || osType == OSType.OS_TYPE_FREEBSD) {
         String[] linkCountCmdTemplate = {"/usr/bin/stat","-f%l", null};
         HardLinkCGUnix.setLinkCountCmdTemplate(linkCountCmdTemplate);
       } else if (osType == OSType.OS_TYPE_SOLARIS) {
@@ -95,6 +96,9 @@ public class HardLink { 
     else if (osName.contains("Mac")) {
        return OSType.OS_TYPE_MAC;
     }
+    else if (osName.contains("FreeBSD")) {
+       return OSType.OS_TYPE_FREEBSD;
+    }
     else {
       return OSType.OS_TYPE_UNIX;
     }

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c?rev=1401321&r1=1401320&r2=1401321&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/nativeio/NativeIO.c
Tue Oct 23 15:32:25 2012
@@ -254,7 +254,11 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 
   int err = 0;
   if ((err = posix_fadvise(fd, (off_t)offset, (off_t)len, flags))) {
+#ifdef __FreeBSD__
+    throw_ioe(env, errno);
+#else
     throw_ioe(env, err);
+#endif
   }
 #endif
 }
@@ -310,6 +314,22 @@ Java_org_apache_hadoop_io_nativeio_Nativ
 #endif
 }
 
+#ifdef __FreeBSD__
+static int toFreeBSDFlags(int flags)
+{
+  int rc = flags & 03;
+  if ( flags &  0100 ) rc |= O_CREAT;
+  if ( flags &  0200 ) rc |= O_EXCL;
+  if ( flags &  0400 ) rc |= O_NOCTTY;
+  if ( flags & 01000 ) rc |= O_TRUNC;
+  if ( flags & 02000 ) rc |= O_APPEND;
+  if ( flags & 04000 ) rc |= O_NONBLOCK;
+  if ( flags &010000 ) rc |= O_SYNC;
+  if ( flags &020000 ) rc |= O_ASYNC;
+  return rc;
+}
+#endif
+
 /*
  * public static native FileDescriptor open(String path, int flags, int mode);
  */
@@ -318,6 +338,9 @@ Java_org_apache_hadoop_io_nativeio_Nativ
   JNIEnv *env, jclass clazz, jstring j_path,
   jint flags, jint mode)
 {
+#ifdef __FreeBSD__
+  flags = toFreeBSDFlags(flags);
+#endif
   jobject ret = NULL;
 
   const char *path = (*env)->GetStringUTFChars(env, j_path, NULL);
@@ -399,7 +422,7 @@ err:
  * Determine how big a buffer we need for reentrant getpwuid_r and getgrnam_r
  */
 ssize_t get_pw_buflen() {
-  size_t ret = 0;
+  long ret = 0;
   #ifdef _SC_GETPW_R_SIZE_MAX
   ret = sysconf(_SC_GETPW_R_SIZE_MAX);
   #endif

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c?rev=1401321&r1=1401320&r2=1401321&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
Tue Oct 23 15:32:25 2012
@@ -46,6 +46,7 @@ JNIEXPORT jobjectArray JNICALL 
 Java_org_apache_hadoop_security_JniBasedUnixGroupsNetgroupMapping_getUsersForNetgroupJNI
 (JNIEnv *env, jobject jobj, jstring jgroup) {
   UserList *userListHead = NULL;
+  UserList *current = NULL;
   int       userListSize = 0;
 
   // pointers to free at the end
@@ -72,8 +73,10 @@ Java_org_apache_hadoop_security_JniBased
   // was successful or not (as long as it was called we need to call
   // endnetgrent)
   setnetgrentCalledFlag = 1;
+#ifndef __FreeBSD__
   if(setnetgrent(cgroup) == 1) {
-    UserList *current = NULL;
+#endif
+    current = NULL;
     // three pointers are for host, user, domain, we only care
     // about user now
     char *p[3];
@@ -87,7 +90,9 @@ Java_org_apache_hadoop_security_JniBased
         userListSize++;
       }
     }
+#ifndef __FreeBSD__
   }
+#endif
 
   //--------------------------------------------------
   // build return data (java array)
@@ -101,7 +106,7 @@ Java_org_apache_hadoop_security_JniBased
     goto END;
   }
 
-  UserList * current = NULL;
+  current = NULL;
 
   // note that the loop iterates over list but also over array (i)
   int i = 0;

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c?rev=1401321&r1=1401320&r2=1401321&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/security/getGroup.c
Tue Oct 23 15:32:25 2012
@@ -78,7 +78,7 @@ int getGroupIDList(const char *user, int
  */
 int getGroupDetails(gid_t group, char **grpBuf) {
   struct group * grp = NULL;
-  size_t currBufferSize = sysconf(_SC_GETGR_R_SIZE_MAX);
+  long currBufferSize = sysconf(_SC_GETGR_R_SIZE_MAX);
   if (currBufferSize < 1024) {
     currBufferSize = 1024;
   }
@@ -123,7 +123,7 @@ int getGroupDetails(gid_t group, char **
  */
 int getPW(const char *user, char **pwbuf) {
   struct passwd *pwbufp = NULL;
-  size_t currBufferSize = sysconf(_SC_GETPW_R_SIZE_MAX);
+  long currBufferSize = sysconf(_SC_GETPW_R_SIZE_MAX);
   if (currBufferSize < 1024) {
     currBufferSize = 1024;
   }

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c?rev=1401321&r1=1401320&r2=1401321&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
Tue Oct 23 15:32:25 2012
@@ -32,7 +32,9 @@
 #include "bulk_crc32.h"
 #include "gcc_optimizations.h"
 
+#ifndef __FreeBSD__
 #define USE_PIPELINED
+#endif
 
 #define CRC_INITIAL_VAL 0xffffffff
 
@@ -260,7 +262,7 @@ static uint32_t crc32_zlib_sb8(
 // Begin code for SSE4.2 specific hardware support of CRC32C
 ///////////////////////////////////////////////////////////////////////////
 
-#if (defined(__amd64__) || defined(__i386)) && defined(__GNUC__)
+#if (defined(__amd64__) || defined(__i386)) && defined(__GNUC__) && !defined(__FreeBSD__)
 #  define SSE42_FEATURE_BIT (1 << 20)
 #  define CPUID_FEATURES 1
 /**

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java?rev=1401321&r1=1401320&r2=1401321&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestHardLink.java
Tue Oct 23 15:32:25 2012
@@ -364,8 +364,12 @@ public class TestHardLink {
     callCount = createHardLinkMult(src, fileNames, tgt_mult, maxLength);
     //check the request was completed in exactly two "chunks"
     assertEquals(2, callCount);
+    String[] tgt_multNames = tgt_mult.list();
+    //sort directory listings before comparison
+    Arrays.sort(fileNames);
+    Arrays.sort(tgt_multNames);
     //and check the results were as expected in the dir tree
-    assertTrue(Arrays.deepEquals(fileNames, tgt_mult.list()));
+    assertArrayEquals(fileNames, tgt_multNames);
     
     //Test the case where maxlength is too small even for one filename.
     //It should go ahead and try the single files.
@@ -382,8 +386,12 @@ public class TestHardLink {
         maxLength);
     //should go ahead with each of the three single file names
     assertEquals(3, callCount);
-    //check the results were as expected in the dir tree
-    assertTrue(Arrays.deepEquals(fileNames, tgt_mult.list()));
+    tgt_multNames = tgt_mult.list();
+    //sort directory listings before comparison
+    Arrays.sort(fileNames);
+    Arrays.sort(tgt_multNames);
+    //and check the results were as expected in the dir tree
+    assertArrayEquals(fileNames, tgt_multNames);
   }
   
   /*

Modified: hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java?rev=1401321&r1=1401320&r2=1401321&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
(original)
+++ hadoop/common/trunk/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java
Tue Oct 23 15:32:25 2012
@@ -224,7 +224,10 @@ public class TestNativeIO {
       // we should just skip the unit test on machines where we don't
       // have fadvise support
       assumeTrue(false);
-    } finally {
+    } catch (NativeIOException nioe) {
+      // ignore this error as FreeBSD returns EBADF even if length is zero
+    }
+      finally {
       fis.close();
     }
 



Mime
View raw message