summaryrefslogtreecommitdiff
path: root/devel/hadoop/files
diff options
context:
space:
mode:
Diffstat (limited to 'devel/hadoop/files')
-rw-r--r--devel/hadoop/files/datanode.in72
-rw-r--r--devel/hadoop/files/hadoop.in7
-rw-r--r--devel/hadoop/files/jobtracker.in72
-rw-r--r--devel/hadoop/files/namenode.in72
-rw-r--r--devel/hadoop/files/patch-build.xml80
-rw-r--r--devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c11
-rw-r--r--devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c11
-rw-r--r--devel/hadoop/files/patch-src__native__Makefile.in105
-rw-r--r--devel/hadoop/files/patch-src__native__configure11
-rw-r--r--devel/hadoop/files/patch-src__native__configure.ac13
-rw-r--r--devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c11
-rw-r--r--devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c12
-rw-r--r--devel/hadoop/files/pkg-deinstall.in14
-rw-r--r--devel/hadoop/files/pkg-install.in55
-rw-r--r--devel/hadoop/files/secondarynamenode.in72
-rw-r--r--devel/hadoop/files/tasktracker.in72
16 files changed, 690 insertions, 0 deletions
diff --git a/devel/hadoop/files/datanode.in b/devel/hadoop/files/datanode.in
new file mode 100644
index 000000000000..402ecc228531
--- /dev/null
+++ b/devel/hadoop/files/datanode.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: datanode
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# datanode_enable (bool): Set to NO by default.
+# Set it to YES to enable datanode.
+# datanode_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# datanode user.
+# datanode_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# datanode_log_dir (str): Unset by default.
+# datanode_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=datanode
+rcvar=datanode_enable
+
+load_rc_config "${name}"
+
+: ${datanode_enable:=NO}
+: ${datanode_user:=%%HADOOP_USER%%}
+: ${datanode_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start datanode'
+
+start_precmd=datanode_init
+stop_precmd=datanode_init
+stop_cmd=datanode_stop
+
+
+datanode_init()
+{
+ if [ -n "${datanode_java_home}" ]
+ then
+ export JAVA_HOME="${datanode_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${datanode_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${datanode_log_dir}
+ fi
+
+ if [ -n "${datanode_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${datanode_pid_dir}
+ fi
+
+ install -d -m 755 -o ${datanode_user} -g ${datanode_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${datanode_user} -g ${datanode_group} ${HADOOP_LOG_DIR}
+}
+
+datanode_stop ()
+{
+ su -m ${datanode_user} -c "${command} --config ${HADOOP_CONF_DIR} stop datanode"
+}
+
+run_rc_command "$1"
diff --git a/devel/hadoop/files/hadoop.in b/devel/hadoop/files/hadoop.in
new file mode 100644
index 000000000000..269b9821332b
--- /dev/null
+++ b/devel/hadoop/files/hadoop.in
@@ -0,0 +1,7 @@
+#!/usr/bin/env bash
+for i in %%HADOOP_ETC%%/envvars.d/*.env
+do
+ . ${i}
+done
+
+exec %%HADOOP_HOME%%/bin/hadoop "$@"
diff --git a/devel/hadoop/files/jobtracker.in b/devel/hadoop/files/jobtracker.in
new file mode 100644
index 000000000000..20313e8dd661
--- /dev/null
+++ b/devel/hadoop/files/jobtracker.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: jobtracker
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# jobtracker_enable (bool): Set to NO by default.
+# Set it to YES to enable jobtracker.
+# jobtracker_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# jobtracker user.
+# jobtracker_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# jobtracker_log_dir (str): Unset by default.
+# jobtracker_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=jobtracker
+rcvar=jobtracker_enable
+
+load_rc_config "${name}"
+
+: ${jobtracker_enable:=NO}
+: ${jobtracker_user:=%%HADOOP_USER%%}
+: ${jobtracker_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start jobtracker'
+
+start_precmd=jobtracker_init
+stop_precmd=jobtracker_init
+stop_cmd=jobtracker_stop
+
+
+jobtracker_init()
+{
+ if [ -n "${jobtracker_java_home}" ]
+ then
+ export JAVA_HOME="${jobtracker_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${jobtracker_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${jobtracker_log_dir}
+ fi
+
+ if [ -n "${jobtracker_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${jobtracker_pid_dir}
+ fi
+
+ install -d -m 755 -o ${jobtracker_user} -g ${jobtracker_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${jobtracker_user} -g ${jobtracker_group} ${HADOOP_LOG_DIR}
+}
+
+jobtracker_stop ()
+{
+ su -m ${jobtracker_user} -c "${command} --config ${HADOOP_CONF_DIR} stop jobtracker"
+}
+
+run_rc_command "$1"
diff --git a/devel/hadoop/files/namenode.in b/devel/hadoop/files/namenode.in
new file mode 100644
index 000000000000..dc48bccc50b2
--- /dev/null
+++ b/devel/hadoop/files/namenode.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: namenode
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# namenode_enable (bool): Set to NO by default.
+# Set it to YES to enable namenode.
+# namenode_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# namenode user.
+# namenode_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# namenode_log_dir (str): Unset by default.
+# namenode_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=namenode
+rcvar=namenode_enable
+
+load_rc_config "${name}"
+
+: ${namenode_enable:=NO}
+: ${namenode_user:=%%HADOOP_USER%%}
+: ${namenode_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start namenode'
+
+start_precmd=namenode_init
+stop_precmd=namenode_init
+stop_cmd=namenode_stop
+
+
+namenode_init()
+{
+ if [ -n "${namenode_java_home}" ]
+ then
+ export JAVA_HOME="${namenode_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${namenode_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${namenode_log_dir}
+ fi
+
+ if [ -n "${namenode_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${namenode_pid_dir}
+ fi
+
+ install -d -m 755 -o ${namenode_user} -g ${namenode_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${namenode_user} -g ${namenode_group} ${HADOOP_LOG_DIR}
+}
+
+namenode_stop ()
+{
+ su -m ${namenode_user} -c "${command} --config ${HADOOP_CONF_DIR} stop namenode"
+}
+
+run_rc_command "$1"
diff --git a/devel/hadoop/files/patch-build.xml b/devel/hadoop/files/patch-build.xml
new file mode 100644
index 000000000000..ef61c7ce5845
--- /dev/null
+++ b/devel/hadoop/files/patch-build.xml
@@ -0,0 +1,80 @@
+--- build.xml.orig 2011-05-04 08:30:16.000000000 +0200
++++ build.xml 2011-08-08 00:08:22.000000000 +0200
+@@ -372,7 +372,7 @@
+ <!-- ====================================================== -->
+ <!-- Compile the Java files -->
+ <!-- ====================================================== -->
+- <target name="record-parser" depends="init" if="javacc.home">
++ <target name="record-parser" if="javacc.home">
+ <javacc
+ target="${core.src.dir}/org/apache/hadoop/record/compiler/generated/rcc.jj"
+ outputdirectory="${core.src.dir}/org/apache/hadoop/record/compiler/generated"
+@@ -539,7 +539,7 @@
+ </antcall>
+ </target>
+
+- <target name="compile-core-native" depends="compile-core-classes"
++ <target name="compile-core-native"
+ if="compile.native">
+
+ <mkdir dir="${build.native}/lib"/>
+@@ -1669,7 +1669,7 @@
+ <!-- librecordio targets. -->
+ <!-- ================================================================== -->
+
+- <target name="compile-librecordio" depends="init" if="librecordio" >
++ <target name="compile-librecordio" if="librecordio" >
+ <mkdir dir="${build.librecordio}"/>
+ <exec dir="${librecordio.src}" executable="${make.cmd}" failonerror="true">
+ <env key="XERCESCROOT" value="${xercescroot}"/>
+@@ -1703,7 +1703,7 @@
+ </chmod>
+ </target>
+
+- <target name="create-c++-configure" depends="init" if="compile.c++">
++ <target name="create-c++-configure" if="compile.c++">
+ <exec executable="autoreconf" dir="${c++.utils.src}" searchpath="yes"
+ failonerror="yes">
+ <arg value="-if"/>
+@@ -1726,7 +1726,7 @@
+ </exec>
+ </target>
+
+- <target name="check-c++-makefiles" depends="init" if="compile.c++">
++ <target name="check-c++-makefiles" if="compile.c++">
+ <condition property="need.c++.utils.makefile">
+ <not> <available file="${build.c++.utils}/Makefile"/> </not>
+ </condition>
+@@ -1747,7 +1747,7 @@
+ </condition>
+ </target>
+
+- <target name="check-c++-makefile-libhdfs" depends="init,check-c++-libhdfs" if="islibhdfs">
++ <target name="check-c++-makefile-libhdfs" depends="check-c++-libhdfs" if="islibhdfs">
+ <condition property="need.c++.libhdfs.makefile">
+ <not> <available file="${build.c++.libhdfs}/Makefile"/> </not>
+ </condition>
+@@ -2326,5 +2326,23 @@
+ <fileset file="${jsvc.install.dir}/jsvc"/>
+ </chmod>
+ </target>
++ <target name="FreeBSD-dist" >
++ <mkdir dir="${dist.dir}"/>
++ <mkdir dir="${dist.dir}/lib"/>
++ <mkdir dir="${dist.dir}/contrib"/>
++ <mkdir dir="${dist.dir}/bin"/>
++
++ <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
++ <fileset dir="lib">
++ <exclude name="**/native/**"/>
++ </fileset>
++ </copy>
+
++ <exec dir="${basedir}" executable="sh" failonerror="true">
++ <env key="BASE_NATIVE_LIB_DIR" value="${basedir}/lib/native"/>
++ <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
++ <env key="DIST_LIB_DIR" value="${basedir}/lib/native"/>
++ <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
++ </exec>
++</target>
+ </project>
diff --git a/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c b/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c
new file mode 100644
index 000000000000..7ea39e3471af
--- /dev/null
+++ b/devel/hadoop/files/patch-src__c++__libhdfs__hdfs.c
@@ -0,0 +1,11 @@
+--- src/c++/libhdfs/hdfs.c.orig 2011-08-07 16:38:59.000000000 +0200
++++ src/c++/libhdfs/hdfs.c 2011-08-07 16:39:18.000000000 +0200
+@@ -252,7 +252,7 @@
+ cURI = malloc(strlen(host)+16);
+ sprintf(cURI, "hdfs://%s:%d", host, (int)(port));
+ if (cURI == NULL) {
+- fprintf (stderr, "Couldn't allocate an object of size %d",
++ fprintf (stderr, "Couldn't allocate an object of size %llu",
+ strlen(host) + 16);
+ errno = EINTERNAL;
+ goto done;
diff --git a/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c b/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c
new file mode 100644
index 000000000000..ea2ff1a40923
--- /dev/null
+++ b/devel/hadoop/files/patch-src__c++__libhdfs__hdfsJniHelper.c
@@ -0,0 +1,11 @@
+--- src/c++/libhdfs/hdfsJniHelper.c.orig 2011-05-04 08:30:16.000000000 +0200
++++ src/c++/libhdfs/hdfsJniHelper.c 2011-08-07 16:40:54.000000000 +0200
+@@ -15,7 +15,7 @@
+ */
+
+ #include <string.h>
+-#include <error.h>
++//#include <error.h>
+ #include "hdfsJniHelper.h"
+
+ static pthread_mutex_t hdfsHashMutex = PTHREAD_MUTEX_INITIALIZER;
diff --git a/devel/hadoop/files/patch-src__native__Makefile.in b/devel/hadoop/files/patch-src__native__Makefile.in
new file mode 100644
index 000000000000..0ebf9fb4223f
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__Makefile.in
@@ -0,0 +1,105 @@
+--- src/native/Makefile.in.orig 2011-05-04 08:30:16.000000000 +0200
++++ src/native/Makefile.in 2011-12-09 10:38:40.000000000 +0100
+@@ -92,10 +92,7 @@
+ libLTLIBRARIES_INSTALL = $(INSTALL)
+ LTLIBRARIES = $(lib_LTLIBRARIES)
+ libhadoop_la_DEPENDENCIES =
+-am_libhadoop_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo \
+- getGroup.lo JniBasedUnixGroupsMapping.lo \
+- JniBasedUnixGroupsNetgroupMapping.lo file_descriptor.lo \
+- errno_enum.lo NativeIO.lo
++am_libhadoop_la_OBJECTS = ZlibCompressor.lo ZlibDecompressor.lo
+ libhadoop_la_OBJECTS = $(am_libhadoop_la_OBJECTS)
+ DEFAULT_INCLUDES = -I. -I$(srcdir) -I.
+ depcomp = $(SHELL) $(top_srcdir)/config/depcomp
+@@ -223,24 +220,15 @@
+ sysconfdir = @sysconfdir@
+ target_alias = @target_alias@
+ AM_CPPFLAGS = @JNI_CPPFLAGS@ -I$(HADOOP_NATIVE_SRCDIR)/src \
+- -Isrc/org/apache/hadoop/io/compress/zlib \
+- -Isrc/org/apache/hadoop/io/nativeio \
+- -Isrc/org/apache/hadoop/security
++ -Isrc/org/apache/hadoop/io/compress/zlib
+
+ AM_LDFLAGS = @JNI_LDFLAGS@ -m$(JVM_DATA_MODEL)
+ AM_CFLAGS = -g -Wall -fPIC -O2 -m$(JVM_DATA_MODEL)
+ lib_LTLIBRARIES = libhadoop.la
+ libhadoop_la_SOURCES = src/org/apache/hadoop/io/compress/zlib/ZlibCompressor.c \
+- src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c \
+- src/org/apache/hadoop/security/getGroup.c \
+- src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c \
+- src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \
+- src/org/apache/hadoop/io/nativeio/file_descriptor.c \
+- src/org/apache/hadoop/io/nativeio/errno_enum.c \
+- src/org/apache/hadoop/io/nativeio/NativeIO.c
+-
++ src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
+ libhadoop_la_LDFLAGS = -version-info 1:0:0
+-libhadoop_la_LIBADD = -ldl -ljvm
++libhadoop_la_LIBADD = -ljvm
+ all: config.h
+ $(MAKE) $(AM_MAKEFLAGS) all-am
+
+@@ -332,14 +320,8 @@
+ distclean-compile:
+ -rm -f *.tab.c
+
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/JniBasedUnixGroupsMapping.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/NativeIO.Plo@am__quote@
+ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibCompressor.Plo@am__quote@
+ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/ZlibDecompressor.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/errno_enum.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/file_descriptor.Plo@am__quote@
+-@AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/getGroup.Plo@am__quote@
+
+ .c.o:
+ @am__fastdepCC_TRUE@ if $(COMPILE) -MT $@ -MD -MP -MF "$(DEPDIR)/$*.Tpo" -c -o $@ $<; \
+@@ -376,47 +358,6 @@
+ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+ @am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o ZlibDecompressor.lo `test -f 'src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.c
+
+-getGroup.lo: src/org/apache/hadoop/security/getGroup.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT getGroup.lo -MD -MP -MF "$(DEPDIR)/getGroup.Tpo" -c -o getGroup.lo `test -f 'src/org/apache/hadoop/security/getGroup.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/getGroup.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/getGroup.Tpo" "$(DEPDIR)/getGroup.Plo"; else rm -f "$(DEPDIR)/getGroup.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/getGroup.c' object='getGroup.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o getGroup.lo `test -f 'src/org/apache/hadoop/security/getGroup.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/getGroup.c
+-
+-JniBasedUnixGroupsMapping.lo: src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT JniBasedUnixGroupsMapping.lo -MD -MP -MF "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo" -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo" "$(DEPDIR)/JniBasedUnixGroupsMapping.Plo"; else rm -f "$(DEPDIR)/JniBasedUnixGroupsMapping.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' object='JniBasedUnixGroupsMapping.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o JniBasedUnixGroupsMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsMapping.c
+-
+-JniBasedUnixGroupsNetgroupMapping.lo: src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT JniBasedUnixGroupsNetgroupMapping.lo -MD -MP -MF "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Tpo" -c -o JniBasedUnixGroupsNetgroupMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Tpo" "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Plo"; else rm -f "$(DEPDIR)/JniBasedUnixGroupsNetgroupMapping.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c' object='JniBasedUnixGroupsNetgroupMapping.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o JniBasedUnixGroupsNetgroupMapping.lo `test -f 'src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c' || echo '$(srcdir)/'`src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c
+-
+-file_descriptor.lo: src/org/apache/hadoop/io/nativeio/file_descriptor.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT file_descriptor.lo -MD -MP -MF "$(DEPDIR)/file_descriptor.Tpo" -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/file_descriptor.Tpo" "$(DEPDIR)/file_descriptor.Plo"; else rm -f "$(DEPDIR)/file_descriptor.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/nativeio/file_descriptor.c' object='file_descriptor.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o file_descriptor.lo `test -f 'src/org/apache/hadoop/io/nativeio/file_descriptor.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/file_descriptor.c
+-
+-errno_enum.lo: src/org/apache/hadoop/io/nativeio/errno_enum.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT errno_enum.lo -MD -MP -MF "$(DEPDIR)/errno_enum.Tpo" -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/errno_enum.Tpo" "$(DEPDIR)/errno_enum.Plo"; else rm -f "$(DEPDIR)/errno_enum.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/nativeio/errno_enum.c' object='errno_enum.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o errno_enum.lo `test -f 'src/org/apache/hadoop/io/nativeio/errno_enum.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/errno_enum.c
+-
+-NativeIO.lo: src/org/apache/hadoop/io/nativeio/NativeIO.c
+-@am__fastdepCC_TRUE@ if $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -MT NativeIO.lo -MD -MP -MF "$(DEPDIR)/NativeIO.Tpo" -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c; \
+-@am__fastdepCC_TRUE@ then mv -f "$(DEPDIR)/NativeIO.Tpo" "$(DEPDIR)/NativeIO.Plo"; else rm -f "$(DEPDIR)/NativeIO.Tpo"; exit 1; fi
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ source='src/org/apache/hadoop/io/nativeio/NativeIO.c' object='NativeIO.lo' libtool=yes @AMDEPBACKSLASH@
+-@AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@
+-@am__fastdepCC_FALSE@ $(LIBTOOL) --tag=CC --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) -c -o NativeIO.lo `test -f 'src/org/apache/hadoop/io/nativeio/NativeIO.c' || echo '$(srcdir)/'`src/org/apache/hadoop/io/nativeio/NativeIO.c
+
+ mostlyclean-libtool:
+ -rm -f *.lo
diff --git a/devel/hadoop/files/patch-src__native__configure b/devel/hadoop/files/patch-src__native__configure
new file mode 100644
index 000000000000..d9b70ca443a3
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__configure
@@ -0,0 +1,11 @@
+--- src/native/configure.orig 2011-12-08 23:11:17.000000000 +0100
++++ src/native/configure 2011-12-08 23:11:50.000000000 +0100
+@@ -20504,7 +20504,7 @@
+ echo 'int main(int argc, char **argv){return 0;}' > conftest.c
+ if test -z "`${CC} ${LDFLAGS} -o conftest conftest.c -lz 2>&1`"; then
+ if test ! -z "`which objdump | grep -v 'no objdump'`"; then
+- ac_cv_libname_z="`objdump -p conftest | grep NEEDED | grep z | sed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`"
++ ac_cv_libname_z="`objdump -p conftest | grep NEEDED | grep z | gsed 's/\W*NEEDED\W*\(.*\)\W*$/\"\1\"/'`"
+ elif test ! -z "`which ldd | grep -v 'no ldd'`"; then
+ ac_cv_libname_z="`ldd conftest | grep z | sed 's/^[^A-Za-z0-9]*\([A-Za-z0-9\.]*\)[^A-Za-z0-9]*=>.*$/\"\1\"/'`"
+ else
diff --git a/devel/hadoop/files/patch-src__native__configure.ac b/devel/hadoop/files/patch-src__native__configure.ac
new file mode 100644
index 000000000000..91561671f959
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__configure.ac
@@ -0,0 +1,13 @@
+--- src/native/configure.ac.orig 2011-05-04 08:30:16.000000000 +0200
++++ src/native/configure.ac 2011-08-07 16:17:58.000000000 +0200
+@@ -46,10 +46,6 @@
+ AC_PROG_CC
+ AC_PROG_LIBTOOL
+
+-# Checks for libraries.
+-dnl Check for '-ldl'
+-AC_CHECK_LIB([dl], [dlopen])
+-
+ dnl Check for '-ljvm'
+ JNI_LDFLAGS=""
+ if test $JAVA_HOME != ""
diff --git a/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c b/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c
new file mode 100644
index 000000000000..20ae853ee768
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__io__nativeio__NativeIO.c
@@ -0,0 +1,11 @@
+--- src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c.orig 2011-08-10 13:43:50.000000000 +0200
++++ src/native/src/org/apache/hadoop/io/nativeio/NativeIO.c 2011-08-10 13:44:32.000000000 +0200
+@@ -188,7 +188,7 @@
+ if (flags & O_CREAT) {
+ fd = open(path, flags, mode);
+ } else {
+- fd = open(path, flags);
++ fd = open(path, flags | O_CREAT);
+ }
+
+ if (fd == -1) {
diff --git a/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c b/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c
new file mode 100644
index 000000000000..d8881f76bb33
--- /dev/null
+++ b/devel/hadoop/files/patch-src__native__src__org__apache__hadoop__security__JniBasedUnixGroupsNetgroupMapping.c
@@ -0,0 +1,12 @@
+--- src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c.orig 2011-08-07 16:43:00.000000000 +0200
++++ src/native/src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c 2011-08-07 16:43:30.000000000 +0200
+@@ -73,7 +73,8 @@
+ // was successfull or not (as long as it was called we need to call
+ // endnetgrent)
+ setnetgrentCalledFlag = 1;
+- if(setnetgrent(cgroup) == 1) {
++ setnetgrent(cgroup);
++ if(1 == 1) {
+ UserList *current = NULL;
+ // three pointers are for host, user, domain, we only care
+ // about user now
diff --git a/devel/hadoop/files/pkg-deinstall.in b/devel/hadoop/files/pkg-deinstall.in
new file mode 100644
index 000000000000..e42856245c9f
--- /dev/null
+++ b/devel/hadoop/files/pkg-deinstall.in
@@ -0,0 +1,14 @@
+#!/bin/sh
+# $FreeBSD$
+
+PATH="/bin:/sbin:/usr/bin:/usr/sbin"
+
+RUNDIR=%%HADOOP_RUNDIR%%
+LOGDIR=%%HADOOP_LOGDIR%%
+
+if [ "$2" = "POST-DEINSTALL" ]; then
+ echo "=> Deleting ${RUNDIR} if empty..."
+ rm -d ${RUNDIR} 2>/dev/null || true
+ echo "=> Deleting ${LOGDIR} if empty..."
+ rm -d ${LOGDIR} 2>/dev/null || true
+fi
diff --git a/devel/hadoop/files/pkg-install.in b/devel/hadoop/files/pkg-install.in
new file mode 100644
index 000000000000..7d0b09b046c3
--- /dev/null
+++ b/devel/hadoop/files/pkg-install.in
@@ -0,0 +1,55 @@
+#!/bin/sh
+# $FreeBSD$
+PATH="/bin:/sbin:/usr/bin:/usr/sbin"
+
+HADOOP_USER=%%HADOOP_USER%%
+HADOOP_GROUP=%%HADOOP_GROUP%%
+UID=%%HADOOP_UID%%
+GID=%%HADOOP_GID%%
+
+RUNDIR=%%HADOOP_RUNDIR%%
+LOGDIR=%%HADOOP_LOGDIR%%
+
+PW="pw"
+CHOWN="chown"
+INSTALL_DIR="install -d -o ${HADOOP_USER} -g ${HADOOP_GROUP} -m 0755"
+
+if [ "$2" = "PRE-INSTALL" ]; then
+
+# if ! ${PW} groupshow ${HADOOP_GROUP} 2>/dev/null 1>&2; then
+# if ${PW} groupadd ${HADOOP_GROUP} -g $GID; then
+# echo "=> Added group \"${HADOOP_GROUP}\"."
+# else
+# echo "=> Adding group \"${HADOOP_GROUP}\" failed..."
+# exit 1
+# fi
+# fi
+
+# if ! ${PW} usershow ${HADOOP_USER} 2>/dev/null 1>&2; then
+# if ${PW} useradd ${HADOOP_USER} -u $UID -g ${HADOOP_GROUP} -h - \
+# -s "/sbin/nologin" -d "/nonexistent" \
+# -c "Hadoop Daemons"; then
+# echo "=> Added user \"${HADOOP_USER}\"."
+# else
+# echo "=> Adding user \"${HADOOP_USER}\" failed..."
+# exit 1
+# fi
+# fi
+ sleep 1
+
+elif [ "$2" = "POST-INSTALL" ]; then
+ if [ -d ${RUNDIR} ]; then
+ echo "=> ${RUNDIR} already exists."
+ else
+ echo -n "=> Creating RUNDIR ${RUNDIR}... "
+ ${INSTALL_DIR} ${RUNDIR} || echo "failed"
+ fi
+ if [ -d ${LOGDIR} ]; then
+ echo "=> ${LOGDIR} already exists."
+ else
+ echo -n "=> Creating LOGDIR ${LOGDIR}... "
+ ${INSTALL_DIR} ${LOGDIR} || echo "failed"
+ fi
+fi
+
+exit 0
diff --git a/devel/hadoop/files/secondarynamenode.in b/devel/hadoop/files/secondarynamenode.in
new file mode 100644
index 000000000000..fa58dd346017
--- /dev/null
+++ b/devel/hadoop/files/secondarynamenode.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: secondarynamenode
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# secondarynamenode_enable (bool): Set to NO by default.
+# Set it to YES to enable secondarynamenode.
+# secondarynamenode_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# secondarynamenode user.
+# secondarynamenode_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# secondarynamenode_log_dir (str): Unset by default.
+# secondarynamenode_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=secondarynamenode
+rcvar=secondarynamenode_enable
+
+load_rc_config "${name}"
+
+: ${secondarynamenode_enable:=NO}
+: ${secondarynamenode_user:=%%HADOOP_USER%%}
+: ${secondarynamenode_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start secondarynamenode'
+
+start_precmd=secondarynamenode_init
+stop_precmd=secondarynamenode_init
+stop_cmd=secondarynamenode_stop
+
+
+secondarynamenode_init()
+{
+ if [ -n "${secondarynamenode_java_home}" ]
+ then
+ export JAVA_HOME="${secondarynamenode_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${secondarynamenode_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${secondarynamenode_log_dir}
+ fi
+
+ if [ -n "${secondarynamenode_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${secondarynamenode_pid_dir}
+ fi
+
+ install -d -m 755 -o ${secondarynamenode_user} -g ${secondarynamenode_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${secondarynamenode_user} -g ${secondarynamenode_group} ${HADOOP_LOG_DIR}
+}
+
+secondarynamenode_stop ()
+{
+ su -m ${secondarynamenode_user} -c "${command} --config ${HADOOP_CONF_DIR} stop secondarynamenode"
+}
+
+run_rc_command "$1"
diff --git a/devel/hadoop/files/tasktracker.in b/devel/hadoop/files/tasktracker.in
new file mode 100644
index 000000000000..6d97a049f361
--- /dev/null
+++ b/devel/hadoop/files/tasktracker.in
@@ -0,0 +1,72 @@
+#!/bin/sh
+#
+# $FreeBSD$
+#
+# PROVIDE: tasktracker
+# REQUIRE: LOGIN
+# KEYWORD: shutdown
+#
+# Add the following lines to /etc/rc.conf to enable this service:
+#
+# tasktracker_enable (bool): Set to NO by default.
+# Set it to YES to enable tasktracker.
+# tasktracker_user (str): Set to %%HADOOP_USER%% by default.
+# Set to a valid user to change default
+# tasktracker user.
+# tasktracker_java_home (str): Unset by default.
+# Switch to another JVM.
+#
+# tasktracker_log_dir (str): Unset by default.
+# tasktracker_pid_dir (str): Unset by default.
+#
+
+. /etc/rc.subr
+
+name=tasktracker
+rcvar=tasktracker_enable
+
+load_rc_config "${name}"
+
+: ${tasktracker_enable:=NO}
+: ${tasktracker_user:=%%HADOOP_USER%%}
+: ${tasktracker_group:=%%HADOOP_GROUP%%}
+
+command="%%HADOOP_HOME%%/bin/hadoop-daemon.sh"
+command_args='--config ${HADOOP_CONF_DIR} start tasktracker'
+
+start_precmd=tasktracker_init
+stop_precmd=tasktracker_init
+stop_cmd=tasktracker_stop
+
+
+tasktracker_init()
+{
+ if [ -n "${tasktracker_java_home}" ]
+ then
+ export JAVA_HOME="${tasktracker_java_home}"
+ fi
+
+ for f in %%HADOOP_ETC%%/envvars.d/*.env
+ do
+ . ${f}
+ done
+ if [ -n "${tasktracker_log_dir}" ]
+ then
+ HADOOP_LOG_DIR=${tasktracker_log_dir}
+ fi
+
+ if [ -n "${tasktracker_pid_dir}" ]
+ then
+	HADOOP_PID_DIR=${tasktracker_pid_dir}
+ fi
+
+ install -d -m 755 -o ${tasktracker_user} -g ${tasktracker_group} ${HADOOP_PID_DIR}
+ install -d -m 755 -o ${tasktracker_user} -g ${tasktracker_group} ${HADOOP_LOG_DIR}
+}
+
+tasktracker_stop ()
+{
+ su -m ${tasktracker_user} -c "${command} --config ${HADOOP_CONF_DIR} stop tasktracker"
+}
+
+run_rc_command "$1"