author     Anton Chevychalov <cab@arenadata.io>    2017-09-27 14:48:38 +0300
committer  Youngwoo Kim <ywkim@apache.org>         2017-12-19 15:23:34 +0900
commit     fb26cdadd11d6b0164ac25bca7a5fb8f4d1f97cf (patch)
tree       001cd2cd1487e3b599d93208072cee8cd7d023fd
parent     2e1795361d55c0a6f15f1bae7ff5cd6714367621 (diff)
BIGTOP-2893 Bump Hadoop to 2.8.1
Patches:
========
HADOOP-12366 was ported to 2.8.1
HADOOP-11628 was removed because it was merged to 2.8

Build time issues fixed:
========================

Fuse_dfs target dir has been changed in 2.8
-------------------------------------------
cp: cannot stat 'hadoop-hdfs-project/hadoop-hdfs/target/native/main/native/fuse-dfs/fuse_dfs': No such file or directory

Add the package goal to mvn
---------------------------
Without that goal the tar is incomplete:

for dir in '${HADOOP_DIR}/{lib,}' '${HDFS_DIR}/{lib,}' '${YARN_DIR}/{lib,}' '${MAPREDUCE_DIR}/{lib,}'
'[' -e /code/build/hadoop/rpm/BUILDROOT/hadoop-2.8.1-243.el7.centos.x86_64/usr/lib/hadoop-mapreduce//hadoop-common-2.8.1.jar ']'
exit 1
error: Bad exit status from /var/tmp/rpm-tmp.5Etrah (%install)
    Bad exit status from /var/tmp/rpm-tmp.5Etrah (%install)

Signed-off-by: Youngwoo Kim <ywkim@apache.org>
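In short, the two build-time fixes come down to the following lines in
do-component-build (a minimal sketch; the mvn flags are abridged, and
MAVEN_OPTS and BUNDLE_SNAPPY are assumed to be set by the script as before):

    # Run the 'package' goal in addition to 'install' so the dist tarball is complete
    mvn $BUNDLE_SNAPPY -Pdist -Pnative -Psrc -Dtar ${MAVEN_OPTS} install package "$@"

    # fuse_dfs is built under hadoop-hdfs-native-client in 2.8, so copy it from there
    cp hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/fuse_dfs build/bin
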
-rw-r--r--  bigtop-packages/src/common/hadoop/do-component-build                                  4
-rw-r--r--  bigtop-packages/src/common/hadoop/patch0-HADOOP-12366.-expose-calculated-paths.diff  38
-rw-r--r--  bigtop-packages/src/common/hadoop/patch1-HADOOP-11628.diff                           22
-rw-r--r--  bigtop.bom                                                                            2
4 files changed, 22 insertions, 44 deletions
diff --git a/bigtop-packages/src/common/hadoop/do-component-build b/bigtop-packages/src/common/hadoop/do-component-build
index 32429c28..dcd802f6 100644
--- a/bigtop-packages/src/common/hadoop/do-component-build
+++ b/bigtop-packages/src/common/hadoop/do-component-build
@@ -56,7 +56,7 @@ mkdir build/src
# Build artifacts
MAVEN_OPTS="-Dzookeeper.version=$ZOOKEEPER_VERSION"
MAVEN_OPTS="$MAVEN_OPTS -DskipTests -DskipTest -DskipITs"
-mvn $ANT_OPTS $BUNDLE_SNAPPY -Pdist -Pnative -Psrc -Dtar ${MAVEN_OPTS} -Dtomcat.version=${BIGTOP_TOMCAT_VERSION} -Dtomcat.download.url="http://archive.apache.org/dist/tomcat/tomcat-6/v${BIGTOP_TOMCAT_VERSION}/bin/apache-tomcat-${BIGTOP_TOMCAT_VERSION}.tar.gz" install "$@"
+mvn $ANT_OPTS $BUNDLE_SNAPPY -Pdist -Pnative -Psrc -Dtar ${MAVEN_OPTS} -Dtomcat.version=${BIGTOP_TOMCAT_VERSION} -Dtomcat.download.url="http://archive.apache.org/dist/tomcat/tomcat-6/v${BIGTOP_TOMCAT_VERSION}/bin/apache-tomcat-${BIGTOP_TOMCAT_VERSION}.tar.gz" install package "$@"
mvn site site:stage ${MAVEN_OPTS} $@
(cd build ; tar --strip-components=1 -xzvf ../hadoop-dist/target/hadoop-${HADOOP_VERSION}.tar.gz)
@@ -70,4 +70,4 @@ cp -r target/staging/hadoop-project build/share/doc
(cd hadoop-client/target/hadoop-client-*/share/hadoop/client/lib ; ls) > build/hadoop-client.list
# Copy fuse output to the build directory
-cp hadoop-hdfs-project/hadoop-hdfs/target/native/main/native/fuse-dfs/fuse_dfs build/bin/
+cp hadoop-hdfs-project/hadoop-hdfs-native-client/target/main/native/fuse-dfs/fuse_dfs build/bin
diff --git a/bigtop-packages/src/common/hadoop/patch0-HADOOP-12366.-expose-calculated-paths.diff b/bigtop-packages/src/common/hadoop/patch0-HADOOP-12366.-expose-calculated-paths.diff
index b0c831cf..026c01f1 100644
--- a/bigtop-packages/src/common/hadoop/patch0-HADOOP-12366.-expose-calculated-paths.diff
+++ b/bigtop-packages/src/common/hadoop/patch0-HADOOP-12366.-expose-calculated-paths.diff
@@ -1,9 +1,9 @@
-From 3131744ed6e1e874d7cd0666208adc424fec3224 Mon Sep 17 00:00:00 2001
-From: Konstantin Boudnik <cos@boudnik.org>
-Date: Mon, 15 Feb 2016 10:42:16 +0300
-Subject: [PATCH] HADOOP-12366. expose calculated paths
+From f10ec9f9bec56bcfa5e2015ea87a8a5054d096c5 Mon Sep 17 00:00:00 2001
+From: Anton Chevychalov <pub@mnu.pp.ru>
+Date: Tue, 5 Sep 2017 15:14:19 +0300
+Subject: [PATCH] [PATCH] HADOOP-12366. expose calculated paths
-Adapted the patch to apply cleanly on branch-2.7.1
+Adapted the patch to apply cleanly on branch-2.8.1
---
hadoop-common-project/hadoop-common/src/main/bin/hadoop | 10 ++++++++++
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs | 9 +++++++++
@@ -12,7 +12,7 @@ Adapted the patch to apply cleanly on branch-2.7.1
4 files changed, 37 insertions(+)
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
-index a5e8885..715ad9b 100755
+index 1575996..44be2e3 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -38,6 +38,7 @@ function print_usage(){
@@ -22,7 +22,7 @@ index a5e8885..715ad9b 100755
+ echo " envvars display computed Hadoop environment variables"
echo " archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
echo " classpath prints the class path needed to get the"
- echo " credential interact with credential providers"
+ echo " Hadoop jar and the required libraries"
@@ -116,6 +117,15 @@ case $COMMAND in
CLASSPATH=${CLASSPATH}:${TOOL_PATH}
elif [ "$COMMAND" = "daemonlog" ] ; then
@@ -40,18 +40,18 @@ index a5e8885..715ad9b 100755
CLASS=org.apache.hadoop.tools.HadoopArchives
CLASSPATH=${CLASSPATH}:${TOOL_PATH}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
-index 155df14..b744761 100755
+index fbfbaf2..714280e 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
-@@ -46,6 +46,7 @@ function print_usage(){
- echo " zkfc run the ZK Failover Controller daemon"
+@@ -47,6 +47,7 @@ function print_usage(){
echo " datanode run a DFS datanode"
+ echo " debug run a Debug Admin to execute HDFS debug commands"
echo " dfsadmin run a DFS admin client"
+ echo " envvars display computed Hadoop environment variables"
echo " haadmin run a DFS HA admin client"
echo " fsck run a DFS filesystem checking utility"
echo " balancer run a cluster balancing utility"
-@@ -160,6 +161,14 @@ elif [ "$COMMAND" = "haadmin" ] ; then
+@@ -161,6 +162,14 @@ elif [ "$COMMAND" = "haadmin" ] ; then
CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
CLASSPATH=${CLASSPATH}:${TOOL_PATH}
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
@@ -67,7 +67,7 @@ index 155df14..b744761 100755
CLASS=org.apache.hadoop.hdfs.tools.DFSck
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
diff --git a/hadoop-mapreduce-project/bin/mapred b/hadoop-mapreduce-project/bin/mapred
-index fe16e07..6a21fed 100755
+index 6f3bc04..fd1300d 100755
--- a/hadoop-mapreduce-project/bin/mapred
+++ b/hadoop-mapreduce-project/bin/mapred
@@ -38,6 +38,7 @@ function print_usage(){
@@ -76,9 +76,9 @@ index fe16e07..6a21fed 100755
echo " distcp <srcurl> <desturl> copy file or directories recursively"
+ echo " envvars display computed Hadoop environment variables"
echo " archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
+ echo " archive-logs combine aggregated logs into hadoop archives"
echo " hsadmin job history server admin interface"
- echo ""
-@@ -74,6 +75,14 @@ elif [ "$COMMAND" = "sampler" ] ; then
+@@ -75,6 +76,14 @@ elif [ "$COMMAND" = "sampler" ] ; then
HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
elif [ "$COMMAND" = "classpath" ] ; then
echo -n
@@ -94,18 +94,18 @@ index fe16e07..6a21fed 100755
CLASS=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
HADOOP_OPTS="$HADOOP_OPTS -Dmapred.jobsummary.logger=${HADOOP_JHS_LOGGER:-INFO,console} $HADOOP_JOB_HISTORYSERVER_OPTS"
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn b/hadoop-yarn-project/hadoop-yarn/bin/yarn
-index 71347a2..ce3e24a 100644
+index 552cef4..5495387 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
-@@ -87,6 +87,7 @@ function print_usage(){
+@@ -89,6 +89,7 @@ function print_usage(){
echo " cluster prints cluster information"
echo " daemonlog get/set the log level for each"
echo " daemon"
+ echo " envvars display computed Hadoop environment variables"
+ echo " top run cluster usage tool"
echo ""
echo "Most commands print help when invoked w/o parameters."
- }
-@@ -214,6 +215,14 @@ if [ "$COMMAND" = "classpath" ] ; then
+@@ -217,6 +218,14 @@ if [ "$COMMAND" = "classpath" ] ; then
echo $CLASSPATH
exit 0
fi
@@ -121,5 +121,5 @@ index 71347a2..ce3e24a 100644
CLASS='org.apache.hadoop.yarn.client.cli.RMAdminCLI'
YARN_OPTS="$YARN_OPTS $YARN_CLIENT_OPTS"
--
-2.0.4
+2.7.4
diff --git a/bigtop-packages/src/common/hadoop/patch1-HADOOP-11628.diff b/bigtop-packages/src/common/hadoop/patch1-HADOOP-11628.diff
deleted file mode 100644
index 1c2de374..00000000
--- a/bigtop-packages/src/common/hadoop/patch1-HADOOP-11628.diff
+++ /dev/null
@@ -1,22 +0,0 @@
-diff --git a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
-index 9852460..8cd39c7 100644
---- a/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
-+++ b/hadoop-common-project/hadoop-auth/src/main/java/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java
-@@ -37,6 +37,7 @@
-
- import java.io.File;
- import java.io.IOException;
-+import java.net.InetAddress;
- import java.security.PrivilegedActionException;
- import java.security.PrivilegedExceptionAction;
- import java.util.ArrayList;
-@@ -343,7 +344,8 @@ public AuthenticationToken authenticate(HttpServletRequest request, final HttpSe
- authorization = authorization.substring(KerberosAuthenticator.NEGOTIATE.length()).trim();
- final Base64 base64 = new Base64(0);
- final byte[] clientToken = base64.decode(authorization);
-- final String serverName = request.getServerName();
-+ final String serverName = InetAddress.getByName(request.getServerName())
-+ .getCanonicalHostName();
- try {
- token = Subject.doAs(serverSubject, new PrivilegedExceptionAction<AuthenticationToken>() {
-
diff --git a/bigtop.bom b/bigtop.bom
index f346d888..1a9fabef 100644
--- a/bigtop.bom
+++ b/bigtop.bom
@@ -144,7 +144,7 @@ bigtop {
'hadoop' {
name = 'hadoop'
relNotes = 'Apache Hadoop'
- version { base = '2.7.4'; pkg = base; release = 1 }
+ version { base = '2.8.1'; pkg = base; release = 1 }
tarball { destination = "${name}-${version.base}.tar.gz"
source = "${name}-${version.base}-src.tar.gz" }
url { download_path = "/$name/common/$name-${version.base}"