blob: 9ef33a9fb3ea691076fae1cc11549b614b7156f3 (
plain)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Fail fast and trace: -e aborts on unhandled errors, -x echoes each command
# into the action log; pipefail makes any future pipeline fail if a stage fails.
set -ex -o pipefail

# The smoke test submits MapReduce jobs, so bail out cleanly if the
# ResourceManager charm has not reported ready yet.
if ! charms.reactive is_state 'apache-bigtop-resourcemanager.ready'; then
  action-fail 'ResourceManager not yet ready'
  # Exit 0 deliberately: action-fail has already marked the action as failed;
  # a non-zero exit would additionally surface a charm hook error to Juju.
  exit 0
fi
# Job parameters for the teragen/terasort smoke test.
IN_DIR='/tmp/smoke_test_in'    # HDFS directory for generated input
OUT_DIR='/tmp/smoke_test_out'  # HDFS directory for sorted output
SIZE=10000                     # number of rows for teragen
MAPS=1                         # mapreduce.job.maps
REDUCES=1                      # mapreduce.job.reduces
NUMTASKS=1                     # mapreduce.job.jvm.numtasks
COMPRESSION='LocalDefault'     # Disable | LocalDefault | <CodecName>

# Assemble the -D overrides passed to terasort. Note the leading space on the
# first append is intentional (OPTIONS is expanded unquoted on the hadoop
# command line, where whitespace collapses).
OPTIONS=''
OPTIONS="${OPTIONS} -D mapreduce.job.maps=${MAPS}"
OPTIONS="${OPTIONS} -D mapreduce.job.reduces=${REDUCES}"
OPTIONS="${OPTIONS} -D mapreduce.job.jvm.numtasks=${NUMTASKS}"

# Map-output compression. The original used '[ $COMPRESSION == ... ]', which
# is fragile (unquoted var) and non-POSIX ('==' inside single brackets); a
# case statement is the idiomatic, quote-safe form and drops the no-op
# 'OPTIONS="${OPTIONS}"' branch.
case "${COMPRESSION}" in
  Disable)
    OPTIONS="${OPTIONS} -D mapreduce.map.output.compress=false"
    ;;
  LocalDefault)
    # Leave the cluster's configured compression default untouched.
    ;;
  *)
    OPTIONS="${OPTIONS} -D mapreduce.map.output.compress=true -D mapred.map.output.compress.codec=org.apache.hadoop.io.compress.${COMPRESSION}Codec"
    ;;
esac
# Prepare a local directory to collect terasort logs; each run gets a unique
# file named <epoch-seconds>.<pid>.log so repeated runs never collide.
RUN=$(date +%s)
RESULT_DIR=/opt/terasort-results
RESULT_LOG="${RESULT_DIR}/${RUN}.$$.log"
mkdir -p "${RESULT_DIR}"
# The benchmark itself runs as hdfs, so hand the results tree to that user.
chown -R hdfs "${RESULT_DIR}"
# clean out any previous data (must be run as the hdfs user)
# The heredoc delimiter is unquoted, so ${IN_DIR}/${OUT_DIR} are expanded by
# THIS shell before the commands reach the hdfs user's shell. '|| true'
# keeps 'set -e' from aborting if a concurrent job removes the path first.
su hdfs << EOF
if hadoop fs -stat ${IN_DIR} &> /dev/null; then
hadoop fs -rm -r -skipTrash ${IN_DIR} || true
fi
if hadoop fs -stat ${OUT_DIR} &> /dev/null; then
hadoop fs -rm -r -skipTrash ${OUT_DIR} || true
fi
EOF
# Record wall-clock start time for the duration calculation after the run.
START=`date +%s`
# NB: Escaped vars in the block below (e.g., \${HADOOP_MAPRED_HOME}) come from
# the environment while non-escaped vars (e.g., ${IN_DIR}) are parameterized
# from this outer scope
# Run as hdfs: teragen writes SIZE rows into IN_DIR (its output is
# discarded), then terasort sorts them into OUT_DIR; terasort's output —
# including the job counters checked below — is captured in RESULT_LOG.
su hdfs << EOF
. /etc/default/hadoop
echo 'generating data'
hadoop jar \${HADOOP_MAPRED_HOME}/hadoop-mapreduce-examples-*.jar teragen ${SIZE} ${IN_DIR} &>/dev/null
echo 'sorting data'
hadoop jar \${HADOOP_MAPRED_HOME}/hadoop-mapreduce-examples-*.jar terasort ${OPTIONS} ${IN_DIR} ${OUT_DIR} &> ${RESULT_LOG}
EOF
STOP=$(date +%s)
# Success criterion: the terasort counters must report the expected output
# size. NOTE(review): 1000000 is coupled to SIZE=10000 above (presumably
# 100-byte teragen rows) — confirm before changing SIZE.
if ! grep -q 'Bytes Written=1000000' "${RESULT_LOG}"; then
  action-fail 'smoke-test failed'
  # Attach the full terasort log so the operator can see why it failed.
  action-set log="$(cat "${RESULT_LOG}")"
fi
# Arithmetic expansion instead of `expr`: expr exits with status 1 when its
# result is 0, which would abort the script under 'set -e' on a same-second
# run. NOTE(review): DURATION is computed but never reported via action-set —
# confirm whether a later chunk consumes it or it should be surfaced.
DURATION=$((STOP - START))
|