summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorVishal Bhoj <vishal.bhoj@linaro.org>2018-08-28 15:25:55 +0530
committerVishal Bhoj <vishal.bhoj@linaro.org>2018-08-29 13:33:14 +0530
commitadcf3a1a43e40f7647acc8f88806942dc161f976 (patch)
treed5418e126a530f19cc06945822979d13338f459f
parentcf2d21e86c3d0153667e2d371b7458e00f7cabeb (diff)
add noninteractive tradefed test wrapper
tradefed can handle adb disconnection and other device issues. Whenever the adb connection to the device breaks, tradefed will wait for a timeout and end the testing, which in turn will end the process. We should then get a return code of failure. Change-Id: I814a2194ae1663cf932a3aab30137736e6fab7ac Signed-off-by: Vishal Bhoj <vishal.bhoj@linaro.org>
-rwxr-xr-xautomated/android/noninteractive-tradefed/monitor_fastboot.sh4
-rwxr-xr-xautomated/android/noninteractive-tradefed/setup.sh29
-rwxr-xr-xautomated/android/noninteractive-tradefed/tradefed-runner.py216
-rwxr-xr-xautomated/android/noninteractive-tradefed/tradefed.sh88
-rw-r--r--automated/android/noninteractive-tradefed/tradefed.yaml64
5 files changed, 401 insertions, 0 deletions
diff --git a/automated/android/noninteractive-tradefed/monitor_fastboot.sh b/automated/android/noninteractive-tradefed/monitor_fastboot.sh
new file mode 100755
index 0000000..2345cab
--- /dev/null
+++ b/automated/android/noninteractive-tradefed/monitor_fastboot.sh
@@ -0,0 +1,4 @@
+#!/bin/sh -x
+while true; do
+fastboot boot /lava-lxc/boot*.img
+done
diff --git a/automated/android/noninteractive-tradefed/setup.sh b/automated/android/noninteractive-tradefed/setup.sh
new file mode 100755
index 0000000..86d8e69
--- /dev/null
+++ b/automated/android/noninteractive-tradefed/setup.sh
@@ -0,0 +1,29 @@
+#!/bin/sh -x
+# shellcheck disable=SC2154
+# shellcheck disable=SC1091
+
+. ../../lib/sh-test-lib
+. ../../lib/android-test-lib
+
+if echo "$ANDROID_VERSION" | grep aosp-master ; then
+ JDK="openjdk-9-jdk-headless"
+else
+ JDK="openjdk-8-jdk-headless"
+fi
+PKG_DEPS="usbutils curl wget zip xz-utils python-lxml python-setuptools python-pexpect aapt lib32z1-dev libc6-dev-i386 lib32gcc1 libc6:i386 libstdc++6:i386 libgcc1:i386 zlib1g:i386 libncurses5:i386 python-dev python-protobuf protobuf-compiler python-virtualenv python-pip python-pexpect psmisc"
+
+dist_name
+case "${dist}" in
+ ubuntu)
+ dpkg --add-architecture i386
+ apt-get update -q
+ install_deps "${PKG_DEPS} ${JDK}"
+ ;;
+ *)
+ error_msg "Please use Ubuntu for CTS or VTS test."
+ ;;
+esac
+
+install_latest_adb
+initialize_adb
+adb_root
diff --git a/automated/android/noninteractive-tradefed/tradefed-runner.py b/automated/android/noninteractive-tradefed/tradefed-runner.py
new file mode 100755
index 0000000..6808256
--- /dev/null
+++ b/automated/android/noninteractive-tradefed/tradefed-runner.py
@@ -0,0 +1,216 @@
+#!/usr/bin/env python
+
+import datetime
+import os
+import re
+import sys
+import shlex
+import shutil
+import subprocess
+import xml.etree.ElementTree as ET
+import argparse
+import logging
+import time
+
+sys.path.insert(0, '../../lib/')
+import py_test_lib # nopep8
+
+
+OUTPUT = '%s/output' % os.getcwd()
+RESULT_FILE = '%s/result.txt' % OUTPUT
+TRADEFED_STDOUT = '%s/tradefed-stdout.txt' % OUTPUT
+TRADEFED_LOGCAT = '%s/tradefed-logcat.txt' % OUTPUT
+TEST_PARAMS = ''
+AGGREGATED = 'aggregated'
+ATOMIC = 'atomic'
+
+
+def result_parser(xml_file, result_format):
+ etree_file = open(xml_file, 'rb')
+ etree_content = etree_file.read()
+ rx = re.compile("&#([0-9]+);|&#x([0-9a-fA-F]+);")
+ endpos = len(etree_content)
+ pos = 0
+ while pos < endpos:
+ # remove characters that don't conform to XML spec
+ m = rx.search(etree_content, pos)
+ if not m:
+ break
+ mstart, mend = m.span()
+ target = m.group(1)
+ if target:
+ num = int(target)
+ else:
+ num = int(m.group(2), 16)
+ # #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD] | [#x10000-#x10FFFF]
+ if not(num in (0x9, 0xA, 0xD) or
+ 0x20 <= num <= 0xD7FF or
+ 0xE000 <= num <= 0xFFFD or
+ 0x10000 <= num <= 0x10FFFF):
+ etree_content = etree_content[:mstart] + etree_content[mend:]
+ endpos = len(etree_content)
+ pos = mend
+
+ try:
+ root = ET.fromstring(etree_content)
+ except ET.ParseError as e:
+ logger.error('xml.etree.ElementTree.ParseError: %s' % e)
+ logger.info('Please Check %s manually' % xml_file)
+ sys.exit(1)
+ logger.info('Test modules in %s: %s'
+ % (xml_file, str(len(root.findall('Module')))))
+ failures_count = 0
+ for elem in root.findall('Module'):
+ # Naming: Module Name + Test Case Name + Test Name
+ if 'abi' in elem.attrib.keys():
+ module_name = '.'.join([elem.attrib['abi'], elem.attrib['name']])
+ else:
+ module_name = elem.attrib['name']
+
+ if result_format == AGGREGATED:
+ tests_executed = len(elem.findall('.//Test'))
+ tests_passed = len(elem.findall('.//Test[@result="pass"]'))
+ tests_failed = len(elem.findall('.//Test[@result="fail"]'))
+
+ result = '%s_executed pass %s' % (module_name, str(tests_executed))
+ py_test_lib.add_result(RESULT_FILE, result)
+
+ result = '%s_passed pass %s' % (module_name, str(tests_passed))
+ py_test_lib.add_result(RESULT_FILE, result)
+
+ failed_result = 'pass'
+ if tests_failed > 0:
+ failed_result = 'fail'
+ result = '%s_failed %s %s' % (module_name, failed_result,
+ str(tests_failed))
+ py_test_lib.add_result(RESULT_FILE, result)
+
+ # output result to show if the module is done or not
+ tests_done = elem.get('done', 'false')
+ if tests_done == 'false':
+ result = '%s_done fail' % module_name
+ else:
+ result = '%s_done pass' % module_name
+ py_test_lib.add_result(RESULT_FILE, result)
+
+ if args.FAILURES_PRINTED > 0 and failures_count < args.FAILURES_PRINTED:
+ # print failed test cases for debug
+ test_cases = elem.findall('.//TestCase')
+ for test_case in test_cases:
+ failed_tests = test_case.findall('.//Test[@result="fail"]')
+ for failed_test in failed_tests:
+ test_name = '%s/%s.%s' % (module_name,
+ test_case.get("name"),
+ failed_test.get("name"))
+ failures = failed_test.findall('.//Failure')
+ failure_msg = ''
+ for failure in failures:
+ failure_msg = '%s \n %s' % (failure_msg,
+ failure.get('message'))
+
+ logger.info('%s %s' % (test_name, failure_msg.strip()))
+ failures_count = failures_count + 1
+ if failures_count > args.FAILURES_PRINTED:
+ logger.info('There are more than %d test cases '
+ 'failed, the output for the rest '
+ 'failed test cases will be '
+ 'skipped.' % (args.FAILURES_PRINTED))
+                        # break the for loop of failed_tests
+ break
+ if failures_count > args.FAILURES_PRINTED:
+                # break the for loop of test_cases
+ break
+
+ if result_format == ATOMIC:
+ test_cases = elem.findall('.//TestCase')
+ for test_case in test_cases:
+ tests = test_case.findall('.//Test')
+ for atomic_test in tests:
+ atomic_test_result = atomic_test.get("result")
+ atomic_test_name = "%s/%s.%s" % (module_name,
+ test_case.get("name"),
+ atomic_test.get("name"))
+ py_test_lib.add_result(
+ RESULT_FILE, "%s %s" % (atomic_test_name,
+ atomic_test_result))
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument('-t', dest='TEST_PARAMS', required=True,
+ help="tradefed shell test parameters")
+parser.add_argument('-p', dest='TEST_PATH', required=True,
+ help="path to tradefed package top directory")
+parser.add_argument('-r', dest='RESULTS_FORMAT', required=False,
+ default=AGGREGATED, choices=[AGGREGATED, ATOMIC],
+ help="The format of the saved results. 'aggregated' means number of \
+ passed and failed tests are recorded for each module. 'atomic' means \
+ each test result is recorded separately")
+
+## The total number of failed test cases to be printed for this job.
+## Printing too many failures could cause the lava job to time out.
+## Default: do not print any failures.
+parser.add_argument('-f', dest='FAILURES_PRINTED', type=int,
+ required=False, default=0,
+                    help="Specify the number of failed test cases to be\
+ printed, 0 means not print any failures.")
+
+args = parser.parse_args()
+# TEST_PARAMS = args.TEST_PARAMS
+
+if os.path.exists(OUTPUT):
+ suffix = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
+ shutil.move(OUTPUT, '%s_%s' % (OUTPUT, suffix))
+os.makedirs(OUTPUT)
+
+# Setup logger.
+# There might be an issue in lava/local dispatcher, most likely problem of
+# pexpect. It prints the messages from print() last, not by sequence.
+# Use logging and subprocess.call() to work around this.
+logger = logging.getLogger('Tradefed')
+logger.setLevel(logging.DEBUG)
+ch = logging.StreamHandler()
+ch.setLevel(logging.DEBUG)
+formatter = logging.Formatter('%(asctime)s - %(name)s: %(levelname)s: %(message)s')
+ch.setFormatter(formatter)
+logger.addHandler(ch)
+
+tradefed_stdout = open(TRADEFED_STDOUT, 'w')
+tradefed_logcat_out = open(TRADEFED_LOGCAT, 'w')
+tradefed_logcat = subprocess.Popen(['adb', 'logcat'], stdout=tradefed_logcat_out)
+
+logger.info('Test params: %s' % args.TEST_PARAMS)
+logger.info('Starting tradefed shell test...')
+
+command = None
+prompt = None
+if args.TEST_PATH == "android-cts":
+ command = "android-cts/tools/cts-tradefed run commandAndExit " + args.TEST_PARAMS
+if args.TEST_PATH == "android-vts":
+ os.environ["VTS_ROOT"] = os.getcwd()
+ command = "android-vts/tools/vts-tradefed run commandAndExit " + args.TEST_PARAMS
+
+if command is None:
+ logger.error("Not supported path: %s" % args.TEST_PATH)
+ sys.exit(1)
+
+child = subprocess.Popen(shlex.split(command), stderr=subprocess.STDOUT, stdout=tradefed_stdout)
+fail_to_complete = child.wait()
+
+if fail_to_complete:
+ py_test_lib.add_result(RESULT_FILE, 'tradefed-test-run fail')
+else:
+ py_test_lib.add_result(RESULT_FILE, 'tradefed-test-run pass')
+
+logger.info('Tradefed test finished')
+tradefed_stdout.close()
+tradefed_logcat.kill()
+tradefed_logcat_out.close()
+
+# Locate and parse test result.
+result_dir = '%s/results' % args.TEST_PATH
+test_result = 'test_result.xml'
+if os.path.exists(result_dir) and os.path.isdir(result_dir):
+ for root, dirs, files in os.walk(result_dir):
+ for name in files:
+ if name == test_result:
+ result_parser(os.path.join(root, name), args.RESULTS_FORMAT)
diff --git a/automated/android/noninteractive-tradefed/tradefed.sh b/automated/android/noninteractive-tradefed/tradefed.sh
new file mode 100755
index 0000000..280cf71
--- /dev/null
+++ b/automated/android/noninteractive-tradefed/tradefed.sh
@@ -0,0 +1,88 @@
+#!/bin/sh -ex
+
+# shellcheck disable=SC1091
+. ../../lib/sh-test-lib
+# shellcheck disable=SC1091
+. ../../lib/android-test-lib
+
+export PATH=$PWD/platform-tools:$PATH
+TIMEOUT="300"
+TEST_URL="http://testdata.validation.linaro.org/cts/android-cts-7.1_r1.zip"
+TEST_PARAMS="cts -m CtsBionicTestCases --abi arm64-v8a --disable-reboot --skip-preconditions --skip-device-info"
+TEST_PATH="android-cts"
+RESULT_FORMAT="aggregated"
+RESULT_FILE="$(pwd)/output/result.txt"
+export RESULT_FILE
+# the default number of failed test cases to be printed
+FAILURES_PRINTED="0"
+# WIFI AP SSID
+AP_SSID=""
+# WIFI AP KEY
+AP_KEY=""
+
+usage() {
+ echo "Usage: $0 [-o timeout] [-n serialno] [-c cts_url] [-t test_params] [-p test_path] [-r <aggregated|atomic>] [-f failures_printed] [-a <ap_ssid>] [-k <ap_key>]" 1>&2
+ exit 1
+}
+
+while getopts ':o:n:c:t:p:r:f:a:k:' opt; do
+ case "${opt}" in
+ o) TIMEOUT="${OPTARG}" ;;
+ n) export ANDROID_SERIAL="${OPTARG}" ;;
+ c) TEST_URL="${OPTARG}" ;;
+ t) TEST_PARAMS="${OPTARG}" ;;
+ p) TEST_PATH="${OPTARG}" ;;
+ r) RESULT_FORMAT="${OPTARG}" ;;
+ f) FAILURES_PRINTED="${OPTARG}" ;;
+ a) AP_SSID="${OPTARG}" ;;
+ k) AP_KEY="${OPTARG}" ;;
+ *) usage ;;
+ esac
+done
+
+if [ -e "/home/testuser" ]; then
+ export HOME=/home/testuser
+fi
+
+wait_boot_completed "${TIMEOUT}"
+disable_suspend
+# wait_homescreen() searches logcat output for
+# 'Displayed com.android.launcher', but the log might be washed away when
+# a lot of logs are generated after it. When the function is not executed in
+# time, an error occurs. This has been observed several times on lkft
+# testing. Refer to the following link:
+# https://lkft.validation.linaro.org/scheduler/job/18918#L4721
+# We are already using wait_boot_completed() to check boot status, let's
+# comment out wait_homescreen() and see if wait_boot_completed() is
+# sufficient.
+# wait_homescreen "${TIMEOUT}"
+
+# Increase the heap size. KVM devices in LAVA default to ~250M of heap
+export _JAVA_OPTIONS="-Xmx350M"
+java -version
+
+# Download CTS/VTS test package or copy it from local disk.
+if echo "${TEST_URL}" | grep "^http" ; then
+ wget -S --progress=dot:giga "${TEST_URL}"
+else
+ cp "${TEST_URL}" ./
+fi
+file_name=$(basename "${TEST_URL}")
+unzip -q "${file_name}"
+rm -f "${file_name}"
+
+if [ -d "${TEST_PATH}/results" ]; then
+ mv "${TEST_PATH}/results" "${TEST_PATH}/results_$(date +%Y%m%d%H%M%S)"
+fi
+
+# FIXME removing timer-suspend from vts test as it breaks the testing in lava
+if [ -e "${TEST_PATH}/testcases/vts/testcases/kernel/linux_kselftest/kselftest_config.py" ]; then
+ sed -i "/suspend/d" "${TEST_PATH}"/testcases/vts/testcases/kernel/linux_kselftest/kselftest_config.py
+fi
+
+# try to connect wifi if AP information specified
+adb_join_wifi "${AP_SSID}" "${AP_KEY}"
+
+# Run tradefed test.
+info_msg "About to run tradefed shell on device ${ANDROID_SERIAL}"
+./tradefed-runner.py -t "${TEST_PARAMS}" -p "${TEST_PATH}" -r "${RESULT_FORMAT}" -f "${FAILURES_PRINTED}"
diff --git a/automated/android/noninteractive-tradefed/tradefed.yaml b/automated/android/noninteractive-tradefed/tradefed.yaml
new file mode 100644
index 0000000..9f9ae70
--- /dev/null
+++ b/automated/android/noninteractive-tradefed/tradefed.yaml
@@ -0,0 +1,64 @@
+metadata:
+ name: cts
+ format: "Lava-Test-Shell Test Definition 1.0"
+ description: "Run tradefed based tests in LAVA."
+ maintainer:
+ - milosz.wasilewski@linaro.org
+ - chase.qi@linaro.org
+ os:
+ - debian
+ - ubuntu
+ devices:
+ - lxc
+ scope:
+ - functional
+
+params:
+ SKIP_INSTALL: "false"
+ # Specify timeout in seconds for wait_boot_completed and wait_homescreen.
+ TIMEOUT: "300"
+ # Download CTS package or copy it from local disk.
+    # TEST_URL: "/root/android-cts/linaro/7.1_r1/android-cts-7.1_r1.zip"
+ TEST_URL: "http://testdata.validation.linaro.org/cts/android-cts-7.1_r1.zip"
+ TEST_PARAMS: "run cts -m CtsBionicTestCases --abi arm64-v8a --disable-reboot --skip-preconditions --skip-device-info"
+ # set to the name of the top directory in TEST_URL archive
+ # This should be 'android-cts' for CTS and android-vts for VTS
+ TEST_PATH: "android-cts"
+ # Specify result format: aggregated or atomic
+ RESULTS_FORMAT: "aggregated"
+ # Specify url and token for file uploading.
+ URL: "https://archive.validation.linaro.org/artifacts/team/qa/"
+ TOKEN: ""
+ AP_SSID: ""
+ AP_KEY: ""
+ # Specify the failures number to be printed
+ FAILURES_PRINTED: "0"
+ TEST_REBOOT_EXPECTED: "false"
+
+run:
+ steps:
+ - cd ./automated/android/noninteractive-tradefed
+    # Run setup.sh in the original shell to preserve env variables.
+ - . ./setup.sh
+ - echo "after ./setup.sh"
+ # delete the test user to clean environment
+ - userdel testuser -r -f || true
+    # create test user to run the cts/vts tests
+ - useradd -m testuser && echo "testuser created successfully"
+ - chown testuser:testuser .
+ - if [[ ${TEST_REBOOT_EXPECTED} == "true" ]]; then ./monitor_fastboot.sh & fi
+ - sudo -u testuser ./tradefed.sh -o "${TIMEOUT}" -c "${TEST_URL}" -t "${TEST_PARAMS}" -p "${TEST_PATH}" -r "${RESULTS_FORMAT}" -n "${ANDROID_SERIAL}" -f "${FAILURES_PRINTED}" -a "${AP_SSID}" -k "${AP_KEY}"
+ # Upload test log and result files to artifactorial.
+ - cp -r ./${TEST_PATH}/results ./output/ || true
+ - cp -r ./${TEST_PATH}/logs ./output/ || true
+ # Include logs dumped from TF shell 'd l' command.
+ - if ls /tmp/tradefed*; then cp -r /tmp/tradefed* ./output || true; fi
+ - tar caf tradefed-output-$(date +%Y%m%d%H%M%S).tar.xz ./output
+ - ATTACHMENT=$(ls tradefed-output-*.tar.xz)
+ - ../../utils/upload-to-artifactorial.sh -a "${ATTACHMENT}" -u "${URL}" -t "${TOKEN}"
+ # Send test result to LAVA.
+ - ../../utils/send-to-lava.sh ./output/result.txt
+ - userdel testuser -f -r || true
+ # When adb device lost, end test job to mark it as 'incomplete'.
+ - if ! adb shell echo ok; then error_fatal "adb device $ANDROID_SERIAL lost!"; fi
+ - if [[ ${TEST_REBOOT_EXPECTED} == "true" ]]; then killall monitor_fastboot.sh; fi