diff options
-rw-r--r-- | build-scripts/BUILD-INFO_lava.txt | 6 | ||||
-rw-r--r-- | build-scripts/BUILD-INFO_toolchain.txt | 7 | ||||
-rw-r--r-- | build-scripts/build-android | 13 | ||||
-rw-r--r-- | build-scripts/build-android-toolchain | 4 | ||||
-rw-r--r-- | build-scripts/build-android-toolchain-linaro | 4 | ||||
-rwxr-xr-x | build-scripts/create-user-build-script | 91 | ||||
-rw-r--r-- | build-scripts/helpers | 3 | ||||
-rwxr-xr-x | build-scripts/post-build-lava.py | 217 | ||||
-rwxr-xr-x | node/lava-submit | 3 | ||||
-rwxr-xr-x | node/prepare_build_config.py | 6 | ||||
-rw-r--r-- | utils/mangle-jobs/builders.xml | 2 | ||||
-rw-r--r-- | utils/mangle-jobs/push-artifacts-set.mangle | 10 | ||||
-rw-r--r-- | utils/new-publish/README | 37 | ||||
-rwxr-xr-x | utils/new-publish/clean-uploads | 6 | ||||
-rwxr-xr-x | utils/new-publish/propagate.py | 11 | ||||
-rwxr-xr-x | utils/new-publish/publib.py | 15 | ||||
-rwxr-xr-x | utils/new-publish/publish | 6 | ||||
-rwxr-xr-x | utils/new-publish/setup.sh | 6 |
18 files changed, 301 insertions, 146 deletions
diff --git a/build-scripts/BUILD-INFO_lava.txt b/build-scripts/BUILD-INFO_lava.txt new file mode 100644 index 0000000..31bf9b8 --- /dev/null +++ b/build-scripts/BUILD-INFO_lava.txt @@ -0,0 +1,6 @@ +Format-Version: 0.1 + + +Files-Pattern: lava-job-info* +License-Type: open + diff --git a/build-scripts/BUILD-INFO_toolchain.txt b/build-scripts/BUILD-INFO_toolchain.txt new file mode 100644 index 0000000..11a6c17 --- /dev/null +++ b/build-scripts/BUILD-INFO_toolchain.txt @@ -0,0 +1,7 @@ +Format-Version: 0.1 + + +Files-Pattern: * +Build-Name: toolchain +License-Type: open + diff --git a/build-scripts/build-android b/build-scripts/build-android index ee4937f..7e1f2ea 100644 --- a/build-scripts/build-android +++ b/build-scripts/build-android @@ -160,16 +160,3 @@ fi if [ -f out/kernel_config ]; then (cd out/; ${BUILD_SCRIPT_ROOT}/create-user-kernel-script) fi - -# add EULA support -( - if [[ "$JOB_NAME" =~ .*blob.* && ("$JOB_NAME" =~ .*origen.* || "$JOB_NAME" =~ .*snowball.*) ]] - then - eula="EULA.txt" - else - eula="OPEN-EULA.txt" - fi - touch out/OPEN-EULA.txt - cd out/target/product/*/ - touch $eula -) || true diff --git a/build-scripts/build-android-toolchain b/build-scripts/build-android-toolchain index cb7f46f..ed09a9a 100644 --- a/build-scripts/build-android-toolchain +++ b/build-scripts/build-android-toolchain @@ -65,5 +65,5 @@ mkdir -p ../out/cross/$TOOLCHAIN_PREFIX $MAKE $MAKE_OPTS install prefix=$PWD/../out/cross/$TOOLCHAIN_PREFIX tar -cj -C ../out/cross -f ../out/android-toolchain-eabi-$GCC_VERSION-$BUILD_NUMBER-$BUILD_ID-linux-x64.tar.bz2 . 
-# add EULA support -touch ../out/OPEN-EULA.txt +# Add BUILD-INFO.txt support +cp ${BUILD_SCRIPT_ROOT}/BUILD-INFO_toolchain.txt ../out/BUILD-INFO.txt diff --git a/build-scripts/build-android-toolchain-linaro b/build-scripts/build-android-toolchain-linaro index 5ddada7..0d05216 100644 --- a/build-scripts/build-android-toolchain-linaro +++ b/build-scripts/build-android-toolchain-linaro @@ -113,5 +113,5 @@ for i in $TOOLCHAIN_PREFIX/bin/*-gcc; do $i -v done -# add EULA suport -touch ../out/OPEN-EULA.txt +# Add BUILD-INFO.txt support +cp ${BUILD_SCRIPT_ROOT}/BUILD-INFO_toolchain.txt ../out/BUILD-INFO.txt diff --git a/build-scripts/create-user-build-script b/build-scripts/create-user-build-script index 04f07de..3d35447 100755 --- a/build-scripts/create-user-build-script +++ b/build-scripts/create-user-build-script @@ -17,7 +17,7 @@ if [ -n "$SOURCE_OVERLAY" ]; then USAGE_SUM="'Usage: \$0 -m <manifest.xml> -o <overlay.tar> [ -t -d directory -l login ]'" USAGE_OVERLAY="'\\n -m <manifest> If -t is not used, then using a browser with cookies you\\n must download the pinned manifest from:\\n $PINNED_MANIFEST_URL\\n -o The path to the vendor required overlay.\\n Can be downloaded from http://snapshots.linaro.org/android/binaries/$SOURCE_OVERLAY\\n'" USAGE_OPTOVERLAY="m:o:" - USAGE_OPTHANDLER="o ) SOURCE_OVERLAY=\$OPTARG; SOURCE_OVERLAY_OPTIONAL=0;; m ) MANIFEST=\$OPTARG;;" + USAGE_OPTHANDLER="o ) SOURCE_OVERLAY=\$OPTARG; SOURCE_OVERLAY_OPTIONAL=0;; m ) MANIFEST=\`readlink -f \$OPTARG\`;;" else USAGE_SUM="'Usage: \$0 [ -t -d directory -l login ]'" fi @@ -31,6 +31,7 @@ header() set -e EXACT=1 +INTERACTIVE=1 DIR=android if [ -z "\${LINARO_ANDROID_ACCESS_ID}" ] ; then LINARO_ANDROID_ACCESS_ID=default-bot @@ -48,49 +49,73 @@ usage() echo " -l <login-id> login-id to clone from linaro-private git repositories" echo " If in doubt, please contact Linaro Android mailing list for details" echo " Default: \${LINARO_ANDROID_ACCESS_ID}" + echo " -y Assume answer 'YES' for all questions. 
Non-interactive mode. Requires -l" exit 1 } -while getopts "${USAGE_OPTOVERLAY}d:l:ht" optn; do +while getopts "${USAGE_OPTOVERLAY}d:l:hty" optn; do case \$optn in $USAGE_OPTHANDLER d ) DIR=\$OPTARG;; l ) LINARO_ANDROID_ACCESS_ID=\$OPTARG;; t ) EXACT=0;; + y ) INTERACTIVE=0;; h ) usage; exit 1;; esac done +if [ "\${LINARO_ANDROID_ACCESS_ID}" == "default-bot" -a \${INTERACTIVE} -eq 0 ] ; then + usage +fi + UBUNTU=\`cat /etc/issue.net | cut -d' ' -f2\` HOST_ARCH=\`uname -m\` if [ \${HOST_ARCH} == "x86_64" ] ; then - PKGS='git-core gnupg flex bison gperf build-essential zip curl zlib1g-dev libc6-dev lib32ncurses5-dev x11proto-core-dev libx11-dev lib32z1-dev libgl1-mesa-dev g++-multilib mingw32 tofrodos python-markdown libxml2-utils xsltproc uboot-mkimage openjdk-6-jdk openjdk-6-jre vim-common' + PKGS='gnupg flex bison gperf build-essential zip curl zlib1g-dev libc6-dev lib32ncurses5-dev x11proto-core-dev libx11-dev lib32z1-dev libgl1-mesa-dev g++-multilib mingw32 tofrodos python-markdown libxml2-utils xsltproc uboot-mkimage openjdk-6-jdk openjdk-6-jre vim-common python-parted python-yaml wget' else echo "ERROR: Only 64bit Host(Build) machines are supported at the moment." exit 1 fi -if [[ \${UBUNTU} =~ "12." ]]; then - PKGS+=' lib32readline-gplv2-dev' -elif [[ \${UBUNTU} =~ "10.04" ]] ; then - PKGS+=' ia32-libs lib32readline5-dev' +if [[ \${UBUNTU} =~ "13." 
|| \${UBUNTU} =~ "12.10" ]]; then + #Install basic dev package missing in chrooted environments + sudo apt-get install software-properties-common + sudo dpkg --add-architecture i386 + PKGS+=' libstdc++6:i386 git-core' +elif [[ \${UBUNTU} =~ "12.04" || \${UBUNTU} =~ "10.04" ]] ; then + #Install basic dev package missing in chrooted environments + sudo apt-get install python-software-properties + if [[ \${UBUNTU} =~ "12.04" ]]; then + PKGS+=' libstdc++6:i386 git-core' + else + PKGS+=' ia32-libs libssl-dev libcurl4-gnutls-dev libexpat1-dev gettext' + fi else - echo - echo "ERROR: Only Ubuntu 10.04, 12.04 and 12.10 versions are supported." + echo "ERROR: Only Ubuntu 10.04, 12.* and 13.04 versions are supported." exit 1 fi -echo "Checking and installing missing dependencies if any .. .." -MISSING=\`dpkg-query -W -f='\${Status}\n' \${PKGS} 2>&1 | grep -i 'No packages found matching' | cut -d' ' -f6\` -if [ -n "\$MISSING" ] ; then - echo -n "Missing required packages: " - for m in \$MISSING ; do - echo -n "\${m%?} " - done - echo - sudo add-apt-repository "deb http://archive.ubuntu.com/ubuntu \$(lsb_release -sc) main universe restricted multiverse" - sudo apt-get update +echo +echo "Setting up Ubuntu software repositories..." +sudo add-apt-repository "deb http://archive.ubuntu.com/ubuntu \$(lsb_release -sc) main universe restricted multiverse" +sudo apt-get update +echo +echo "Installing missing dependencies if any..." +if [ \$INTERACTIVE -eq 1 ] ; then + sudo apt-get install \${PKGS} +else + sudo apt-get -y install \${PKGS} +fi +# Obsolete git version 1.7.04 in lucid official repositories +# repo need at least git v1.7.2 +if [[ \${UBUNTU} =~ "10.04" ]]; then echo - sudo apt-get install \${MISSING} + echo "repo tool complains of obsolete git version 1.7.04 in lucid official repositories" + echo "Building git for lucid from precise sources .." 
+ wget http://archive.ubuntu.com/ubuntu/pool/main/g/git/git_1.7.9.5.orig.tar.gz + tar xzf git_1.7.9.5.orig.tar.gz + cd git-1.7.9.5/ + make prefix=/usr + sudo make prefix=/usr install fi EOF @@ -110,9 +135,13 @@ EOF fi cat <<EOF if [ -d \${DIR} ] ; then - echo "Directory \${DIR} exists. Are you sure you want to use this? (y/n) " - read CONTINUE - [ \${CONTINUE} == y ] || exit 1 + if [ \$INTERACTIVE -eq 1 ] ; then + echo "Directory \${DIR} exists. Are you sure you want to use this? (y/n) " + read CONTINUE + [ \${CONTINUE} == y ] || exit 1 + else + echo "Using existing directory: \${DIR} . " + fi else mkdir \${DIR} fi @@ -126,7 +155,7 @@ jenkins_configs_method() cat <<EOF # check for linaro private manifests PM=\`echo ${MANIFEST_REPO} | grep -i "linaro-private" | wc -l\` -if [ \${PM} -gt 0 ] ; then +if [ \${PM} -gt 0 -a \${INTERACTIVE} -eq 1 ] ; then if [ "\${LINARO_ANDROID_ACCESS_ID}" == "default-bot" ] ; then echo "You must specify valid login/access-id to clone from linaro-private manifest repositories." 
echo "Press "y" to continue (which may result in incomplete build or failure), OR" @@ -148,16 +177,22 @@ export TARGET_PRODUCT=${TARGET_PRODUCT} export TARGET_SIMULATOR=false export BUILD_TINY_ANDROID=${BUILD_TINY_ANDROID} export CPUS=\`grep -c processor /proc/cpuinfo\` +export INCLUDE_PERF=${INCLUDE_PERF} +export TARGET_BUILD_VARIANT=${TARGET_BUILD_VARIANT} +export BUILD_FS_IMAGE=${BUILD_FS_IMAGE} +export DEBUG_NO_STRICT_ALIASING=${DEBUG_NO_STRICT_ALIASING} +export DEBUG_NO_STDCXX11=${DEBUG_NO_STDCXX11} +export TOOLCHAIN_TRIPLET=${TOOLCHAIN_TRIPLET} +export ANDROID_64=${ANDROID_64} EOF if [ -n "$TOOLCHAIN_URL" ] ; then cat <<EOF export TOOLCHAIN_URL=${TOOLCHAIN_URL} export TARGET_TOOLS_PREFIX=android-toolchain-eabi/bin/arm-linux-androideabi- - EOF else cat <<EOF -export TARGET_TOOLS_PREFIX=prebuilt/linux-x86/toolchain/arm-linux-androideabi-4.4.x/bin/arm-linux-androideabi- +export TARGET_TOOLS_PREFIX=${TARGET_TOOLS_PREFIX} EOF fi @@ -204,7 +239,7 @@ if [ \${EXACT} -eq 1 ] ; then fi # check for linaro private git repositories PRI=\`grep -i "linaro-private" .repo/manifests/\${MANIFEST_FILENAME} | wc -l\` -if [ \${PRI} -gt 0 ] ; then +if [ \${PRI} -gt 0 -a \${INTERACTIVE} -eq 1 ] ; then if [ "\${LINARO_ANDROID_ACCESS_ID}" == "default-bot" ] ; then echo "You must specify valid login/access-id to clone from linaro-private git repositories." 
echo "Press "y" to continue (which may result in incomplete build), OR" @@ -220,7 +255,7 @@ if [ \${PRI} -gt 0 ] ; then fi sed -i 's/\/\/.*-bot@/\/\/'"\${LINARO_ANDROID_ACCESS_ID}"'@/' .repo/manifests/\${MANIFEST_FILENAME} fi -./repo sync +./repo sync -f -j1 EOF diff --git a/build-scripts/helpers b/build-scripts/helpers index cba665a..9a4c7a2 100644 --- a/build-scripts/helpers +++ b/build-scripts/helpers @@ -1,5 +1,6 @@ # Set REPO_MIRROR to non-empty value to get around upstream downtimes -REPO_MIRROR="--repo-url=http://android.git.linaro.org/git-ro/tools/repo" +#REPO_MIRROR="--repo-url=http://android.git.linaro.org/git-ro/tools/repo" +REPO_MIRROR="--repo-url=git://android.git.linaro.org/tools/repo" setup-repo-vars () { EABI="${EABI-arm-eabi}" diff --git a/build-scripts/post-build-lava.py b/build-scripts/post-build-lava.py index f5778df..8c62013 100755 --- a/build-scripts/post-build-lava.py +++ b/build-scripts/post-build-lava.py @@ -3,6 +3,7 @@ import sys import os import re import json +import copy import xmlrpclib @@ -35,6 +36,12 @@ PRODUCT_MAP = { "vexpress_rtsm": { "test_device_type": "rtsm_ve-a15x4-a7x4", }, + "full_maguro": { + "test_device_type": "nexus", + }, + "full_arndale": { + "test_device_type": "arndale", + }, } OPTION_SUFFIX = "_OPTION" @@ -142,12 +149,14 @@ def gen_lava_android_test_actions(tests=[]): continue test_actions.append(test) - if len(test_actions) > 0: + ## make the next test installation be able to execute + ## when one test installation failed + for test_action in list(set(test_actions)): inst_action = { "command": "lava_android_test_install", "parameters": { # ensure only unique test names - "tests": list(set(test_actions)) + "tests": [test_action] } } actions.append(inst_action) @@ -227,8 +236,9 @@ def gen_lava_android_test_actions(tests=[]): return actions -def gen_test_plan_actions(): - test_plan = os.environ.get("LAVA_TEST_PLAN") +def gen_test_plan_actions(test_plan=None): + if test_plan == None: + test_plan = 
os.environ.get("LAVA_TEST_PLAN") if test_plan == None: test_plan = '0xbench, glmark2, monkey' test_plans = test_plan.split(',') @@ -261,10 +271,12 @@ def gen_custom_actions(): test_actions = [] prefix = 'LAVA_TEST_' pat_suffix = '_PATTERN' + sec_job_prefix = 'LAVA_TEST_PLAN_SECONDARY_' test_list = [] for var in os.environ.keys(): if var.startswith(prefix) and (not var.endswith(pat_suffix)) \ and (var != 'LAVA_TEST_PLAN') \ + and (not var.startswith(sec_job_prefix)) \ and (not var.endswith(TIMEOUT_SUFFIX)): test_list.append(var) test_list.sort() @@ -331,6 +343,14 @@ def main(): # User can disable the installation of android binaries (doing this will # disable hardware acceleration) enable_android_install_binaries = os.environ.get("LAVA_ANDROID_BINARIES") + # Some devices need not boot to GUI like the Tiny Android builds and builds + # which need a proprietary binary overlay to be installed before expecting + # GUI. + wait_for_homescreen = os.environ.get("LAVA_WAIT_FOR_HOMESCREEN") + if wait_for_homescreen == None: + wait_for_homescreen = True + elif wait_for_homescreen.lower() in ['0','false','no']: + wait_for_homescreen = False # Not set, default to False, because this is relevant only for panda # from Vishal if enable_android_install_binaries == None: @@ -344,6 +364,13 @@ def main(): # if this value is not set, then use the 18000 seconds as the default value default_timeout = os.environ.get("DEFAULT_TIMEOUT", 18000) + # Set the file extension based on the type of artifacts + artifact_type = os.environ.get("MAKE_TARGETS", "tarball") + if artifact_type == "droidcore": + file_extension = "img" + else: + file_extension = "tar.bz2" + # Board-specific parameters if target_product not in PRODUCT_MAP: # We don't know how to test this job, so skip testing. 
@@ -372,17 +399,17 @@ def main(): f.close() default_stream = '/private/team/linaro/android-daily/' - actions = [ + common_actions = [ { "command": "deploy_linaro_android_image", "parameters": { - "boot": "%s%s" % (download_url, - "/boot.tar.bz2"), - "system":"%s%s" % (download_url, - "/system.tar.bz2"), - "data":"%s%s" % (download_url, - "/userdata.tar.bz2") + "boot": "%s%s%s" % (download_url, + "/boot.", file_extension), + "system":"%s%s%s" % (download_url, + "/system.", file_extension), + "data":"%s%s%s" % (download_url, + "/userdata.", file_extension) }, "metadata": { @@ -393,80 +420,106 @@ def main(): }] if enable_android_install_binaries: - actions.append({"command": "android_install_binaries"}) - - actions.append({"command": "boot_linaro_android_image"}) - - actions.extend(gen_test_actions()) - - actions.append( - { - "command": "submit_results_on_host", - "parameters": - { - "server": schema_url, - "stream": PRODUCT_MAP[target_product].get( - "test_stream", default_stream) - } - }) - - config_json = {"job_name": build_url, - "image_type": 'android', - "timeout": int(default_timeout), - "actions": actions - } - - # allow overload lava device_type by build config - test_device_type = os.environ.get("LAVA_DEVICE_TYPE") - if not test_device_type: - test_device_type = PRODUCT_MAP[target_product]["test_device_type"] - - # allow to submit to a specific device - test_device = os.environ.get("LAVA_DEVICE") - - # test_device set will win over test_device_type - # LAVA parameter naming could use more consistency - if test_device: - config_json["target"] = test_device + common_actions.append({"command": "android_install_binaries"}) + + if wait_for_homescreen == False: + common_actions.append({"command": "boot_linaro_android_image", + "parameters": { + "wait_for_home_screen": False + } + }) else: - config_json["device_type"] = test_device_type - - config = json.dumps(config_json, indent=4) + common_actions.append({"command": "boot_linaro_android_image"}) - print config + 
plan_list = ["LAVA_TEST_PLAN"] + sec_plan_prefix = "LAVA_TEST_PLAN_SECONDARY_" + + for var in os.environ.keys(): + if var.startswith(sec_plan_prefix): + plan_list.append(var) + plan_list.sort() + # Create a copy of common actions + for plan in plan_list: + actions = copy.deepcopy(common_actions) + if plan == "LAVA_TEST_PLAN": + actions.extend(gen_test_actions()) + else: + actions.extend(gen_test_plan_actions(os.environ.get(plan))) + actions.append( + { + "command": "submit_results_on_host", + "parameters": + { + "server": schema_url, + "stream": PRODUCT_MAP[target_product].get( + "test_stream", default_stream) + } + }) + + config_json = {"job_name": build_url, + "image_type": 'android', + "timeout": int(default_timeout), + "actions": actions + } + + # allow overload lava device_type by build config + test_device_type = os.environ.get("LAVA_DEVICE_TYPE") + if not test_device_type: + test_device_type = PRODUCT_MAP[target_product]["test_device_type"] + + # allow to submit to a specific device + test_device = os.environ.get("LAVA_DEVICE") + + # test_device set will win over test_device_type + # LAVA parameter naming could use more consistency + if test_device: + config_json["target"] = test_device + else: + config_json["device_type"] = test_device_type + + config = json.dumps(config_json, indent=4) + + print config + + lava_token_f = os.environ.get("LAVA_TOKEN_FILE") + if lava_token_f == None: + lava_token_f = '/var/run/lava/lava-token' + else: + lava_token_f = '/var/run/lava/%s' % lava_token_f + + with open(lava_token_f) as fd: + lava_token = fd.read().strip() + + try: + report_url = ("%(schema)s://" + "%(lava_user)s:%(lava_token)s@%(lava_server)s") % dict( + schema=schema, + lava_user=lava_user, + lava_token=lava_token, + lava_server=lava_server) + server = xmlrpclib.ServerProxy(report_url) + lava_job_id = server.scheduler.submit_job(config) + lava_server_root = lava_server.rstrip("/") + if lava_server_root.endswith("/RPC2"): + lava_server_root = 
lava_server_root[:-len("/RPC2")] + except xmlrpclib.ProtocolError, e: + print "Error making a LAVA request:", obfuscate_credentials(str(e)) + sys.exit(1) + + print "LAVA Job Id: %s, URL: %s://%s/scheduler/job/%s" % \ + (lava_job_id, schema, lava_server_root, lava_job_id) + + if plan == "LAVA_TEST_PLAN": + json.dump({ + 'lava_url': "%s://%s" % (schema, lava_server_root), + 'job_id': lava_job_id, + }, open('out/lava-job-info', 'w')) + else: + json.dump({ + 'lava_url': "%s://%s" % (schema, lava_server_root), + 'job_id': lava_job_id, + }, open('out/lava-job-info-' + plan , 'w')) - lava_token_f = os.environ.get("LAVA_TOKEN_FILE") - if lava_token_f == None: - lava_token_f = '/var/run/lava/lava-token' - else: - lava_token_f = '/var/run/lava/%s' % lava_token_f - - with open(lava_token_f) as fd: - lava_token = fd.read().strip() - - try: - report_url = ("%(schema)s://" - "%(lava_user)s:%(lava_token)s@%(lava_server)s") % dict( - schema=schema, - lava_user=lava_user, - lava_token=lava_token, - lava_server=lava_server) - server = xmlrpclib.ServerProxy(report_url) - lava_job_id = server.scheduler.submit_job(config) - lava_server_root = lava_server.rstrip("/") - if lava_server_root.endswith("/RPC2"): - lava_server_root = lava_server_root[:-len("/RPC2")] - except xmlrpclib.ProtocolError, e: - print "Error making a LAVA request:", obfuscate_credentials(str(e)) - sys.exit(1) - - print "LAVA Job Id: %s, URL: %s://%s/scheduler/job/%s" % \ - (lava_job_id, schema, lava_server_root, lava_job_id) - - json.dump({ - 'lava_url': "%s://%s" % (schema, lava_server_root), - 'job_id': lava_job_id, - }, open('out/lava-job-info', 'w')) if __name__ == "__main__": main() diff --git a/node/lava-submit b/node/lava-submit index 992faf5..120508d 100755 --- a/node/lava-submit +++ b/node/lava-submit @@ -31,4 +31,7 @@ if ! 
"${BUILD_SCRIPT_ROOT}"/post-build-lava.py; then fi fi +# Add BUILD-INFO.txt support for lava-job-info files +cp ${BUILD_SCRIPT_ROOT}/BUILD-INFO_lava.txt out/BUILD-INFO.txt + EOF diff --git a/node/prepare_build_config.py b/node/prepare_build_config.py index d8b0eea..ce80a3d 100755 --- a/node/prepare_build_config.py +++ b/node/prepare_build_config.py @@ -53,7 +53,7 @@ def validate_config(config, slave_type): else: slave_type_cat = "normal" - if owner in ["linaro-android-private", "linaro-android-restricted"]: + if owner.endswith("-restricted"): owner_cat = "restricted" else: owner_cat = "normal" @@ -69,10 +69,10 @@ def validate_config(config, slave_type): # Now, process few most expected mismatches in adhoc way, # to provide better error messages if slave_type_cat == "restricted" and owner_cat != "restricted": - raise BuildConfigMismatchException("Only jobs owned by ~linaro-android-restricted may run on this build slave type") + raise BuildConfigMismatchException("Only jobs owned by ~linaro-android-*-restricted may run on this build slave type") if owner_cat == "restricted" and build_type_cat != "restricted": - raise BuildConfigMismatchException("Jobs owned by ~linaro-android-restricted must use BUILD_TYPE=build-android-*-restricted") + raise BuildConfigMismatchException("Jobs owned by ~linaro-android-*-restricted must use BUILD_TYPE=build-android-*-restricted") # Finally, generic mismatch detection if slave_type_cat != owner_cat or slave_type_cat != build_type_cat: diff --git a/utils/mangle-jobs/builders.xml b/utils/mangle-jobs/builders.xml index f6e78a9..4d89266 100644 --- a/utils/mangle-jobs/builders.xml +++ b/utils/mangle-jobs/builders.xml @@ -5,7 +5,7 @@ rm -rf build-tools bzr get lp:linaro-android-build-tools build-tools sudo -H -E build-tools/node/build us-east-1.ec2-git-mirror.linaro.org "$CONFIG" -time build-tools/utils/new-publish/publish -p2 $JOB_NAME/$BUILD_NUMBER 
"build/out/target/*/*/*.img,build/out/target/*/*/*.img.bz2,build/out/target/*/*/*.tar.bz2,build/out/target/*/*/MD5SUMS,build/out/*.tar.bz2,build/out/*.xml,build/out/*_config,build/out/lava-job-info,build/out/linaro_kernel_build_cmds.sh,build/out/linaro_android_build_cmds.sh,build/out/*EULA*,build/out/target/product/*/*EULA*,build/out/target/product/*/howto/*EULA*,build/out/BUILD-INFO.txt,build/out/*/BUILD-INFO.txt,build/out/*/*/BUILD-INFO.txt,build/out/*/*/*/BUILD-INFO.txt,build/out/target/product/*/howto/HOWTO_*.txt,build/out/target/product/*/HOWTO_*.txt" +time build-tools/utils/new-publish/publish -p2 $JOB_NAME/$BUILD_NUMBER "build/out/target/*/*/*.img,build/out/target/*/*/*.img.bz2,build/out/target/*/*/*.tar.bz2,build/out/target/*/*/MD5SUMS,build/out/*.tar.bz2,build/out/*.xml,build/out/*_config,build/out/lava-job-info,build/out/linaro_kernel_build_cmds.sh,build/out/linaro_android_build_cmds.sh,build/out/BUILD-INFO.txt,build/out/*/BUILD-INFO.txt,build/out/*/*/BUILD-INFO.txt,build/out/*/*/*/BUILD-INFO.txt,build/out/target/product/*/howto/HOWTO_*.txt,build/out/target/product/*/HOWTO_*.txt" build-tools/node/lava-submit "$CONFIG" diff --git a/utils/mangle-jobs/push-artifacts-set.mangle b/utils/mangle-jobs/push-artifacts-set.mangle index 2813cd7..67e0045 100644 --- a/utils/mangle-jobs/push-artifacts-set.mangle +++ b/utils/mangle-jobs/push-artifacts-set.mangle @@ -1,4 +1,9 @@ -# Update list of artifacts to push to snapshots.linaro.org +# +# This script is no longer used! 
+ + +# See build-steps-set.mangle & builders.xml + + new_value = "build/out/target/*/*/*.img," \ "build/out/target/*/*/*.img.bz2," \ @@ -10,9 +15,6 @@ new_value = "build/out/target/*/*/*.img," \ "build/out/lava-job-info," \ "build/out/linaro_kernel_build_cmds.sh," \ "build/out/linaro_android_build_cmds.sh," \ - "build/out/*EULA*," \ - "build/out/target/product/*/*EULA*," \ - "build/out/target/product/*/howto/*EULA*," \ "build/out/**/BUILD-INFO.txt," \ "build/out/target/product/*/HOWTO_*.txt" \ "build/out/target/product/*/howto/HOWTO_*.txt" diff --git a/utils/new-publish/README b/utils/new-publish/README index e63e000..b9be0c4 100644 --- a/utils/new-publish/README +++ b/utils/new-publish/README @@ -72,3 +72,40 @@ Publishing starts on build slave with SFTPing artifact files to master processing by calling out (by SSH) sshd-config fixed script on master. This script recursively applies same processing (chroot SFTP, fixed script) to publish files to snapshots. + +Conclusions and Future Work +--------------------------- +The biggest management and security issue with the implementation described +above is authentication of publishing clients to publishing service. +Implementation described above is cumbersome to setup and maintain and +doesn't adhere to strictest security practices. + +To address this problem, implementation of publishing as a web service may be +suggested - this way, authentication handling on server side is confined to +a single custom component, web application. It thus can be very flexible +and featureful, for example, we can implement "publishing tokens", each +associated with set of constraints, like "active not before 30min from +time of issuance", "active not after 2hr from time of issuance", "can +be used for publishing type 'android'", "publisher IP should be X.X.X.X", +etc., etc. However, there still remain problems of issuing tokens for +build hosts. 
Essentially, tokens should be "injected" into builds by +a trusted party (a kind of build scheduling frontend). We already have +frontend on android-build, but ci.linaro.org presents "raw" Jenkins. It +might be possible to integrate needed functionality into Jenkins via plugin. + +But publishing few moderately-sized files is not the only usecase for +Publishing Service. For OpenEmbedded builds, we need to publish used sources/ +cache files, which may be thousands of files totalling gigabytes. Except +that any particular build would likely change only reasonably small +subset of these files, and only those need to be actually published. +This is clearly a usecase for rsync, but with rsync, we would need to deal +with PAM for any custom authentication, and it's still unclear if it will be +possible to achieve flexibility similar to tokens described. + +That's the dichotomy we have - we need efficient transfer protocol, as +we potentially deal with many files and large amounts of data, and yet +we need flexible token/ticket style authentication. It may be possible +to choose a compromise between the two - implement a webservice with +rudimentary "file freshness" protocol (which would work on the level of +entire file, not sub-blocks). Existing system-level ticketing systems +like Kerberos can also be considered. 
diff --git a/utils/new-publish/clean-uploads b/utils/new-publish/clean-uploads new file mode 100755 index 0000000..f637a70 --- /dev/null +++ b/utils/new-publish/clean-uploads @@ -0,0 +1,6 @@ +#!/bin/sh +# +# Cronjob to clean up uploads area on master +# + +find /mnt/publish/uploads/ -mindepth 1 -mmin +120 | xargs -n100 rm -rf diff --git a/utils/new-publish/propagate.py b/utils/new-publish/propagate.py index 6a9b166..e279e37 100755 --- a/utils/new-publish/propagate.py +++ b/utils/new-publish/propagate.py @@ -10,7 +10,7 @@ import paramiko import publib -REMOTE_HOST_PRODUCTION = "mombin.canonical.com" +REMOTE_HOST_PRODUCTION = "snapshots.linaro.org" REMOTE_HOST_STAGING = "staging.snapshots.linaro.org" PUBLISH_USER_NAME = "linaro-android-build-publish" TRIGGER_USER_NAME = "linaro-android-build-publish-trigger" @@ -21,11 +21,12 @@ REMOTE_UPLOAD_DIR = "/uploads/android" if __name__ == "__main__": - optparser = optparse.OptionParser(usage="%prog") + optparser = optparse.OptionParser(usage="%prog <job/build>") optparser.add_option("-s", "--staging", action="store_true", help="Publish to staging server") optparser.add_option("--identity-publish", metavar="KEY", default=PUBLISH_KEY_FILE, help="Publish SSH key file") optparser.add_option("--identity-trigger", metavar="KEY", default=TRIGGER_KEY_FILE, help="Trigger SSH key file") optparser.add_option("-n", "--dry-run", action="store_true", help="Don't actually publish files, log commands") + optparser.add_option("--host", help="Override destination publishing host, for debugging") optparser.add_option("--step", default="all", help="Run only specific step") options, args = optparser.parse_args(sys.argv[1:]) if len(args) != 1: @@ -33,12 +34,16 @@ if __name__ == "__main__": publib.validate_build_id(args[0]) + print "Starting propagation phase" + if options.staging: remote_host = REMOTE_HOST_STAGING opt_staging = "-s" else: remote_host = REMOTE_HOST_PRODUCTION opt_staging = "" + if options.host: + remote_host = options.host if 
options.step in ("all", "1"): file_list = [] @@ -62,9 +67,11 @@ if __name__ == "__main__": client.connect(remote_host, username=TRIGGER_USER_NAME, key_filename=TRIGGER_KEY_FILE) stdin, stdout, stderr = client.exec_command("reshuffle-files -t android -j %s -n %s -m %s" % (job, build, opt_staging)) stdin.close() + rc = stdout.channel.recv_exit_status() print "=== stdout ===" print stdout.read() print "=== stderr ===" print stderr.read() print "==============" client.close() + sys.exit(rc) diff --git a/utils/new-publish/publib.py b/utils/new-publish/publib.py index e6b186d..9d39025 100755 --- a/utils/new-publish/publib.py +++ b/utils/new-publish/publib.py @@ -2,6 +2,7 @@ import sys import os import glob +import tempfile import optparse import paramiko @@ -105,15 +106,21 @@ def make_upload_script(file_list, upload_dir, build_dir="", strip=0): def upload_files(upload_script, host, user, key, options): - with open("/tmp/sftp.script", "w") as f: - f.write("\n".join(upload_script) + "\n") + fd, fname = tempfile.mkstemp(prefix="sftp_script") + os.close(fd) + f = open(fname, "w") + f.write("\n".join(upload_script) + "\n") + f.close() - cmd = "sftp -i %s -b /tmp/sftp.script %s@%s" % (key, user, host) + cmd = "sftp -i %s -b %s %s@%s" % (key, fname, user, host) print cmd sys.stdout.flush() if not options.dry_run: rc = os.system(cmd) - os.remove("/tmp/sftp.script") + try: + os.remove(fname) + except: + pass if rc != 0: print "ERROR: sftp transfer finished with error" sys.exit(1) diff --git a/utils/new-publish/publish b/utils/new-publish/publish index c312ecb..bae8ac0 100755 --- a/utils/new-publish/publish +++ b/utils/new-publish/publish @@ -23,6 +23,7 @@ if __name__ == "__main__": optparser.add_option("--identity-copy", metavar="KEY", default=COPY_KEY_FILE, help="SSH key file") optparser.add_option("--identity-trigger", metavar="KEY", default=TRIGGER_KEY_FILE, help="SSH key file") optparser.add_option("-n", "--dry-run", action="store_true", help="Don't actually publish files, 
log commands") + optparser.add_option("--host", help="Override destination publishing host, for debugging") options, args = optparser.parse_args(sys.argv[1:]) if len(args) < 2: optparser.error("Wrong number of arguments") @@ -49,9 +50,12 @@ if __name__ == "__main__": upload_script = publib.make_upload_script(file_list, UPLOAD_DIR, build_id, options.strip) publib.upload_files(upload_script, REMOTE_HOST, COPY_USER_NAME, options.identity_copy, options) - rc = os.system("ssh -i %s %s@%s propagate.py %s %s" % (options.identity_trigger, + print "Propagating files to the downloads server" + sys.stdout.flush() + rc = os.system("ssh -i %s %s@%s propagate.py %s %s %s" % (options.identity_trigger, TRIGGER_USER_NAME, REMOTE_HOST, "-s" if options.staging else "", + "--host=%s" % options.host if options.host else "", build_id)) if rc != 0: print "Publishing failed" diff --git a/utils/new-publish/setup.sh b/utils/new-publish/setup.sh index 3d8ba6d..f3b6ed0 100755 --- a/utils/new-publish/setup.sh +++ b/utils/new-publish/setup.sh @@ -53,11 +53,11 @@ function setup_accounts() { chmod 755 $publish_home # Actual uploads will happen here - mkdir -p $publish_home/upload + mkdir -p $publish_home/uploads # publish-copy should have write access there, publish-trigger # generally only read (cleanup can be handled by cronjob) - chown publish-copy.publish $publish_home/upload - chmod 755 $publish_home/upload + chown publish-copy.publish $publish_home/uploads + chmod 755 $publish_home/uploads } |