aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorSergei Trofimov <sergei.trofimov@arm.com>2017-04-26 14:29:12 +0100
committerSergei Trofimov <sergei.trofimov@arm.com>2017-04-27 09:01:17 +0100
commit867972f742cc95744fe38192a0aab6052dda7904 (patch)
tree605ebafc88ac2df318876723c9f6e2bd3e15c4a2
parent0e5f7eb72436b7e05f4bccbe99026c4a955808c1 (diff)
removing old files
Removing old and unused files: - wa/framework/old_output.py: superseded by output.py in the same dir - the entire wlauto tree: replaced by wa/ tree; it's stale by now anyway. - log.py and actor.py from framework/ as neither is used.
-rw-r--r--wa/framework/actor.py31
-rw-r--r--wa/framework/log.py306
-rw-r--r--wa/framework/old_output.py362
-rw-r--r--wa/framework/target.py80
-rw-r--r--wlauto/__init__.py35
-rw-r--r--wlauto/agenda-example-biglittle.yaml79
-rw-r--r--wlauto/agenda-example-tutorial.yaml43
-rw-r--r--wlauto/commands/__init__.py16
-rw-r--r--wlauto/commands/create.py400
-rw-r--r--wlauto/commands/list.py74
-rw-r--r--wlauto/commands/record.py217
-rw-r--r--wlauto/commands/run.py123
-rw-r--r--wlauto/commands/show.py114
-rw-r--r--wlauto/commands/templates/UiAutomation.java25
-rw-r--r--wlauto/commands/templates/android_benchmark27
-rw-r--r--wlauto/commands/templates/android_uiauto_benchmark24
-rw-r--r--wlauto/commands/templates/basic_workload28
-rw-r--r--wlauto/commands/templates/setup.template102
-rw-r--r--wlauto/commands/templates/uiauto_workload35
-rw-r--r--wlauto/common/__init__.py16
-rw-r--r--wlauto/common/android/BaseUiAutomation.classbin4182 -> 0 bytes
-rw-r--r--wlauto/common/android/__init__.py16
-rw-r--r--wlauto/common/android/resources.py40
-rw-r--r--wlauto/common/android/workload.py506
-rwxr-xr-xwlauto/common/bin/arm64/busyboxbin1914688 -> 0 bytes
-rwxr-xr-xwlauto/common/bin/arm64/m5bin3561677 -> 0 bytes
-rwxr-xr-xwlauto/common/bin/arm64/reventbin3560328 -> 0 bytes
-rwxr-xr-xwlauto/common/bin/armeabi/busyboxbin1397424 -> 0 bytes
-rwxr-xr-xwlauto/common/bin/armeabi/m5bin2745108 -> 0 bytes
-rwxr-xr-xwlauto/common/bin/armeabi/reventbin2730664 -> 0 bytes
-rw-r--r--wlauto/common/bin/x86_64/busyboxbin1009384 -> 0 bytes
-rw-r--r--wlauto/common/gem5/LICENSE6
-rw-r--r--wlauto/common/gem5/__init__.py16
-rw-r--r--wlauto/common/resources.py64
-rw-r--r--wlauto/config_example.py289
-rw-r--r--wlauto/core/__init__.py16
-rw-r--r--wlauto/core/command.py81
-rw-r--r--wlauto/core/configuration/__init__.py19
-rw-r--r--wlauto/core/configuration/configuration.py1036
-rw-r--r--wlauto/core/configuration/default.py42
-rw-r--r--wlauto/core/configuration/manager.py213
-rw-r--r--wlauto/core/configuration/parsers.py308
-rw-r--r--wlauto/core/configuration/plugin_cache.py210
-rw-r--r--wlauto/core/configuration/tree.py89
-rw-r--r--wlauto/core/device_manager.py198
-rw-r--r--wlauto/core/entry_point.py108
-rw-r--r--wlauto/core/execution.py875
-rw-r--r--wlauto/core/exttype.py32
-rw-r--r--wlauto/core/host.py33
-rw-r--r--wlauto/core/instrumentation.py399
-rw-r--r--wlauto/core/output.py188
-rw-r--r--wlauto/core/plugin.py793
-rw-r--r--wlauto/core/pluginloader.py89
-rw-r--r--wlauto/core/resolver.py111
-rw-r--r--wlauto/core/resource.py185
-rw-r--r--wlauto/core/result.py319
-rw-r--r--wlauto/core/signal.py272
-rw-r--r--wlauto/core/version.py26
-rw-r--r--wlauto/core/workload.py104
-rw-r--r--wlauto/exceptions.py162
-rw-r--r--wlauto/external/README74
-rwxr-xr-xwlauto/external/bbench_server/build.sh31
-rw-r--r--wlauto/external/bbench_server/jni/Android.mk9
-rwxr-xr-xwlauto/external/bbench_server/jni/bbench_server.cpp151
-rw-r--r--wlauto/external/daq_server/daqpower-1.0.5.tar.gzbin14433 -> 0 bytes
-rw-r--r--wlauto/external/daq_server/src/MANIFEST.in0
-rw-r--r--wlauto/external/daq_server/src/README0
-rwxr-xr-xwlauto/external/daq_server/src/build.sh25
-rw-r--r--wlauto/external/daq_server/src/daqpower/__init__.py17
-rw-r--r--wlauto/external/daq_server/src/daqpower/client.py380
-rw-r--r--wlauto/external/daq_server/src/daqpower/common.py103
-rw-r--r--wlauto/external/daq_server/src/daqpower/config.py153
-rw-r--r--wlauto/external/daq_server/src/daqpower/daq.py347
-rw-r--r--wlauto/external/daq_server/src/daqpower/log.py58
-rw-r--r--wlauto/external/daq_server/src/daqpower/server.py526
-rw-r--r--wlauto/external/daq_server/src/scripts/run-daq-server3
-rw-r--r--wlauto/external/daq_server/src/scripts/send-daq-command3
-rw-r--r--wlauto/external/daq_server/src/setup.py52
-rwxr-xr-xwlauto/external/pmu_logger/Makefile7
-rwxr-xr-xwlauto/external/pmu_logger/README35
-rwxr-xr-xwlauto/external/pmu_logger/pmu_logger.c294
-rw-r--r--wlauto/external/pmu_logger/pmu_logger.kobin7821 -> 0 bytes
-rw-r--r--wlauto/external/readenergy/Makefile11
-rwxr-xr-xwlauto/external/readenergy/readenergybin695696 -> 0 bytes
-rw-r--r--wlauto/external/readenergy/readenergy.c345
-rw-r--r--wlauto/external/revent/Makefile12
-rw-r--r--wlauto/external/revent/revent.c636
-rwxr-xr-xwlauto/external/uiauto/build.sh21
-rw-r--r--wlauto/external/uiauto/build.xml92
-rw-r--r--wlauto/external/uiauto/project.properties14
-rw-r--r--wlauto/external/uiauto/src/com/arm/wlauto/uiauto/BaseUiAutomation.java113
-rw-r--r--wlauto/instrumentation/__init__.py35
-rw-r--r--wlauto/instrumentation/coreutil/__init__.py278
-rw-r--r--wlauto/instrumentation/daq/__init__.py416
-rw-r--r--wlauto/instrumentation/delay/__init__.py199
-rw-r--r--wlauto/instrumentation/dmesg/__init__.py62
-rw-r--r--wlauto/instrumentation/energy_model/__init__.py850
-rw-r--r--wlauto/instrumentation/energy_model/em.template51
-rw-r--r--wlauto/instrumentation/energy_model/report.template123
-rw-r--r--wlauto/instrumentation/energy_probe/__init__.py147
-rw-r--r--wlauto/instrumentation/fps/__init__.py315
-rwxr-xr-xwlauto/instrumentation/freqsweep/__init__.py150
-rw-r--r--wlauto/instrumentation/hwmon/__init__.py125
-rw-r--r--wlauto/instrumentation/juno_energy/__init__.py109
-rwxr-xr-xwlauto/instrumentation/juno_energy/readenergybin695696 -> 0 bytes
-rw-r--r--wlauto/instrumentation/misc/__init__.py390
-rw-r--r--wlauto/instrumentation/netstats/__init__.py192
-rw-r--r--wlauto/instrumentation/netstats/netstats.apkbin39631 -> 0 bytes
-rw-r--r--wlauto/instrumentation/perf/LICENSE9
-rw-r--r--wlauto/instrumentation/perf/__init__.py179
-rwxr-xr-xwlauto/instrumentation/perf/bin/arm64/perfbin6186003 -> 0 bytes
-rwxr-xr-xwlauto/instrumentation/perf/bin/armeabi/perfbin4914376 -> 0 bytes
-rw-r--r--wlauto/instrumentation/pmu_logger/__init__.py152
-rw-r--r--wlauto/instrumentation/screenon/__init__.py80
-rw-r--r--wlauto/instrumentation/streamline/__init__.py278
-rw-r--r--wlauto/instrumentation/systrace/__init__.py154
-rw-r--r--wlauto/instrumentation/trace_cmd/LICENSE39
-rw-r--r--wlauto/instrumentation/trace_cmd/__init__.py346
-rwxr-xr-xwlauto/instrumentation/trace_cmd/bin/arm64/trace-cmdbin1475074 -> 0 bytes
-rwxr-xr-xwlauto/instrumentation/trace_cmd/bin/armeabi/trace-cmdbin1170276 -> 0 bytes
-rw-r--r--wlauto/managers/__init__.py14
-rw-r--r--wlauto/managers/android.py189
-rw-r--r--wlauto/managers/linux.py65
-rw-r--r--wlauto/managers/locallinux.py30
-rw-r--r--wlauto/resource_getters/__init__.py16
-rw-r--r--wlauto/resource_getters/standard.py508
-rw-r--r--wlauto/result_processors/__init__.py16
-rw-r--r--wlauto/result_processors/cpustate.py225
-rw-r--r--wlauto/result_processors/dvfs.py376
-rw-r--r--wlauto/result_processors/ipynb_exporter/__init__.py182
-rw-r--r--wlauto/result_processors/ipynb_exporter/template.ipynb60
-rw-r--r--wlauto/result_processors/json_rp.py122
-rw-r--r--wlauto/result_processors/mongodb.py235
-rw-r--r--wlauto/result_processors/notify.py69
-rw-r--r--wlauto/result_processors/sqlite.py184
-rw-r--r--wlauto/result_processors/standard.py145
-rw-r--r--wlauto/result_processors/status.py51
-rw-r--r--wlauto/result_processors/syeg.py150
-rw-r--r--wlauto/tests/README12
-rw-r--r--wlauto/tests/__init__.py16
-rw-r--r--wlauto/tests/data/extensions/devices/test_device.py49
-rwxr-xr-xwlauto/tests/data/interrupts/after98
-rwxr-xr-xwlauto/tests/data/interrupts/before97
-rwxr-xr-xwlauto/tests/data/interrupts/result98
-rw-r--r--wlauto/tests/data/logcat.2.log14
-rw-r--r--wlauto/tests/data/logcat.log10
-rw-r--r--wlauto/tests/data/test-agenda-bad-syntax.yaml1
-rw-r--r--wlauto/tests/data/test-agenda-not-dict.yaml1
-rw-r--r--wlauto/tests/data/test-agenda.yaml25
-rw-r--r--wlauto/tests/data/test-config.py17
-rw-r--r--wlauto/tests/test_agenda.py172
-rw-r--r--wlauto/tests/test_config.py199
-rw-r--r--wlauto/tests/test_configuration.py621
-rw-r--r--wlauto/tests/test_device.py99
-rw-r--r--wlauto/tests/test_diff.py44
-rw-r--r--wlauto/tests/test_exec_control.py269
-rw-r--r--wlauto/tests/test_execution.py1069
-rw-r--r--wlauto/tests/test_extension.py349
-rw-r--r--wlauto/tests/test_extension_loader.py51
-rw-r--r--wlauto/tests/test_instrumentation.py236
-rw-r--r--wlauto/tests/test_parsers.py381
-rw-r--r--wlauto/tests/test_results_manager.py130
-rw-r--r--wlauto/tests/test_utils.py109
-rw-r--r--wlauto/tools/__init__.py16
-rw-r--r--wlauto/tools/extdoc.py134
-rw-r--r--wlauto/utils/__init__.py14
-rw-r--r--wlauto/utils/android.py69
-rw-r--r--wlauto/utils/cpuinfo.py44
-rw-r--r--wlauto/utils/doc.py307
-rw-r--r--wlauto/utils/exec_control.py117
-rw-r--r--wlauto/utils/formatter.py147
-rw-r--r--wlauto/utils/hwmon.py77
-rw-r--r--wlauto/utils/ipython.py188
-rw-r--r--wlauto/utils/log.py223
-rw-r--r--wlauto/utils/misc.py600
-rw-r--r--wlauto/utils/netio.py97
-rw-r--r--wlauto/utils/power.py738
-rw-r--r--wlauto/utils/serial_port.py51
-rw-r--r--wlauto/utils/serializer.py283
-rw-r--r--wlauto/utils/terminalsize.py93
-rw-r--r--wlauto/utils/trace_cmd.py271
-rw-r--r--wlauto/utils/types.py476
-rw-r--r--wlauto/workloads/__init__.py16
-rw-r--r--wlauto/workloads/andebench/__init__.py95
-rw-r--r--wlauto/workloads/andebench/com.arm.wlauto.uiauto.andebench.jarbin3666 -> 0 bytes
-rwxr-xr-xwlauto/workloads/andebench/uiauto/build.sh29
-rw-r--r--wlauto/workloads/andebench/uiauto/build.xml92
-rw-r--r--wlauto/workloads/andebench/uiauto/project.properties14
-rw-r--r--wlauto/workloads/andebench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java115
-rw-r--r--wlauto/workloads/androbench/__init__.py52
-rw-r--r--wlauto/workloads/androbench/com.arm.wlauto.uiauto.androbench.jarbin3052 -> 0 bytes
-rwxr-xr-xwlauto/workloads/androbench/uiauto/build.sh28
-rw-r--r--wlauto/workloads/androbench/uiauto/build.xml92
-rw-r--r--wlauto/workloads/androbench/uiauto/project.properties14
-rw-r--r--wlauto/workloads/androbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java67
-rw-r--r--wlauto/workloads/angrybirds/__init__.py30
-rw-r--r--wlauto/workloads/angrybirds/angrybirds_classic.reventbin37647 -> 0 bytes
-rw-r--r--wlauto/workloads/angrybirds/revent_files/.empty0
-rw-r--r--wlauto/workloads/angrybirds_rio/__init__.py30
-rw-r--r--wlauto/workloads/angrybirds_rio/revent_files/.empty0
-rw-r--r--wlauto/workloads/angrybirds_rio/revent_files/Nexus10.run.reventbin25208 -> 0 bytes
-rw-r--r--wlauto/workloads/angrybirds_rio/revent_files/Nexus10.setup.reventbin2088 -> 0 bytes
-rw-r--r--wlauto/workloads/anomaly2/__init__.py63
-rw-r--r--wlauto/workloads/antutu/__init__.py145
-rw-r--r--wlauto/workloads/antutu/com.arm.wlauto.uiauto.antutu.jarbin6139 -> 0 bytes
-rwxr-xr-xwlauto/workloads/antutu/uiauto/build.sh28
-rw-r--r--wlauto/workloads/antutu/uiauto/build.xml92
-rw-r--r--wlauto/workloads/antutu/uiauto/project.properties14
-rw-r--r--wlauto/workloads/antutu/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java380
-rw-r--r--wlauto/workloads/apklaunch/__init__.py63
-rw-r--r--wlauto/workloads/applaunch/__init__.py188
-rw-r--r--wlauto/workloads/applaunch/device_script.template88
-rw-r--r--wlauto/workloads/audio/__init__.py103
-rw-r--r--wlauto/workloads/autotest/__init__.py108
-rw-r--r--wlauto/workloads/bbench/__init__.py244
-rwxr-xr-xwlauto/workloads/bbench/bin/arm64/bbench_serverbin570819 -> 0 bytes
-rwxr-xr-xwlauto/workloads/bbench/bin/armeabi/bbench_serverbin570819 -> 0 bytes
-rwxr-xr-xwlauto/workloads/bbench/patches/bbc.html1412
-rw-r--r--wlauto/workloads/bbench/patches/bbench.js177
-rwxr-xr-xwlauto/workloads/bbench/patches/cnn.html1293
-rw-r--r--wlauto/workloads/bbench/patches/index_noinput.html56
-rw-r--r--wlauto/workloads/bbench/patches/results.html158
-rwxr-xr-xwlauto/workloads/bbench/patches/twitter.html1215
-rw-r--r--wlauto/workloads/benchmarkpi/__init__.py63
-rw-r--r--wlauto/workloads/benchmarkpi/com.arm.wlauto.uiauto.benchmarkpi.jarbin3079 -> 0 bytes
-rwxr-xr-xwlauto/workloads/benchmarkpi/uiauto/build.sh28
-rw-r--r--wlauto/workloads/benchmarkpi/uiauto/build.xml92
-rw-r--r--wlauto/workloads/benchmarkpi/uiauto/project.properties14
-rw-r--r--wlauto/workloads/benchmarkpi/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java62
-rw-r--r--wlauto/workloads/caffeinemark/__init__.py68
-rw-r--r--wlauto/workloads/caffeinemark/com.arm.wlauto.uiauto.caffeinemark.jarbin3569 -> 0 bytes
-rwxr-xr-xwlauto/workloads/caffeinemark/uiauto/build.sh28
-rw-r--r--wlauto/workloads/caffeinemark/uiauto/build.xml92
-rw-r--r--wlauto/workloads/caffeinemark/uiauto/project.properties14
-rw-r--r--wlauto/workloads/caffeinemark/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java85
-rw-r--r--wlauto/workloads/cameracapture/__init__.py51
-rw-r--r--wlauto/workloads/cameracapture/com.arm.wlauto.uiauto.cameracapture.jarbin3125 -> 0 bytes
-rwxr-xr-xwlauto/workloads/cameracapture/uiauto/build.sh28
-rw-r--r--wlauto/workloads/cameracapture/uiauto/build.xml92
-rw-r--r--wlauto/workloads/cameracapture/uiauto/project.properties14
-rw-r--r--wlauto/workloads/cameracapture/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java68
-rw-r--r--wlauto/workloads/camerarecord/__init__.py47
-rw-r--r--wlauto/workloads/camerarecord/com.arm.wlauto.uiauto.camerarecord.jarbin3053 -> 0 bytes
-rwxr-xr-xwlauto/workloads/camerarecord/uiauto/build.sh28
-rw-r--r--wlauto/workloads/camerarecord/uiauto/build.xml92
-rw-r--r--wlauto/workloads/camerarecord/uiauto/project.properties14
-rw-r--r--wlauto/workloads/camerarecord/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java65
-rw-r--r--wlauto/workloads/castlebuilder/__init__.py28
-rw-r--r--wlauto/workloads/castlebuilder/revent_files/.empty0
-rw-r--r--wlauto/workloads/castlebuilder/revent_files/Nexus10.run.reventbin32768 -> 0 bytes
-rw-r--r--wlauto/workloads/castlebuilder/revent_files/Nexus10.setup.reventbin1088 -> 0 bytes
-rw-r--r--wlauto/workloads/castlemaster/__init__.py30
-rw-r--r--wlauto/workloads/castlemaster/revent_files/.empty0
-rw-r--r--wlauto/workloads/castlemaster/revent_files/Nexus10.run.reventbin28348 -> 0 bytes
-rw-r--r--wlauto/workloads/castlemaster/revent_files/Nexus10.setup.reventbin3448 -> 0 bytes
-rw-r--r--wlauto/workloads/cfbench/__init__.py72
-rw-r--r--wlauto/workloads/cfbench/com.arm.wlauto.uiauto.cfbench.jarbin2445 -> 0 bytes
-rwxr-xr-xwlauto/workloads/cfbench/uiauto/build.sh28
-rw-r--r--wlauto/workloads/cfbench/uiauto/build.xml92
-rw-r--r--wlauto/workloads/cfbench/uiauto/project.properties14
-rw-r--r--wlauto/workloads/cfbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java63
-rw-r--r--wlauto/workloads/citadel/__init__.py44
-rw-r--r--wlauto/workloads/citadel/revent_files/.empty0
-rw-r--r--wlauto/workloads/citadel/revent_files/Nexus10.run.reventbin608 -> 0 bytes
-rw-r--r--wlauto/workloads/citadel/revent_files/Nexus10.setup.reventbin6068 -> 0 bytes
-rw-r--r--wlauto/workloads/cyclictest/LICENSE8
-rw-r--r--wlauto/workloads/cyclictest/__init__.py138
-rwxr-xr-xwlauto/workloads/cyclictest/bin/arm64/cyclictestbin810676 -> 0 bytes
-rwxr-xr-xwlauto/workloads/cyclictest/bin/armeabi/cyclictestbin610188 -> 0 bytes
-rw-r--r--wlauto/workloads/dex2oat/__init__.py121
-rw-r--r--wlauto/workloads/dhrystone/__init__.py130
-rwxr-xr-xwlauto/workloads/dhrystone/dhrystonebin523482 -> 0 bytes
-rwxr-xr-xwlauto/workloads/dhrystone/src/build.sh23
-rw-r--r--wlauto/workloads/dhrystone/src/jni/Android.mk11
-rw-r--r--wlauto/workloads/dhrystone/src/jni/dhrystone.c959
-rw-r--r--wlauto/workloads/dungeondefenders/__init__.py34
-rw-r--r--wlauto/workloads/dungeondefenders/revent_files/Nexus10.run.reventbin301908 -> 0 bytes
-rw-r--r--wlauto/workloads/dungeondefenders/revent_files/Nexus10.setup.reventbin52148 -> 0 bytes
-rw-r--r--wlauto/workloads/ebizzy/__init__.py89
-rwxr-xr-xwlauto/workloads/ebizzy/bin/arm64/ebizzybin925945 -> 0 bytes
-rwxr-xr-xwlauto/workloads/ebizzy/bin/armeabi/ebizzybin868623 -> 0 bytes
-rw-r--r--wlauto/workloads/ebizzy/src/LICENSE3
-rw-r--r--wlauto/workloads/facebook/__init__.py82
-rw-r--r--wlauto/workloads/facebook/com.arm.wlauto.uiauto.facebook.jarbin3755 -> 0 bytes
-rwxr-xr-xwlauto/workloads/facebook/uiauto/build.sh27
-rw-r--r--wlauto/workloads/facebook/uiauto/build.xml92
-rw-r--r--wlauto/workloads/facebook/uiauto/project.properties14
-rw-r--r--wlauto/workloads/facebook/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java257
-rw-r--r--wlauto/workloads/geekbench/__init__.py356
-rw-r--r--wlauto/workloads/geekbench/com.arm.wlauto.uiauto.geekbench.jarbin3523 -> 0 bytes
-rwxr-xr-xwlauto/workloads/geekbench/uiauto/build.sh28
-rw-r--r--wlauto/workloads/geekbench/uiauto/build.xml92
-rw-r--r--wlauto/workloads/geekbench/uiauto/project.properties14
-rw-r--r--wlauto/workloads/geekbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java121
-rw-r--r--wlauto/workloads/glbcorp/__init__.py215
-rw-r--r--wlauto/workloads/glbenchmark/__init__.py158
-rw-r--r--wlauto/workloads/glbenchmark/com.arm.wlauto.uiauto.glb.jarbin4629 -> 0 bytes
-rwxr-xr-xwlauto/workloads/glbenchmark/uiauto/build.sh28
-rw-r--r--wlauto/workloads/glbenchmark/uiauto/build.xml92
-rw-r--r--wlauto/workloads/glbenchmark/uiauto/project.properties14
-rw-r--r--wlauto/workloads/glbenchmark/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java164
-rw-r--r--wlauto/workloads/googlemap/__init__.py39
-rw-r--r--wlauto/workloads/googlemap/revent_files/generic_android.run.reventbin77824 -> 0 bytes
-rw-r--r--wlauto/workloads/googlemap/revent_files/generic_android.setup.reventbin94208 -> 0 bytes
-rw-r--r--wlauto/workloads/gunbros2/__init__.py42
-rw-r--r--wlauto/workloads/hackbench/__init__.py88
-rwxr-xr-xwlauto/workloads/hackbench/bin/arm64/hackbenchbin932308 -> 0 bytes
-rwxr-xr-xwlauto/workloads/hackbench/bin/armeabi/hackbenchbin872913 -> 0 bytes
-rw-r--r--wlauto/workloads/hackbench/src/LICENSE3
-rw-r--r--wlauto/workloads/homescreen/__init__.py43
-rw-r--r--wlauto/workloads/hwuitest/__init__.py108
-rw-r--r--wlauto/workloads/idle/__init__.py68
-rw-r--r--wlauto/workloads/iozone/LICENSE22
-rw-r--r--wlauto/workloads/iozone/__init__.py316
-rwxr-xr-xwlauto/workloads/iozone/bin/arm64/iozonebin1431160 -> 0 bytes
-rwxr-xr-xwlauto/workloads/iozone/bin/armeabi/iozonebin1175818 -> 0 bytes
-rw-r--r--wlauto/workloads/ironman/__init__.py35
-rw-r--r--wlauto/workloads/ironman/revent_files/Nexus10.run.reventbin1387568 -> 0 bytes
-rw-r--r--wlauto/workloads/ironman/revent_files/Nexus10.setup.reventbin3528 -> 0 bytes
-rw-r--r--wlauto/workloads/krazykart/__init__.py28
-rw-r--r--wlauto/workloads/krazykart/revent_files/.empty0
-rw-r--r--wlauto/workloads/linpack/__init__.py64
-rw-r--r--wlauto/workloads/linpack/com.arm.wlauto.uiauto.linpack.jarbin3138 -> 0 bytes
-rwxr-xr-xwlauto/workloads/linpack/uiauto/build.sh28
-rw-r--r--wlauto/workloads/linpack/uiauto/build.xml92
-rw-r--r--wlauto/workloads/linpack/uiauto/project.properties14
-rw-r--r--wlauto/workloads/linpack/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java59
-rw-r--r--wlauto/workloads/linpack_cli/LICENSE6
-rw-r--r--wlauto/workloads/linpack_cli/__init__.py77
-rwxr-xr-xwlauto/workloads/linpack_cli/bin/arm64/linpackbin690774 -> 0 bytes
-rwxr-xr-xwlauto/workloads/linpack_cli/bin/armeabi/linpackbin513222 -> 0 bytes
-rw-r--r--wlauto/workloads/lmbench/__init__.py162
-rw-r--r--wlauto/workloads/lmbench/bin/COPYING339
-rw-r--r--wlauto/workloads/lmbench/bin/COPYING-2108
-rw-r--r--wlauto/workloads/lmbench/bin/README17
-rwxr-xr-xwlauto/workloads/lmbench/bin/arm64/bw_membin736367 -> 0 bytes
-rwxr-xr-xwlauto/workloads/lmbench/bin/arm64/lat_mem_rdbin803800 -> 0 bytes
-rwxr-xr-xwlauto/workloads/lmbench/bin/armeabi/bw_membin554798 -> 0 bytes
-rwxr-xr-xwlauto/workloads/lmbench/bin/armeabi/lat_mem_rdbin606379 -> 0 bytes
-rw-r--r--wlauto/workloads/manual/__init__.py104
-rw-r--r--wlauto/workloads/memcpy/__init__.py75
-rwxr-xr-xwlauto/workloads/memcpy/bin/arm64/memcpybin690382 -> 0 bytes
-rwxr-xr-xwlauto/workloads/memcpy/bin/armeabi/memcpybin456813 -> 0 bytes
-rwxr-xr-xwlauto/workloads/memcpy/src/build.sh21
-rw-r--r--wlauto/workloads/memcpy/src/jni/Android.mk11
-rw-r--r--wlauto/workloads/memcpy/src/jni/memcopy.c114
-rw-r--r--wlauto/workloads/nenamark/__init__.py66
-rw-r--r--wlauto/workloads/peacekeeper/__init__.py129
-rw-r--r--wlauto/workloads/peacekeeper/com.arm.wlauto.uiauto.peacekeeper.jarbin3479 -> 0 bytes
-rwxr-xr-xwlauto/workloads/peacekeeper/uiauto/build.sh27
-rw-r--r--wlauto/workloads/peacekeeper/uiauto/build.xml92
-rw-r--r--wlauto/workloads/peacekeeper/uiauto/project.properties14
-rw-r--r--wlauto/workloads/peacekeeper/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java115
-rw-r--r--wlauto/workloads/power_loadtest/__init__.py122
-rw-r--r--wlauto/workloads/quadrant/__init__.py112
-rw-r--r--wlauto/workloads/quadrant/com.arm.wlauto.uiauto.quadrant.jarbin3661 -> 0 bytes
-rwxr-xr-xwlauto/workloads/quadrant/uiauto/build.sh28
-rw-r--r--wlauto/workloads/quadrant/uiauto/build.xml92
-rw-r--r--wlauto/workloads/quadrant/uiauto/project.properties14
-rw-r--r--wlauto/workloads/quadrant/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java120
-rw-r--r--wlauto/workloads/real_linpack/__init__.py66
-rw-r--r--wlauto/workloads/real_linpack/com.arm.wlauto.uiauto.reallinpack.jarbin2951 -> 0 bytes
-rwxr-xr-xwlauto/workloads/real_linpack/uiauto/build.sh28
-rw-r--r--wlauto/workloads/real_linpack/uiauto/build.xml92
-rw-r--r--wlauto/workloads/real_linpack/uiauto/project.properties14
-rw-r--r--wlauto/workloads/real_linpack/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java51
-rw-r--r--wlauto/workloads/realracing3/__init__.py35
-rw-r--r--wlauto/workloads/recentfling/__init__.py100
-rw-r--r--wlauto/workloads/rt_app/LICENSE8
-rw-r--r--wlauto/workloads/rt_app/__init__.py281
-rwxr-xr-xwlauto/workloads/rt_app/bin/arm64/rt-appbin1199504 -> 0 bytes
-rwxr-xr-xwlauto/workloads/rt_app/bin/armeabi/rt-appbin1201237 -> 0 bytes
-rw-r--r--wlauto/workloads/rt_app/use_cases/browser-long.json134
-rw-r--r--wlauto/workloads/rt_app/use_cases/browser-short.json134
-rw-r--r--wlauto/workloads/rt_app/use_cases/mp3-long.json68
-rw-r--r--wlauto/workloads/rt_app/use_cases/mp3-short.json68
-rw-r--r--wlauto/workloads/rt_app/use_cases/spreading-tasks.json52
-rw-r--r--wlauto/workloads/rt_app/use_cases/taskset.json186
-rw-r--r--wlauto/workloads/rt_app/use_cases/video-long.json252
-rw-r--r--wlauto/workloads/rt_app/use_cases/video-short.json252
-rwxr-xr-xwlauto/workloads/rt_app/workgen236
-rw-r--r--wlauto/workloads/shellscript/__init__.py65
-rw-r--r--wlauto/workloads/skypevideo/__init__.py130
-rw-r--r--wlauto/workloads/skypevideo/com.arm.wlauto.uiauto.skypevideo.jarbin3210 -> 0 bytes
-rwxr-xr-xwlauto/workloads/skypevideo/uiauto/build.sh28
-rw-r--r--wlauto/workloads/skypevideo/uiauto/build.xml92
-rw-r--r--wlauto/workloads/skypevideo/uiauto/project.properties14
-rw-r--r--wlauto/workloads/skypevideo/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java72
-rw-r--r--wlauto/workloads/smartbench/__init__.py59
-rw-r--r--wlauto/workloads/smartbench/com.arm.wlauto.uiauto.smartbench.jarbin2365 -> 0 bytes
-rwxr-xr-xwlauto/workloads/smartbench/uiauto/build.sh28
-rw-r--r--wlauto/workloads/smartbench/uiauto/build.xml92
-rw-r--r--wlauto/workloads/smartbench/uiauto/project.properties14
-rw-r--r--wlauto/workloads/smartbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java62
-rw-r--r--wlauto/workloads/spec2000/__init__.py350
-rw-r--r--wlauto/workloads/sqlite/__init__.py48
-rw-r--r--wlauto/workloads/sqlite/com.arm.wlauto.uiauto.sqlite.jarbin3488 -> 0 bytes
-rwxr-xr-xwlauto/workloads/sqlite/uiauto/build.sh28
-rw-r--r--wlauto/workloads/sqlite/uiauto/build.xml92
-rw-r--r--wlauto/workloads/sqlite/uiauto/project.properties14
-rw-r--r--wlauto/workloads/sqlite/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java103
-rw-r--r--wlauto/workloads/stream/__init__.py93
-rwxr-xr-xwlauto/workloads/stream/bin/arm64/stream_noompbin647496 -> 0 bytes
-rwxr-xr-xwlauto/workloads/stream/bin/arm64/stream_ompbin973482 -> 0 bytes
-rwxr-xr-xwlauto/workloads/stream/bin/armeabi/stream_noompbin599350 -> 0 bytes
-rwxr-xr-xwlauto/workloads/stream/bin/armeabi/stream_ompbin927706 -> 0 bytes
-rw-r--r--wlauto/workloads/sysbench/LICENSE24
-rw-r--r--wlauto/workloads/sysbench/__init__.py203
-rw-r--r--wlauto/workloads/sysbench/bin/armeabi/sysbenchbin2015463 -> 0 bytes
-rw-r--r--wlauto/workloads/telemetry/__init__.py306
-rw-r--r--wlauto/workloads/templerun/__init__.py29
-rw-r--r--wlauto/workloads/templerun/revent_files/.empty0
-rw-r--r--wlauto/workloads/templerun/revent_files/Nexus10.run.reventbin36864 -> 0 bytes
-rw-r--r--wlauto/workloads/templerun/revent_files/Nexus10.setup.reventbin88 -> 0 bytes
-rwxr-xr-xwlauto/workloads/thechase/__init__.py46
-rw-r--r--wlauto/workloads/truckerparking3d/__init__.py29
-rw-r--r--wlauto/workloads/vellamo/__init__.py214
-rw-r--r--wlauto/workloads/vellamo/com.arm.wlauto.uiauto.vellamo.jarbin5779 -> 0 bytes
-rwxr-xr-xwlauto/workloads/vellamo/uiauto/build.sh28
-rw-r--r--wlauto/workloads/vellamo/uiauto/build.xml92
-rw-r--r--wlauto/workloads/vellamo/uiauto/project.properties14
-rw-r--r--wlauto/workloads/vellamo/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java260
-rw-r--r--wlauto/workloads/video/__init__.py138
-rw-r--r--wlauto/workloads/videostreaming/__init__.py73
-rw-r--r--wlauto/workloads/videostreaming/com.arm.wlauto.uiauto.videostreaming.jarbin4334 -> 0 bytes
-rwxr-xr-xwlauto/workloads/videostreaming/uiauto/build.sh28
-rw-r--r--wlauto/workloads/videostreaming/uiauto/build.xml92
-rw-r--r--wlauto/workloads/videostreaming/uiauto/project.properties14
-rw-r--r--wlauto/workloads/videostreaming/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java155
429 files changed, 0 insertions, 47309 deletions
diff --git a/wa/framework/actor.py b/wa/framework/actor.py
deleted file mode 100644
index dfb0ae59..00000000
--- a/wa/framework/actor.py
+++ /dev/null
@@ -1,31 +0,0 @@
-import uuid
-import logging
-
-from wa.framework import pluginloader
-from wa.framework.plugin import Plugin
-
-
-class JobActor(Plugin):
-
- kind = 'job_actor'
-
- def initialize(self, context):
- pass
-
- def run(self):
- pass
-
- def restart(self):
- pass
-
- def complete(self):
- pass
-
- def finalize(self):
- pass
-
-
-class NullJobActor(JobActor):
-
- name = 'null-job-actor'
-
diff --git a/wa/framework/log.py b/wa/framework/log.py
deleted file mode 100644
index fe49c510..00000000
--- a/wa/framework/log.py
+++ /dev/null
@@ -1,306 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E1101
-import logging
-import string
-import threading
-import subprocess
-
-import colorama
-
-from wa.framework import signal
-from wa.framework.exception import WAError
-from wa.utils.misc import get_traceback
-
-
-COLOR_MAP = {
- logging.DEBUG: colorama.Fore.BLUE,
- logging.INFO: colorama.Fore.GREEN,
- logging.WARNING: colorama.Fore.YELLOW,
- logging.ERROR: colorama.Fore.RED,
- logging.CRITICAL: colorama.Style.BRIGHT + colorama.Fore.RED,
-}
-
-RESET_COLOR = colorama.Style.RESET_ALL
-
-_indent_level = 0
-_indent_width = 4
-_console_handler = None
-
-
-def init(verbosity=logging.INFO, color=True, indent_with=4,
- regular_fmt='%(levelname)-8s %(message)s',
- verbose_fmt='%(asctime)s %(levelname)-8s %(name)-10.10s: %(message)s',
- debug=False):
- global _indent_width, _console_handler
- _indent_width = indent_with
- signal.log_error_func = lambda m: log_error(m, signal.logger)
-
- root_logger = logging.getLogger()
- root_logger.setLevel(logging.DEBUG)
-
- error_handler = ErrorSignalHandler(logging.DEBUG)
- root_logger.addHandler(error_handler)
-
- _console_handler = logging.StreamHandler()
- if color:
- formatter = ColorFormatter
- else:
- formatter = LineFormatter
- if verbosity:
- _console_handler.setLevel(logging.DEBUG)
- _console_handler.setFormatter(formatter(verbose_fmt))
- else:
- _console_handler.setLevel(logging.INFO)
- _console_handler.setFormatter(formatter(regular_fmt))
- root_logger.addHandler(_console_handler)
- logging.basicConfig(level=logging.DEBUG)
- if not debug:
- logging.raiseExceptions = False
-
-
-def set_level(level):
- _console_handler.setLevel(level)
-
-
-def add_file(filepath, level=logging.DEBUG,
- fmt='%(asctime)s %(levelname)-8s %(name)s: %(message)-10.10s'):
- root_logger = logging.getLogger()
- file_handler = logging.FileHandler(filepath)
- file_handler.setLevel(level)
- file_handler.setFormatter(LineFormatter(fmt))
- root_logger.addHandler(file_handler)
-
-
-def enable(logs):
- if isinstance(logs, list):
- for log in logs:
- __enable_logger(log)
- else:
- __enable_logger(logs)
-
-
-def disable(logs):
- if isinstance(logs, list):
- for log in logs:
- __disable_logger(log)
- else:
- __disable_logger(logs)
-
-
-def __enable_logger(logger):
- if isinstance(logger, basestring):
- logger = logging.getLogger(logger)
- logger.propagate = True
-
-
-def __disable_logger(logger):
- if isinstance(logger, basestring):
- logger = logging.getLogger(logger)
- logger.propagate = False
-
-
-def indent():
- global _indent_level
- _indent_level += 1
-
-
-def dedent():
- global _indent_level
- _indent_level -= 1
-
-
-def log_error(e, logger, critical=False):
- """
- Log the specified Exception as an error. The Error message will be formatted
- differently depending on the nature of the exception.
-
- :e: the error to log. should be an instance of ``Exception``
- :logger: logger to be used.
- :critical: if ``True``, this error will be logged at ``logging.CRITICAL``
- level, otherwise it will be logged as ``logging.ERROR``.
-
- """
- if critical:
- log_func = logger.critical
- else:
- log_func = logger.error
-
- if isinstance(e, KeyboardInterrupt):
- log_func('Got CTRL-C. Aborting.')
- elif isinstance(e, WAError):
- log_func(e)
- elif isinstance(e, subprocess.CalledProcessError):
- tb = get_traceback()
- log_func(tb)
- command = e.cmd
- if e.args:
- command = '{} {}'.format(command, ' '.join(e.args))
- message = 'Command \'{}\' returned non-zero exit status {}\nOUTPUT:\n{}\n'
- log_func(message.format(command, e.returncode, e.output))
- elif isinstance(e, SyntaxError):
- tb = get_traceback()
- log_func(tb)
- message = 'Syntax Error in {}, line {}, offset {}:'
- log_func(message.format(e.filename, e.lineno, e.offset))
- log_func('\t{}'.format(e.msg))
- else:
- tb = get_traceback()
- log_func(tb)
- log_func('{}({})'.format(e.__class__.__name__, e))
-
-
-class ErrorSignalHandler(logging.Handler):
- """
- Emits signals for ERROR and WARNING level traces.
-
- """
-
- def emit(self, record):
- if record.levelno == logging.ERROR:
- signal.send(signal.ERROR_LOGGED, self)
- elif record.levelno == logging.WARNING:
- signal.send(signal.WARNING_LOGGED, self)
-
-
-class LineFormatter(logging.Formatter):
- """
- Logs each line of the message separately.
-
- """
-
- def format(self, record):
- record.message = record.getMessage()
- if self.usesTime():
- record.asctime = self.formatTime(record, self.datefmt)
-
- indent = _indent_width * _indent_level
- d = record.__dict__
- parts = []
- for line in record.message.split('\n'):
- line = ' ' * indent + line
- d.update({'message': line.strip('\r')})
- parts.append(self._fmt % d)
-
- return '\n'.join(parts)
-
-
-class ColorFormatter(LineFormatter):
- """
- Formats logging records with color and prepends record info
- to each line of the message.
-
- BLUE for DEBUG logging level
- GREEN for INFO logging level
- YELLOW for WARNING logging level
- RED for ERROR logging level
- BOLD RED for CRITICAL logging level
-
- """
-
- def __init__(self, fmt=None, datefmt=None):
- super(ColorFormatter, self).__init__(fmt, datefmt)
- template_text = self._fmt.replace('%(message)s', RESET_COLOR + '%(message)s${color}')
- template_text = '${color}' + template_text + RESET_COLOR
- self.fmt_template = string.Template(template_text)
-
- def format(self, record):
- self._set_color(COLOR_MAP[record.levelno])
- return super(ColorFormatter, self).format(record)
-
- def _set_color(self, color):
- self._fmt = self.fmt_template.substitute(color=color)
-
-
-class BaseLogWriter(object):
-
- def __init__(self, name, level=logging.DEBUG):
- """
- File-like object class designed to be used for logging from streams
- Each complete line (terminated by new line character) gets logged
- at DEBUG level. In complete lines are buffered until the next new line.
-
- :param name: The name of the logger that will be used.
-
- """
- self.logger = logging.getLogger(name)
- self.buffer = ''
- if level == logging.DEBUG:
- self.do_write = self.logger.debug
- elif level == logging.INFO:
- self.do_write = self.logger.info
- elif level == logging.WARNING:
- self.do_write = self.logger.warning
- elif level == logging.ERROR:
- self.do_write = self.logger.error
- else:
- raise Exception('Unknown logging level: {}'.format(level))
-
- def flush(self):
- # Defined to match the interface expected by pexpect.
- return self
-
- def close(self):
- if self.buffer:
- self.logger.debug(self.buffer)
- self.buffer = ''
- return self
-
- def __del__(self):
- # Ensure we don't lose bufferd output
- self.close()
-
-
-class LogWriter(BaseLogWriter):
-
- def write(self, data):
- data = data.replace('\r\n', '\n').replace('\r', '\n')
- if '\n' in data:
- parts = data.split('\n')
- parts[0] = self.buffer + parts[0]
- for part in parts[:-1]:
- self.do_write(part)
- self.buffer = parts[-1]
- else:
- self.buffer += data
- return self
-
-
-class LineLogWriter(BaseLogWriter):
-
- def write(self, data):
- self.do_write(data)
-
-
-class StreamLogger(threading.Thread):
- """
- Logs output from a stream in a thread.
-
- """
-
- def __init__(self, name, stream, level=logging.DEBUG, klass=LogWriter):
- super(StreamLogger, self).__init__()
- self.writer = klass(name, level)
- self.stream = stream
- self.daemon = True
-
- def run(self):
- line = self.stream.readline()
- while line:
- self.writer.write(line.rstrip('\n'))
- line = self.stream.readline()
- self.writer.close()
diff --git a/wa/framework/old_output.py b/wa/framework/old_output.py
deleted file mode 100644
index 49ce8721..00000000
--- a/wa/framework/old_output.py
+++ /dev/null
@@ -1,362 +0,0 @@
-import os
-import shutil
-import logging
-import uuid
-from copy import copy
-from datetime import datetime, timedelta
-
-from wa.framework import signal, log
-from wa.framework.configuration.core import merge_config_values
-from wa.utils import serializer
-from wa.utils.misc import enum_metaclass, ensure_directory_exists as _d
-from wa.utils.types import numeric
-
-
-class Status(object):
-
- __metaclass__ = enum_metaclass('values', return_name=True)
-
- values = [
- 'NEW',
- 'PENDING',
- 'RUNNING',
- 'COMPLETE',
- 'OK',
- 'OKISH',
- 'NONCRITICAL',
- 'PARTIAL',
- 'FAILED',
- 'ABORTED',
- 'SKIPPED',
- 'UNKNOWN',
- ]
-
-
-class WAOutput(object):
-
- basename = '.wa-output'
-
- @classmethod
- def load(cls, source):
- if os.path.isfile(source):
- pod = serializer.load(source)
- elif os.path.isdir(source):
- pod = serializer.load(os.path.join(source, cls.basename))
- else:
- message = 'Cannot load {} from {}'
- raise ValueError(message.format(cls.__name__, source))
- return cls.from_pod(pod)
-
- @classmethod
- def from_pod(cls, pod):
- instance = cls(pod['output_directory'])
- instance.status = pod['status']
- instance.metrics = [Metric.from_pod(m) for m in pod['metrics']]
- instance.artifacts = [Artifact.from_pod(a) for a in pod['artifacts']]
- instance.events = [RunEvent.from_pod(e) for e in pod['events']]
- instance.classifiers = pod['classifiers']
- return instance
-
- def __init__(self, output_directory):
- self.logger = logging.getLogger('output')
- self.output_directory = output_directory
- self.status = Status.UNKNOWN
- self.classifiers = {}
- self.metrics = []
- self.artifacts = []
- self.events = []
-
- def initialize(self, overwrite=False):
- if os.path.exists(self.output_directory):
- if not overwrite:
- raise RuntimeError('"{}" already exists.'.format(self.output_directory))
- self.logger.info('Removing existing output directory.')
- shutil.rmtree(self.output_directory)
- self.logger.debug('Creating output directory {}'.format(self.output_directory))
- os.makedirs(self.output_directory)
-
- def add_metric(self, name, value, units=None, lower_is_better=False, classifiers=None):
- classifiers = merge_config_values(self.classifiers, classifiers or {})
- self.metrics.append(Metric(name, value, units, lower_is_better, classifiers))
-
- def add_artifact(self, name, path, kind, *args, **kwargs):
- path = _check_artifact_path(path, self.output_directory)
- self.artifacts.append(Artifact(name, path, kind, Artifact.RUN, *args, **kwargs))
-
- def get_path(self, subpath):
- return os.path.join(self.output_directory, subpath)
-
- def to_pod(self):
- return {
- 'output_directory': self.output_directory,
- 'status': self.status,
- 'metrics': [m.to_pod() for m in self.metrics],
- 'artifacts': [a.to_pod() for a in self.artifacts],
- 'events': [e.to_pod() for e in self.events],
- 'classifiers': copy(self.classifiers),
- }
-
- def persist(self):
- statefile = os.path.join(self.output_directory, self.basename)
- with open(statefile, 'wb') as wfh:
- serializer.dump(self, wfh)
-
-
-class RunInfo(object):
-
- default_name_format = 'wa-run-%y%m%d-%H%M%S'
-
- def __init__(self, project=None, project_stage=None, name=None):
- self.uuid = uuid.uuid4()
- self.project = project
- self.project_stage = project_stage
- self.name = name or datetime.now().strftime(self.default_name_format)
- self.start_time = None
- self.end_time = None
- self.duration = None
-
- @staticmethod
- def from_pod(pod):
- instance = RunInfo()
- instance.uuid = uuid.UUID(pod['uuid'])
- instance.project = pod['project']
- instance.project_stage = pod['project_stage']
- instance.name = pod['name']
- instance.start_time = pod['start_time']
- instance.end_time = pod['end_time']
- instance.duration = timedelta(seconds=pod['duration'])
- return instance
-
- def to_pod(self):
- d = copy(self.__dict__)
- d['uuid'] = str(self.uuid)
- d['duration'] = self.duration.days * 3600 * 24 + self.duration.seconds
- return d
-
-
-class RunOutput(WAOutput):
-
- @property
- def info_directory(self):
- return _d(os.path.join(self.output_directory, '_info'))
-
- @property
- def config_directory(self):
- return _d(os.path.join(self.output_directory, '_config'))
-
- @property
- def failed_directory(self):
- return _d(os.path.join(self.output_directory, '_failed'))
-
- @property
- def log_file(self):
- return os.path.join(self.output_directory, 'run.log')
-
- @classmethod
- def from_pod(cls, pod):
- instance = WAOutput.from_pod(pod)
- instance.info = RunInfo.from_pod(pod['info'])
- instance.jobs = [JobOutput.from_pod(i) for i in pod['jobs']]
- instance.failed = [JobOutput.from_pod(i) for i in pod['failed']]
- return instance
-
- def __init__(self, output_directory):
- super(RunOutput, self).__init__(output_directory)
- self.logger = logging.getLogger('output')
- self.info = RunInfo()
- self.jobs = []
- self.failed = []
-
- def initialize(self, overwrite=False):
- super(RunOutput, self).initialize(overwrite)
- log.add_file(self.log_file)
- self.add_artifact('runlog', self.log_file, 'log')
-
- def create_job_output(self, id):
- outdir = os.path.join(self.output_directory, id)
- job_output = JobOutput(outdir)
- self.jobs.append(job_output)
- return job_output
-
- def move_failed(self, job_output):
- basename = os.path.basename(job_output.output_directory)
- i = 1
- dest = os.path.join(self.failed_directory, basename + '-{}'.format(i))
- while os.path.exists(dest):
- i += 1
- dest = '{}-{}'.format(dest[:-2], i)
- shutil.move(job_output.output_directory, dest)
-
- def to_pod(self):
- pod = super(RunOutput, self).to_pod()
- pod['info'] = self.info.to_pod()
- pod['jobs'] = [i.to_pod() for i in self.jobs]
- pod['failed'] = [i.to_pod() for i in self.failed]
- return pod
-
-
-class JobOutput(WAOutput):
-
- def add_artifact(self, name, path, kind, *args, **kwargs):
- path = _check_artifact_path(path, self.output_directory)
- self.artifacts.append(Artifact(name, path, kind, Artifact.ITERATION, *args, **kwargs))
-
-
-class Artifact(object):
- """
- This is an artifact generated during execution/post-processing of a workload.
- Unlike metrics, this represents an actual artifact, such as a file, generated.
- This may be "result", such as trace, or it could be "meta data" such as logs.
- These are distinguished using the ``kind`` attribute, which also helps WA decide
- how it should be handled. Currently supported kinds are:
-
- :log: A log file. Not part of "results" as such but contains information about the
- run/workload execution that be useful for diagnostics/meta analysis.
- :meta: A file containing metadata. This is not part of "results", but contains
- information that may be necessary to reproduce the results (contrast with
- ``log`` artifacts which are *not* necessary).
- :data: This file contains new data, not available otherwise and should be considered
- part of the "results" generated by WA. Most traces would fall into this category.
- :export: Exported version of results or some other artifact. This signifies that
- this artifact does not contain any new data that is not available
- elsewhere and that it may be safely discarded without losing information.
- :raw: Signifies that this is a raw dump/log that is normally processed to extract
- useful information and is then discarded. In a sense, it is the opposite of
- ``export``, but in general may also be discarded.
-
- .. note:: whether a file is marked as ``log``/``data`` or ``raw`` depends on
- how important it is to preserve this file, e.g. when archiving, vs
- how much space it takes up. Unlike ``export`` artifacts which are
- (almost) always ignored by other exporters as that would never result
- in data loss, ``raw`` files *may* be processed by exporters if they
- decided that the risk of losing potentially (though unlikely) useful
- data is greater than the time/space cost of handling the artifact (e.g.
- a database uploader may choose to ignore ``raw`` artifacts, where as a
- network filer archiver may choose to archive them).
-
- .. note: The kind parameter is intended to represent the logical function of a particular
- artifact, not it's intended means of processing -- this is left entirely up to the
- result processors.
-
- """
-
- RUN = 'run'
- ITERATION = 'iteration'
-
- valid_kinds = ['log', 'meta', 'data', 'export', 'raw']
-
- @staticmethod
- def from_pod(pod):
- return Artifact(**pod)
-
- def __init__(self, name, path, kind, level=RUN, mandatory=False, description=None):
- """"
- :param name: Name that uniquely identifies this artifact.
- :param path: The *relative* path of the artifact. Depending on the ``level``
- must be either relative to the run or iteration output directory.
- Note: this path *must* be delimited using ``/`` irrespective of the
- operating system.
- :param kind: The type of the artifact this is (e.g. log file, result, etc.) this
- will be used a hit to result processors. This must be one of ``'log'``,
- ``'meta'``, ``'data'``, ``'export'``, ``'raw'``.
- :param level: The level at which the artifact will be generated. Must be either
- ``'iteration'`` or ``'run'``.
- :param mandatory: Boolean value indicating whether this artifact must be present
- at the end of result processing for its level.
- :param description: A free-form description of what this artifact is.
-
- """
- if kind not in self.valid_kinds:
- raise ValueError('Invalid Artifact kind: {}; must be in {}'.format(kind, self.valid_kinds))
- self.name = name
- self.path = path.replace('/', os.sep) if path is not None else path
- self.kind = kind
- self.level = level
- self.mandatory = mandatory
- self.description = description
-
- def exists(self, context):
- """Returns ``True`` if artifact exists within the specified context, and
- ``False`` otherwise."""
- fullpath = os.path.join(context.output_directory, self.path)
- return os.path.exists(fullpath)
-
- def to_pod(self):
- return copy(self.__dict__)
-
-
-class RunEvent(object):
- """
- An event that occured during a run.
-
- """
-
- @staticmethod
- def from_pod(pod):
- instance = RunEvent(pod['message'])
- instance.timestamp = pod['timestamp']
- return instance
-
- def __init__(self, message):
- self.timestamp = datetime.utcnow()
- self.message = message
-
- def to_pod(self):
- return copy(self.__dict__)
-
- def __str__(self):
- return '{} {}'.format(self.timestamp, self.message)
-
- __repr__ = __str__
-
-
-class Metric(object):
- """
- This is a single metric collected from executing a workload.
-
- :param name: the name of the metric. Uniquely identifies the metric
- within the results.
- :param value: The numerical value of the metric for this execution of
- a workload. This can be either an int or a float.
- :param units: Units for the collected value. Can be None if the value
- has no units (e.g. it's a count or a standardised score).
- :param lower_is_better: Boolean flag indicating where lower values are
- better than higher ones. Defaults to False.
- :param classifiers: A set of key-value pairs to further classify this metric
- beyond current iteration (e.g. this can be used to identify
- sub-tests).
-
- """
-
- @staticmethod
- def from_pod(pod):
- return Metric(**pod)
-
- def __init__(self, name, value, units=None, lower_is_better=False, classifiers=None):
- self.name = name
- self.value = numeric(value)
- self.units = units
- self.lower_is_better = lower_is_better
- self.classifiers = classifiers or {}
-
- def to_pod(self):
- return copy(self.__dict__)
-
- def __str__(self):
- result = '{}: {}'.format(self.name, self.value)
- if self.units:
- result += ' ' + self.units
- result += ' ({})'.format('-' if self.lower_is_better else '+')
- return '<{}>'.format(result)
-
- __repr__ = __str__
-
-
-def _check_artifact_path(path, rootpath):
- if path.startswith(rootpath):
- return os.path.abspath(path)
- rootpath = os.path.abspath(rootpath)
- full_path = os.path.join(rootpath, path)
- if not os.path.isfile(full_path):
- raise ValueError('Cannot add artifact because {} does not exist.'.format(full_path))
- return full_path
diff --git a/wa/framework/target.py b/wa/framework/target.py
deleted file mode 100644
index fa9323cd..00000000
--- a/wa/framework/target.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import string
-from copy import copy
-
-from devlib import Platform, AndroidTarget
-
-
-class TargetInfo(object):
-
- @staticmethod
- def from_pod(pod):
- instance = TargetInfo()
- instance.target = pod['target']
- instance.abi = pod['abi']
- instance.cpuinfo = Cpuinfo(pod['cpuinfo'])
- instance.os = pod['os']
- instance.os_version = pod['os_version']
- instance.abi = pod['abi']
- instance.is_rooted = pod['is_rooted']
- instance.kernel_version = KernelVersion(pod['kernel_release'],
- pod['kernel_version'])
- instance.kernel_config = KernelConfig(pod['kernel_config'])
-
- if pod["target"] == "AndroidTarget":
- instance.screen_resolution = pod['screen_resolution']
- instance.prop = pod['prop']
- instance.prop = pod['android_id']
-
- return instance
-
- def __init__(self, target=None):
- if target:
- self.target = target.__class__.__name__
- self.cpuinfo = target.cpuinfo
- self.os = target.os
- self.os_version = target.os_version
- self.abi = target.abi
- self.is_rooted = target.is_rooted
- self.kernel_version = target.kernel_version
- self.kernel_config = target.config
-
- if isinstance(target, AndroidTarget):
- self.screen_resolution = target.screen_resolution
- self.prop = target.getprop()
- self.android_id = target.android_id
-
- else:
- self.target = None
- self.cpuinfo = None
- self.os = None
- self.os_version = None
- self.abi = None
- self.is_rooted = None
- self.kernel_version = None
- self.kernel_config = None
-
- if isinstance(target, AndroidTarget):
- self.screen_resolution = None
- self.prop = None
- self.android_id = None
-
- def to_pod(self):
- pod = {}
- pod['target'] = self.target
- pod['abi'] = self.abi
- pod['cpuinfo'] = self.cpuinfo.sections
- pod['os'] = self.os
- pod['os_version'] = self.os_version
- pod['abi'] = self.abi
- pod['is_rooted'] = self.is_rooted
- pod['kernel_release'] = self.kernel_version.release
- pod['kernel_version'] = self.kernel_version.version
- pod['kernel_config'] = dict(self.kernel_config.iteritems())
-
- if self.target == "AndroidTarget":
- pod['screen_resolution'] = self.screen_resolution
- pod['prop'] = self.prop
- pod['android_id'] = self.android_id
-
- return pod
-
diff --git a/wlauto/__init__.py b/wlauto/__init__.py
deleted file mode 100644
index fbe2e15b..00000000
--- a/wlauto/__init__.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from wlauto.core.configuration import settings # NOQA
-from wlauto.core.device_manager import DeviceManager # NOQA
-from wlauto.core.command import Command # NOQA
-from wlauto.core.workload import Workload # NOQA
-from wlauto.core.plugin import Artifact, Alias # NOQA
-from wlauto.core.configuration.configuration import ConfigurationPoint as Parameter
-import wlauto.core.pluginloader as PluginLoader # NOQA
-from wlauto.core.instrumentation import Instrument # NOQA
-from wlauto.core.result import ResultProcessor, IterationResult # NOQA
-from wlauto.core.resource import ResourceGetter, Resource, GetterPriority, NO_ONE # NOQA
-from wlauto.core.exttype import get_plugin_type # NOQA Note: MUST be imported after other core imports.
-
-from wlauto.common.resources import File, PluginAsset, Executable
-from wlauto.common.android.resources import ApkFile, JarFile
-from wlauto.common.android.workload import (UiAutomatorWorkload, ApkWorkload, AndroidBenchmark, # NOQA
- AndroidUiAutoBenchmark, GameWorkload) # NOQA
-
-from wlauto.core.version import get_wa_version
-
-__version__ = get_wa_version()
diff --git a/wlauto/agenda-example-biglittle.yaml b/wlauto/agenda-example-biglittle.yaml
deleted file mode 100644
index eea89213..00000000
--- a/wlauto/agenda-example-biglittle.yaml
+++ /dev/null
@@ -1,79 +0,0 @@
-# This agenda specifies configuration that may be used for regression runs
-# on big.LITTLE systems. This agenda will with a TC2 device configured as
-# described in the documentation.
-config:
- device: tc2
- run_name: big.LITTLE_regression
-global:
- iterations: 5
-sections:
- - id: mp_a15only
- boot_parameters:
- os_mode: mp_a15_only
- runtime_parameters:
- a15_governor: interactive
- a15_governor_tunables:
- above_hispeed_delay: 20000
- - id: mp_a7bc
- boot_parameters:
- os_mode: mp_a7_bootcluster
- runtime_parameters:
- a7_governor: interactive
- a7_min_frequency: 500000
- a7_governor_tunables:
- above_hispeed_delay: 20000
- a15_governor: interactive
- a15_governor_tunables:
- above_hispeed_delay: 20000
- - id: mp_a15bc
- boot_parameters:
- os_mode: mp_a15_bootcluster
- runtime_parameters:
- a7_governor: interactive
- a7_min_frequency: 500000
- a7_governor_tunables:
- above_hispeed_delay: 20000
- a15_governor: interactive
- a15_governor_tunables:
- above_hispeed_delay: 20000
-workloads:
- - id: b01
- name: andebench
- workload_parameters:
- number_of_threads: 5
- - id: b02
- name: andebench
- label: andebenchst
- workload_parameters:
- number_of_threads: 1
- - id: b03
- name: antutu
- label: antutu4.0.3
- workload_parameters:
- version: 4.0.3
- - id: b04
- name: benchmarkpi
- - id: b05
- name: caffeinemark
- - id: b06
- name: cfbench
- - id: b07
- name: geekbench
- label: geekbench3
- workload_parameters:
- version: 3
- - id: b08
- name: linpack
- - id: b09
- name: quadrant
- - id: b10
- name: smartbench
- - id: b11
- name: sqlite
- - id: b12
- name: vellamo
-
- - id: w01
- name: bbench_with_audio
- - id: w02
- name: audio
diff --git a/wlauto/agenda-example-tutorial.yaml b/wlauto/agenda-example-tutorial.yaml
deleted file mode 100644
index 6eb2b9a1..00000000
--- a/wlauto/agenda-example-tutorial.yaml
+++ /dev/null
@@ -1,43 +0,0 @@
-# This an agenda that is built-up during the explantion of the agenda features
-# in the documentation. This should work out-of-the box on most rooted Android
-# devices.
-config:
- project: governor_comparison
- run_name: performance_vs_interactive
-
- device: generic_android
- reboot_policy: never
-
- instrumentation: [coreutil, cpufreq]
- coreutil:
- threshold: 80
- sysfs_extractor:
- paths: [/proc/meminfo]
- result_processors: [sqlite]
- sqlite:
- database: ~/my_wa_results.sqlite
-global:
- iterations: 5
-sections:
- - id: perf
- runtime_params:
- sysfile_values:
- /sys/devices/system/cpu/cpu0/cpufreq/scaling_governor: performance
- - id: inter
- runtime_params:
- sysfile_values:
- /sys/devices/system/cpu/cpu0/cpufreq/scaling_governor: interactive
-workloads:
- - id: 01_dhry
- name: dhrystone
- label: dhrystone_15over6
- workload_params:
- threads: 6
- mloops: 15
- - id: 02_memc
- name: memcpy
- instrumentation: [sysfs_extractor]
- - id: 03_cycl
- name: cyclictest
- iterations: 10
-
diff --git a/wlauto/commands/__init__.py b/wlauto/commands/__init__.py
deleted file mode 100644
index 16224d6f..00000000
--- a/wlauto/commands/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/commands/create.py b/wlauto/commands/create.py
deleted file mode 100644
index b520a208..00000000
--- a/wlauto/commands/create.py
+++ /dev/null
@@ -1,400 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import os
-import sys
-import stat
-import string
-import textwrap
-import argparse
-import shutil
-import getpass
-import subprocess
-from collections import OrderedDict
-
-import yaml
-
-from wlauto import PluginLoader, Command, settings
-from wlauto.exceptions import CommandError, ConfigError
-from wlauto.core.command import init_argument_parser
-from wlauto.utils.misc import (capitalize, check_output,
- ensure_file_directory_exists as _f, ensure_directory_exists as _d)
-from wlauto.utils.types import identifier
-from wlauto.utils.doc import format_body
-
-
-__all__ = ['create_workload']
-
-
-TEMPLATES_DIR = os.path.join(os.path.dirname(__file__), 'templates')
-
-UIAUTO_BUILD_SCRIPT = """#!/bin/bash
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $$class_dir
-cp $$base_class $$class_dir
-
-ant build
-
-if [[ -f bin/${package_name}.jar ]]; then
- cp bin/${package_name}.jar ..
-fi
-"""
-
-
-class CreateSubcommand(object):
-
- name = None
- help = None
- usage = None
- description = None
- epilog = None
- formatter_class = None
-
- def __init__(self, logger, subparsers):
- self.logger = logger
- self.group = subparsers
- parser_params = dict(help=(self.help or self.description), usage=self.usage,
- description=format_body(textwrap.dedent(self.description), 80),
- epilog=self.epilog)
- if self.formatter_class:
- parser_params['formatter_class'] = self.formatter_class
- self.parser = subparsers.add_parser(self.name, **parser_params)
- init_argument_parser(self.parser) # propagate top-level options
- self.initialize()
-
- def initialize(self):
- pass
-
-
-class CreateWorkloadSubcommand(CreateSubcommand):
-
- name = 'workload'
- description = '''Create a new workload. By default, a basic workload template will be
- used but you can use options to specify a different template.'''
-
- def initialize(self):
- self.parser.add_argument('name', metavar='NAME',
- help='Name of the workload to be created')
- self.parser.add_argument('-p', '--path', metavar='PATH', default=None,
- help='The location at which the workload will be created. If not specified, ' +
- 'this defaults to "~/.workload_automation/workloads".')
- self.parser.add_argument('-f', '--force', action='store_true',
- help='Create the new workload even if a workload with the specified ' +
- 'name already exists.')
-
- template_group = self.parser.add_mutually_exclusive_group()
- template_group.add_argument('-A', '--android-benchmark', action='store_true',
- help='Use android benchmark template. This template allows you to specify ' +
- ' an APK file that will be installed and run on the device. You should ' +
- ' place the APK file into the workload\'s directory at the same level ' +
- 'as the __init__.py.')
- template_group.add_argument('-U', '--ui-automation', action='store_true',
- help='Use UI automation template. This template generates a UI automation ' +
- 'Android project as well as the Python class. This a more general ' +
- 'version of the android benchmark template that makes no assumptions ' +
- 'about the nature of your workload, apart from the fact that you need ' +
- 'UI automation. If you need to install an APK, start an app on device, ' +
- 'etc., you will need to do that explicitly in your code.')
- template_group.add_argument('-B', '--android-uiauto-benchmark', action='store_true',
- help='Use android uiauto benchmark template. This generates a UI automation ' +
- 'project as well as a Python class. This template should be used ' +
- 'if you have a APK file that needs to be run on the device. You ' +
- 'should place the APK file into the workload\'s directory at the ' +
- 'same level as the __init__.py.')
-
- def execute(self, state, args): # pylint: disable=R0201
- where = args.path or 'local'
- check_name = not args.force
-
- if args.android_benchmark:
- kind = 'android'
- elif args.ui_automation:
- kind = 'uiauto'
- elif args.android_uiauto_benchmark:
- kind = 'android_uiauto'
- else:
- kind = 'basic'
-
- try:
- create_workload(args.name, kind, where, check_name)
- except CommandError, e:
- print "ERROR:", e
-
-
-class CreatePackageSubcommand(CreateSubcommand):
-
- name = 'package'
- description = '''Create a new empty Python package for WA plugins. On installation,
- this package will "advertise" itself to WA so that Plugins with in it will
- be loaded by WA when it runs.'''
-
- def initialize(self):
- self.parser.add_argument('name', metavar='NAME',
- help='Name of the package to be created')
- self.parser.add_argument('-p', '--path', metavar='PATH', default=None,
- help='The location at which the new pacakge will be created. If not specified, ' +
- 'current working directory will be used.')
- self.parser.add_argument('-f', '--force', action='store_true',
- help='Create the new package even if a file or directory with the same name '
- 'already exists at the specified location.')
-
- def execute(self, args): # pylint: disable=R0201
- package_dir = args.path or os.path.abspath('.')
- template_path = os.path.join(TEMPLATES_DIR, 'setup.template')
- self.create_plugins_package(package_dir, args.name, template_path, args.force)
-
- def create_plugins_package(self, location, name, setup_template_path, overwrite=False):
- package_path = os.path.join(location, name)
- if os.path.exists(package_path):
- if overwrite:
- self.logger.info('overwriting existing "{}"'.format(package_path))
- shutil.rmtree(package_path)
- else:
- raise CommandError('Location "{}" already exists.'.format(package_path))
- actual_package_path = os.path.join(package_path, name)
- os.makedirs(actual_package_path)
- setup_text = render_template(setup_template_path, {'package_name': name, 'user': getpass.getuser()})
- with open(os.path.join(package_path, 'setup.py'), 'w') as wfh:
- wfh.write(setup_text)
- touch(os.path.join(actual_package_path, '__init__.py'))
-
-
-class CreateAgendaSubcommand(CreateSubcommand):
-
- name = 'agenda'
- description = """
- Create an agenda whith the specified plugins enabled. And parameters set to their
- default values.
- """
-
- def initialize(self):
- self.parser.add_argument('plugins', nargs='+',
- help='Plugins to be added')
- self.parser.add_argument('-i', '--iterations', type=int, default=1,
- help='Sets the number of iterations for all workloads')
- self.parser.add_argument('-r', '--include-runtime-params', action='store_true',
- help="""
- Adds runtime parameters to the global section of the generated
- agenda. Note: these do not have default values, so only name
- will be added. Also, runtime parameters are devices-specific, so
- a device must be specified (either in the list of plugins,
- or in the existing config).
- """)
- self.parser.add_argument('-o', '--output', metavar='FILE',
- help='Output file. If not specfied, STDOUT will be used instead.')
-
- def execute(self, args): # pylint: disable=no-self-use,too-many-branches,too-many-statements
- loader = PluginLoader(packages=settings.plugin_packages,
- paths=settings.plugin_paths)
- agenda = OrderedDict()
- agenda['config'] = OrderedDict(instrumentation=[], result_processors=[])
- agenda['global'] = OrderedDict(iterations=args.iterations)
- agenda['workloads'] = []
- device = None
- device_config = None
- for name in args.plugins:
- extcls = loader.get_plugin_class(name)
- config = loader.get_default_config(name)
- del config['modules']
-
- if extcls.kind == 'workload':
- entry = OrderedDict()
- entry['name'] = extcls.name
- if name != extcls.name:
- entry['label'] = name
- entry['params'] = config
- agenda['workloads'].append(entry)
- elif extcls.kind == 'device':
- if device is not None:
- raise ConfigError('Specifying multiple devices: {} and {}'.format(device.name, name))
- device = extcls
- device_config = config
- agenda['config']['device'] = name
- agenda['config']['device_config'] = config
- else:
- if extcls.kind == 'instrument':
- agenda['config']['instrumentation'].append(name)
- if extcls.kind == 'result_processor':
- agenda['config']['result_processors'].append(name)
- agenda['config'][name] = config
-
- if args.include_runtime_params:
- if not device:
- if settings.device:
- device = loader.get_plugin_class(settings.device)
- device_config = loader.get_default_config(settings.device)
- else:
- raise ConfigError('-r option requires for a device to be in the list of plugins')
- rps = OrderedDict()
- for rp in device.runtime_parameters:
- if hasattr(rp, 'get_runtime_parameters'):
- # a core parameter needs to be expanded for each of the
- # device's cores, if they're avialable
- for crp in rp.get_runtime_parameters(device_config.get('core_names', [])):
- rps[crp.name] = None
- else:
- rps[rp.name] = None
- agenda['global']['runtime_params'] = rps
-
- if args.output:
- wfh = open(args.output, 'w')
- else:
- wfh = sys.stdout
- yaml.dump(agenda, wfh, indent=4, default_flow_style=False)
- if args.output:
- wfh.close()
-
-
-class CreateCommand(Command):
-
- name = 'create'
- description = '''Used to create various WA-related objects (see positional arguments list for what
- objects may be created).\n\nUse "wa create <object> -h" for object-specific arguments.'''
- formatter_class = argparse.RawDescriptionHelpFormatter
- subcmd_classes = [
- CreateWorkloadSubcommand,
- CreatePackageSubcommand,
- CreateAgendaSubcommand,
- ]
-
- def initialize(self, context):
- subparsers = self.parser.add_subparsers(dest='what')
- self.subcommands = [] # pylint: disable=W0201
- for subcmd_cls in self.subcmd_classes:
- subcmd = subcmd_cls(self.logger, subparsers)
- self.subcommands.append(subcmd)
-
- def execute(self, args):
- for subcmd in self.subcommands:
- if subcmd.name == args.what:
- subcmd.execute(args)
- break
- else:
- raise CommandError('Not a valid create parameter: {}'.format(args.name))
-
-
-def create_workload(name, kind='basic', where='local', check_name=True, **kwargs):
- if check_name:
- extloader = PluginLoader(packages=settings.plugin_packages, paths=settings.plugin_paths)
- if name in [wl.name for wl in extloader.list_workloads()]:
- raise CommandError('Workload with name "{}" already exists.'.format(name))
-
- class_name = get_class_name(name)
- if where == 'local':
- workload_dir = _d(os.path.join(settings.user_directory, 'workloads', name))
- else:
- workload_dir = _d(os.path.join(where, name))
-
- if kind == 'basic':
- create_basic_workload(workload_dir, name, class_name, **kwargs)
- elif kind == 'uiauto':
- create_uiautomator_workload(workload_dir, name, class_name, **kwargs)
- elif kind == 'android':
- create_android_benchmark(workload_dir, name, class_name, **kwargs)
- elif kind == 'android_uiauto':
- create_android_uiauto_benchmark(workload_dir, name, class_name, **kwargs)
- else:
- raise CommandError('Unknown workload type: {}'.format(kind))
-
- print 'Workload created in {}'.format(workload_dir)
-
-
-def create_basic_workload(path, name, class_name):
- source_file = os.path.join(path, '__init__.py')
- with open(source_file, 'w') as wfh:
- wfh.write(render_template('basic_workload', {'name': name, 'class_name': class_name}))
-
-
-def create_uiautomator_workload(path, name, class_name):
- uiauto_path = _d(os.path.join(path, 'uiauto'))
- create_uiauto_project(uiauto_path, name)
- source_file = os.path.join(path, '__init__.py')
- with open(source_file, 'w') as wfh:
- wfh.write(render_template('uiauto_workload', {'name': name, 'class_name': class_name}))
-
-
-def create_android_benchmark(path, name, class_name):
- source_file = os.path.join(path, '__init__.py')
- with open(source_file, 'w') as wfh:
- wfh.write(render_template('android_benchmark', {'name': name, 'class_name': class_name}))
-
-
-def create_android_uiauto_benchmark(path, name, class_name):
- uiauto_path = _d(os.path.join(path, 'uiauto'))
- create_uiauto_project(uiauto_path, name)
- source_file = os.path.join(path, '__init__.py')
- with open(source_file, 'w') as wfh:
- wfh.write(render_template('android_uiauto_benchmark', {'name': name, 'class_name': class_name}))
-
-
-def create_uiauto_project(path, name, target='1'):
- sdk_path = get_sdk_path()
- android_path = os.path.join(sdk_path, 'tools', 'android')
- package_name = 'com.arm.wlauto.uiauto.' + name.lower()
-
- # ${ANDROID_HOME}/tools/android create uitest-project -n com.arm.wlauto.uiauto.linpack -t 1 -p ../test2
- command = '{} create uitest-project --name {} --target {} --path {}'.format(android_path,
- package_name,
- target,
- path)
- try:
- check_output(command, shell=True)
- except subprocess.CalledProcessError as e:
- if 'is is not valid' in e.output:
- message = 'No Android SDK target found; have you run "{} update sdk" and download a platform?'
- raise CommandError(message.format(android_path))
-
- build_script = os.path.join(path, 'build.sh')
- with open(build_script, 'w') as wfh:
- template = string.Template(UIAUTO_BUILD_SCRIPT)
- wfh.write(template.substitute({'package_name': package_name}))
- os.chmod(build_script, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
-
- source_file = _f(os.path.join(path, 'src',
- os.sep.join(package_name.split('.')[:-1]),
- 'UiAutomation.java'))
- with open(source_file, 'w') as wfh:
- wfh.write(render_template('UiAutomation.java', {'name': name, 'package_name': package_name}))
-
-
-# Utility functions
-
-def get_sdk_path():
- sdk_path = os.getenv('ANDROID_HOME')
- if not sdk_path:
- raise CommandError('Please set ANDROID_HOME environment variable to point to ' +
- 'the locaton of Android SDK')
- return sdk_path
-
-
-def get_class_name(name, postfix=''):
- name = identifier(name)
- return ''.join(map(capitalize, name.split('_'))) + postfix
-
-
-def render_template(name, params):
- filepath = os.path.join(TEMPLATES_DIR, name)
- with open(filepath) as fh:
- text = fh.read()
- template = string.Template(text)
- return template.substitute(params)
-
-
-def touch(path):
- with open(path, 'w') as _:
- pass
diff --git a/wlauto/commands/list.py b/wlauto/commands/list.py
deleted file mode 100644
index f261c7eb..00000000
--- a/wlauto/commands/list.py
+++ /dev/null
@@ -1,74 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import PluginLoader, Command, settings
-from wlauto.utils.formatter import DescriptionListFormatter
-from wlauto.utils.doc import get_summary
-from wlauto.core import pluginloader
-
-class ListCommand(Command):
-
- name = 'list'
- description = 'List available WA plugins with a short description of each.'
-
- def initialize(self, context):
- plugin_types = ['{}s'.format(name) for name in pluginloader.kinds]
- self.parser.add_argument('kind', metavar='KIND',
- help=('Specify the kind of plugin to list. Must be '
- 'one of: {}'.format(', '.join(plugin_types))),
- choices=plugin_types)
- self.parser.add_argument('-n', '--name', help='Filter results by the name specified')
- self.parser.add_argument('-o', '--packaged-only', action='store_true',
- help='''
- Only list plugins packaged with WA itself. Do not list plugins
- installed locally or from other packages.
- ''')
- self.parser.add_argument('-p', '--platform', help='Only list results that are supported by '
- 'the specified platform')
-
- def execute(self, state, args):
- filters = {}
- if args.name:
- filters['name'] = args.name
-
- results = pluginloader.list_plugins(args.kind[:-1])
- if filters or args.platform:
- filtered_results = []
- for result in results:
- passed = True
- for k, v in filters.iteritems():
- if getattr(result, k) != v:
- passed = False
- break
- if passed and args.platform:
- passed = check_platform(result, args.platform)
- if passed:
- filtered_results.append(result)
- else: # no filters specified
- filtered_results = results
-
- if filtered_results:
- output = DescriptionListFormatter()
- for result in sorted(filtered_results, key=lambda x: x.name):
- output.add_item(get_summary(result), result.name)
- print output.format_data()
-
-
-def check_platform(plugin, platform):
- supported_platforms = getattr(plugin, 'supported_platforms', [])
- if supported_platforms:
- return platform in supported_platforms
- return True
diff --git a/wlauto/commands/record.py b/wlauto/commands/record.py
deleted file mode 100644
index 23cf5410..00000000
--- a/wlauto/commands/record.py
+++ /dev/null
@@ -1,217 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import sys
-
-
-from wlauto import Command, settings
-from wlauto.core import pluginloader
-from wlauto.common.resources import Executable
-from wlauto.core.resource import NO_ONE
-from wlauto.core.resolver import ResourceResolver
-from wlauto.core.configuration import RunConfiguration
-from wlauto.common.android.workload import ApkWorkload
-
-
-class RecordCommand(Command):
-
- name = 'record'
- description = '''Performs a revent recording
-
- This command helps making revent recordings. It will automatically
- deploy revent and even has the option of automatically opening apps.
-
- Revent allows you to record raw inputs such as screen swipes or button presses.
- This can be useful for recording inputs for workloads such as games that don't
- have XML UI layouts that can be used with UIAutomator. As a drawback from this,
- revent recordings are specific to the device type they were recorded on.
-
- WA uses two parts to the names of revent recordings in the format,
- {device_name}.{suffix}.revent.
-
- - device_name can either be specified manually with the ``-d`` argument or
- it can be automatically determined. On Android device it will be obtained
- from ``build.prop``, on Linux devices it is obtained from ``/proc/device-tree/model``.
- - suffix is used by WA to determine which part of the app execution the
- recording is for, currently these are either ``setup`` or ``run``. This
- should be specified with the ``-s`` argument.
- '''
-
- def initialize(self, context):
- self.context = context
- self.parser.add_argument('-d', '--device', help='The name of the device')
- self.parser.add_argument('-o', '--output', help='Directory to save the recording in')
-
- # Need validation
- self.parser.add_argument('-s', '--suffix', help='The suffix of the revent file, e.g. ``setup``')
- self.parser.add_argument('-C', '--clear', help='Clear app cache before launching it',
- action="store_true")
-
- group = self.parser.add_mutually_exclusive_group(required=False)
- group.add_argument('-p', '--package', help='Package to launch before recording')
- group.add_argument('-w', '--workload', help='Name of a revent workload (mostly games)')
-
- # Validate command options
- def validate_args(self, args):
- if args.clear and not (args.package or args.workload):
- self.logger.error("Package/Workload must be specified if you want to clear cache")
- self.parser.print_help()
- sys.exit()
- if args.workload and args.suffix:
- self.logger.error("cannot specify manual suffixes for workloads")
- self.parser.print_help()
- sys.exit()
- if args.suffix:
- args.suffix += "."
-
- # pylint: disable=W0201
- def execute(self, state, args):
- self.validate_args(args)
- self.logger.info("Connecting to device...")
-
- # Setup config
- self.config = RunConfiguration(pluginloader)
- for filepath in settings.config_paths:
- self.config.load_config(filepath)
- self.config.set_agenda(Agenda())
- self.config.finalize()
-
- # Setup device
- self.device_manager = pluginloader.get_manager(self.config.device)
- self.device_manager.validate()
- self.device_manager.connect()
- context = LightContext(self.config, self.device_manager)
- self.device_manager.initialize(context)
- self.device = self.device_manager.target
- if args.device:
- self.device_name = args.device
- else:
- self.device_name = self.device.model
-
- # Install Revent
- host_binary = context.resolver.get(Executable(NO_ONE, self.device.abi, 'revent'))
- self.target_binary = self.device.install_if_needed(host_binary)
-
- if args.workload:
- self.workload_record(args, context)
- elif args.package:
- self.package_record(args, context)
- else:
- self.manual_record(args, context)
-
- def manual_record(self, args, context):
- revent_file = self.device.get_workpath('{}.{}revent'.format(self.device_name, args.suffix or ""))
- self._record(revent_file, "", args.output)
-
- def package_record(self, args, context):
- revent_file = self.device.get_workpath('{}.{}revent'.format(self.device_name, args.suffix or ""))
- if args.clear:
- self.device.execute("pm clear {}".format(args.package))
-
- self.logger.info("Starting {}".format(args.package))
- self.device.execute('monkey -p {} -c android.intent.category.LAUNCHER 1'.format(args.package))
-
- self._record(revent_file, "", args.output)
-
- def workload_record(self, args, context):
- setup_file = self.device.get_workpath('{}.setup.revent'.format(self.device_name))
- run_file = self.device.get_workpath('{}.run.revent'.format(self.device_name))
-
- self.logger.info("Deploying {}".format(args.workload))
- workload = pluginloader.get_workload(args.workload, self.device)
- workload.apk_init_resources(context)
- workload.initialize_package(context)
- workload.do_post_install(context)
- workload.start_activity()
-
- if args.clear:
- workload.reset(context)
-
- self._record(setup_file, " SETUP",
- args.output or os.path.join(workload.dependencies_directory, 'revent_files'))
- self._record(run_file, " RUN",
- args.output or os.path.join(workload.dependencies_directory, 'revent_files'))
-
- self.logger.info("Tearing down {}".format(args.workload))
- workload.apk_teardown(context)
-
- def _record(self, revent_file, name, output_path):
- self.logger.info("Press Enter when you are ready to record{}...".format(name))
- raw_input("")
- command = "{} record -t 100000 -s {}".format(self.target_binary, revent_file)
- self.device.kick_off(command)
-
- self.logger.info("Press Enter when you have finished recording {}...".format(name))
- raw_input("")
- self.device.killall("revent")
-
- output_path = output_path or os.getcwdu()
- if not os.path.isdir(output_path):
- os.mkdirs(output_path)
-
- revent_file_name = self.device.path.basename(revent_file)
- host_path = os.path.join(output_path, revent_file_name)
- if os.path.exists(host_path):
- self.logger.info("Revent file '{}' already exists, overwrite? [y/n]".format(revent_file_name))
- if raw_input("") == "y":
- os.remove(host_path)
- else:
- self.logger.warning("Did not pull and overwrite '{}'".format(revent_file_name))
- return
- self.logger.info("Pulling '{}' from device".format(self.device.path.basename(revent_file)))
- self.device.pull(revent_file, output_path)
-
-class ReplayCommand(RecordCommand):
-
- name = 'replay'
- description = '''Replay a revent recording
-
- Revent allows you to record raw inputs such as screen swipes or button presses.
- See ``wa show record`` to see how to make an revent recording.
- '''
-
- def initialize(self, context):
- self.context = context
- self.parser.add_argument('revent', help='The name of the file to replay')
- self.parser.add_argument('-p', '--package', help='Package to launch before recording')
- self.parser.add_argument('-C', '--clear', help='Clear app cache before launching it',
- action="store_true")
-
-
- # pylint: disable=W0201
- def run(self, args):
- self.logger.info("Pushing file to device")
- self.device.push(args.revent, self.device.working_directory)
- revent_file = self.device.path.join(self.device.working_directory, os.path.split(args.revent)[1])
-
- if args.clear:
- self.device.execute("pm clear {}".format(args.package))
-
- if args.package:
- self.logger.info("Starting {}".format(args.package))
- self.device.execute('monkey -p {} -c android.intent.category.LAUNCHER 1'.format(args.package))
-
- command = "{} replay {}".format(self.target_binary, revent_file)
- self.device.execute(command)
- self.logger.info("Finished replay")
-
-
-# Used to satisfy the API
-class LightContext(object):
- def __init__(self, config, device_manager):
- self.resolver = ResourceResolver(config)
- self.resolver.load()
- self.device_manager = device_manager
diff --git a/wlauto/commands/run.py b/wlauto/commands/run.py
deleted file mode 100644
index 57a78819..00000000
--- a/wlauto/commands/run.py
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import os
-import sys
-import shutil
-
-import wlauto
-from wlauto import Command, settings
-from wlauto.core import pluginloader
-from wlauto.core.configuration import RunConfiguration
-from wlauto.core.configuration.parsers import AgendaParser, ConfigParser
-from wlauto.core.execution import Executor
-from wlauto.core.output import init_wa_output
-from wlauto.core.version import get_wa_version
-from wlauto.exceptions import NotFoundError, ConfigError
-from wlauto.utils.log import add_log_file
-from wlauto.utils.types import toggle_set
-
-
-class RunCommand(Command):
-
- name = 'run'
- description = 'Execute automated workloads on a remote device and process the resulting output.'
-
- def initialize(self, context):
- self.parser.add_argument('agenda', metavar='AGENDA',
- help="""
- Agenda for this workload automation run. This defines which
- workloads will be executed, how many times, with which
- tunables, etc. See example agendas in {} for an example of
- how this file should be structured.
- """.format(os.path.dirname(wlauto.__file__)))
- self.parser.add_argument('-d', '--output-directory', metavar='DIR', default=None,
- help="""
- Specify a directory where the output will be generated. If
- the directory already exists, the script will abort unless -f
- option (see below) is used, in which case the contents of the
- directory will be overwritten. If this option is not specified,
- then {} will be used instead.
- """.format(settings.default_output_directory))
- self.parser.add_argument('-f', '--force', action='store_true',
- help="""
- Overwrite output directory if it exists. By default, the script
- will abort in this situation to prevent accidental data loss.
- """)
- self.parser.add_argument('-i', '--id', action='append', dest='only_run_ids', metavar='ID',
- help="""
- Specify a workload spec ID from an agenda to run. If this is
- specified, only that particular spec will be run, and other
- workloads in the agenda will be ignored. This option may be
- used to specify multiple IDs.
- """)
- self.parser.add_argument('--disable', action='append', dest='instruments_to_disable',
- default=[],
- metavar='INSTRUMENT', help="""
- Specify an instrument to disable from the command line. This
- equivalent to adding "~{metavar}" to the instrumentation list in
- the agenda. This can be used to temporarily disable a troublesome
- instrument for a particular run without introducing permanent
- change to the config (which one might then forget to revert).
- This option may be specified multiple times.
- """)
-
- def execute(self, config, args):
- output = self.set_up_output_directory(config, args)
- add_log_file(output.logfile)
-
- self.logger.debug('Version: {}'.format(get_wa_version()))
- self.logger.debug('Command Line: {}'.format(' '.join(sys.argv)))
-
- disabled_instruments = toggle_set(["~{}".format(i)
- for i in args.instruments_to_disable])
- config.jobs_config.disable_instruments(disabled_instruments)
- config.jobs_config.only_run_ids(args.only_run_ids)
-
- parser = AgendaParser()
- if os.path.isfile(args.agenda):
- parser.load_from_path(config, args.agenda)
- shutil.copy(args.agenda, output.raw_config_dir)
- else:
- try:
- pluginloader.get_plugin_class(args.agenda, kind='workload')
- agenda = {'workloads': [{'name': args.agenda}]}
- parser.load(config, agenda, 'CMDLINE_ARGS')
- except NotFoundError:
- msg = 'Agenda file "{}" does not exist, and there no workload '\
- 'with that name.\nYou can get a list of available '\
- 'by running "wa list workloads".'
- raise ConfigError(msg.format(args.agenda))
-
- executor = Executor()
- executor.execute(config, output)
-
- def set_up_output_directory(self, config, args):
- if args.output_directory:
- output_directory = args.output_directory
- else:
- output_directory = settings.default_output_directory
- self.logger.debug('Using output directory: {}'.format(output_directory))
- try:
- return init_wa_output(output_directory, config, args.force)
- except RuntimeError as e:
- if 'path exists' in str(e):
- msg = 'Output directory "{}" exists.\nPlease specify another '\
- 'location, or use -f option to overwrite.'
- self.logger.critical(msg.format(output_directory))
- sys.exit(1)
- else:
- raise e
diff --git a/wlauto/commands/show.py b/wlauto/commands/show.py
deleted file mode 100644
index 75bbac8b..00000000
--- a/wlauto/commands/show.py
+++ /dev/null
@@ -1,114 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import sys
-import subprocess
-from cStringIO import StringIO
-
-from wlauto import Command
-from wlauto.core.configuration import settings
-from wlauto.core import pluginloader
-from wlauto.utils.doc import (get_summary, get_description, get_type_name, format_column, format_body,
- format_paragraph, indent, strip_inlined_text)
-from wlauto.utils.misc import get_pager
-from wlauto.utils.terminalsize import get_terminal_size
-
-
-class ShowCommand(Command):
-
- name = 'show'
-
- description = """
- Display documentation for the specified plugin (workload, instrument, etc.).
- """
-
- def initialize(self, context):
- self.parser.add_argument('name', metavar='EXTENSION',
- help='''The name of the plugin for which information will
- be shown.''')
-
- def execute(self, state, args):
- # pylint: disable=unpacking-non-sequence
- plugin = pluginloader.get_plugin_class(args.name)
- out = StringIO()
- term_width, term_height = get_terminal_size()
- format_plugin(plugin, out, term_width)
- text = out.getvalue()
- pager = get_pager()
- if len(text.split('\n')) > term_height and pager:
- try:
- sp = subprocess.Popen(pager, stdin=subprocess.PIPE)
- sp.communicate(text)
- except OSError:
- self.logger.warning('Could not use PAGER "{}"'.format(pager))
- sys.stdout.write(text)
- else:
- sys.stdout.write(text)
-
-
-def format_plugin(plugin, out, width):
- format_plugin_name(plugin, out)
- out.write('\n')
- format_plugin_summary(plugin, out, width)
- out.write('\n')
- if hasattr(plugin, 'supported_platforms'):
- format_supported_platforms(plugin, out, width)
- out.write('\n')
- if plugin.parameters:
- format_plugin_parameters(plugin, out, width)
- out.write('\n')
- format_plugin_description(plugin, out, width)
-
-
-def format_plugin_name(plugin, out):
- out.write('\n{}\n'.format(plugin.name))
-
-
-def format_plugin_summary(plugin, out, width):
- out.write('{}\n'.format(format_body(strip_inlined_text(get_summary(plugin)), width)))
-
-
-def format_supported_platforms(plugin, out, width):
- text = 'supported on: {}'.format(', '.join(plugin.supported_platforms))
- out.write('{}\n'.format(format_body(text, width)))
-
-
-def format_plugin_description(plugin, out, width):
- # skip the initial paragraph of multi-paragraph description, as already
- # listed above.
- description = get_description(plugin).split('\n\n', 1)[-1]
- out.write('{}\n'.format(format_body(strip_inlined_text(description), width)))
-
-
-def format_plugin_parameters(plugin, out, width, shift=4):
- out.write('parameters:\n\n')
- param_texts = []
- for param in plugin.parameters:
- description = format_paragraph(strip_inlined_text(param.description or ''), width - shift)
- param_text = '{}'.format(param.name)
- if param.mandatory:
- param_text += " (MANDATORY)"
- param_text += '\n{}\n'.format(description)
- param_text += indent('type: {}\n'.format(get_type_name(param.kind)))
- if param.allowed_values:
- param_text += indent('allowed values: {}\n'.format(', '.join(map(str, param.allowed_values))))
- elif param.constraint:
- param_text += indent('constraint: {}\n'.format(get_type_name(param.constraint)))
- if param.default is not None:
- param_text += indent('default: {}\n'.format(param.default))
- param_texts.append(indent(param_text, shift))
-
- out.write(format_column('\n'.join(param_texts), width))
diff --git a/wlauto/commands/templates/UiAutomation.java b/wlauto/commands/templates/UiAutomation.java
deleted file mode 100644
index bd33d9a7..00000000
--- a/wlauto/commands/templates/UiAutomation.java
+++ /dev/null
@@ -1,25 +0,0 @@
-package ${package_name};
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.KeyEvent;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "${name}";
-
- public void runUiAutomation() throws Exception {
- // UI Automation code goes here
- }
-
-}
diff --git a/wlauto/commands/templates/android_benchmark b/wlauto/commands/templates/android_benchmark
deleted file mode 100644
index 82796bd5..00000000
--- a/wlauto/commands/templates/android_benchmark
+++ /dev/null
@@ -1,27 +0,0 @@
-from wlauto import AndroidBenchmark, Parameter
-
-
-class ${class_name}(AndroidBenchmark):
-
- name = '${name}'
- # NOTE: Please do not leave these comments in the code.
- #
- # Replace with the package for the app in the APK file.
- package = 'com.foo.bar'
- # Replace with the full path to the activity to run.
- activity = '.RunBuzz'
- description = "This is an placeholder description"
-
- parameters = [
- # Workload parameters go here e.g.
- Parameter('Example parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
- description='This is an example parameter')
- ]
-
- def run(self, context):
- pass
-
- def update_result(self, context):
- super(${class_name}, self).update_result(context)
- # process results and add them using
- # context.result.add_metric
diff --git a/wlauto/commands/templates/android_uiauto_benchmark b/wlauto/commands/templates/android_uiauto_benchmark
deleted file mode 100644
index 5d6893a8..00000000
--- a/wlauto/commands/templates/android_uiauto_benchmark
+++ /dev/null
@@ -1,24 +0,0 @@
-from wlauto import AndroidUiAutoBenchmark, Parameter
-
-
-class ${class_name}(AndroidUiAutoBenchmark):
-
- name = '${name}'
- # NOTE: Please do not leave these comments in the code.
- #
- # Replace with the package for the app in the APK file.
- package = 'com.foo.bar'
- # Replace with the full path to the activity to run.
- activity = '.RunBuzz'
- description = "This is an placeholder description"
-
- parameters = [
- # Workload parameters go here e.g.
- Parameter('Example parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
- description='This is an example parameter')
- ]
-
- def update_result(self, context):
- super(${class_name}, self).update_result(context)
- # process results and add them using
- # context.result.add_metric
diff --git a/wlauto/commands/templates/basic_workload b/wlauto/commands/templates/basic_workload
deleted file mode 100644
index e75316f1..00000000
--- a/wlauto/commands/templates/basic_workload
+++ /dev/null
@@ -1,28 +0,0 @@
-from wlauto import Workload, Parameter
-
-
-class ${class_name}(Workload):
-
- name = '${name}'
- description = "This is an placeholder description"
-
- parameters = [
- # Workload parameters go here e.g.
- Parameter('Example parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
- description='This is an example parameter')
- ]
-
- def setup(self, context):
- pass
-
- def run(self, context):
- pass
-
- def update_result(self, context):
- pass
-
- def teardown(self, context):
- pass
-
- def validate(self):
- pass
diff --git a/wlauto/commands/templates/setup.template b/wlauto/commands/templates/setup.template
deleted file mode 100644
index f9097b59..00000000
--- a/wlauto/commands/templates/setup.template
+++ /dev/null
@@ -1,102 +0,0 @@
-import os
-import sys
-import warnings
-from multiprocessing import Process
-
-try:
- from setuptools.command.install import install as orig_install
- from setuptools import setup
-except ImportError:
- from distutils.command.install import install as orig_install
- from distutils.core import setup
-
-try:
- import pwd
-except ImportError:
- pwd = None
-
-warnings.filterwarnings('ignore', "Unknown distribution option: 'install_requires'")
-
-try:
- os.remove('MANIFEST')
-except OSError:
- pass
-
-
-packages = []
-data_files = {}
-source_dir = os.path.dirname(__file__)
-for root, dirs, files in os.walk('$package_name'):
- rel_dir = os.path.relpath(root, source_dir)
- data = []
- if '__init__.py' in files:
- for f in files:
- if os.path.splitext(f)[1] not in ['.py', '.pyc', '.pyo']:
- data.append(f)
- package_name = rel_dir.replace(os.sep, '.')
- package_dir = root
- packages.append(package_name)
- data_files[package_name] = data
- else:
- # use previous package name
- filepaths = [os.path.join(root, f) for f in files]
- data_files[package_name].extend([os.path.relpath(f, package_dir) for f in filepaths])
-
-params = dict(
- name='$package_name',
- version='0.0.1',
- packages=packages,
- package_data=data_files,
- url='N/A',
- maintainer='$user',
- maintainer_email='$user@example.com',
- install_requires=[
- 'wlauto',
- ],
- # https://pypi.python.org/pypi?%3Aaction=list_classifiers
- classifiers=[
- 'Development Status :: 3 - Alpha',
- 'Environment :: Console',
- 'License :: Other/Proprietary License',
- 'Operating System :: Unix',
- 'Programming Language :: Python :: 2.7',
- ],
-)
-
-
-def update_wa_packages():
- sudo_user = os.getenv('SUDO_USER')
- if sudo_user:
- user_entry = pwd.getpwnam(sudo_user)
- os.setgid(user_entry.pw_gid)
- os.setuid(user_entry.pw_uid)
- env_root = os.getenv('WA_USER_DIRECTORY', os.path.join(os.path.expanduser('~'), '.workload_automation'))
- if not os.path.isdir(env_root):
- os.makedirs(env_root)
- wa_packages_file = os.path.join(env_root, 'packages')
- if os.path.isfile(wa_packages_file):
- with open(wa_packages_file, 'r') as wfh:
- package_list = wfh.read().split()
- if params['name'] not in package_list:
- package_list.append(params['name'])
- else: # no existing package file
- package_list = [params['name']]
- with open(wa_packages_file, 'w') as wfh:
- wfh.write('\n'.join(package_list))
-
-
-class install(orig_install):
-
- def run(self):
- orig_install.run(self)
- # Must be done in a separate process because will drop privileges if
- # sudo, and won't be able to reacquire them.
- p = Process(target=update_wa_packages)
- p.start()
- p.join()
-
-
-params['cmdclass'] = {'install': install}
-
-
-setup(**params)
diff --git a/wlauto/commands/templates/uiauto_workload b/wlauto/commands/templates/uiauto_workload
deleted file mode 100644
index 66cc193a..00000000
--- a/wlauto/commands/templates/uiauto_workload
+++ /dev/null
@@ -1,35 +0,0 @@
-from wlauto import UiAutomatorWorkload, Parameter
-
-
-class ${class_name}(UiAutomatorWorkload):
-
- name = '${name}'
- description = "This is an placeholder description"
-
- parameters = [
- # Workload parameters go here e.g.
- Parameter('Example parameter', kind=int, allowed_values=[1,2,3], default=1, override=True, mandatory=False,
- description='This is an example parameter')
- ]
-
- def setup(self, context):
- super(${class_name}, self).setup(context)
- # Perform any necessary setup before starting the UI automation
- # e.g. copy files to the device, start apps, reset logs, etc.
-
-
- def update_result(self, context):
- pass
- # Process workload execution artifacts to extract metrics
- # and add them to the run result using
- # context.result.add_metric()
-
- def teardown(self, context):
- super(${class_name}, self).teardown(context)
- # Preform any necessary cleanup
-
- def validate(self):
- pass
- # Validate inter-parameter assumptions etc
-
-
diff --git a/wlauto/common/__init__.py b/wlauto/common/__init__.py
deleted file mode 100644
index cd5d64d6..00000000
--- a/wlauto/common/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/common/android/BaseUiAutomation.class b/wlauto/common/android/BaseUiAutomation.class
deleted file mode 100644
index 2683f453..00000000
--- a/wlauto/common/android/BaseUiAutomation.class
+++ /dev/null
Binary files differ
diff --git a/wlauto/common/android/__init__.py b/wlauto/common/android/__init__.py
deleted file mode 100644
index 16224d6f..00000000
--- a/wlauto/common/android/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/common/android/resources.py b/wlauto/common/android/resources.py
deleted file mode 100644
index 95a84cbf..00000000
--- a/wlauto/common/android/resources.py
+++ /dev/null
@@ -1,40 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto.common.resources import FileResource
-
-
-class ReventFile(FileResource):
-
- name = 'revent'
-
- def __init__(self, owner, stage):
- super(ReventFile, self).__init__(owner)
- self.stage = stage
-
-
-class JarFile(FileResource):
-
- name = 'jar'
-
-
-class ApkFile(FileResource):
-
- name = 'apk'
-
- def __init__(self, owner, version):
- super(ApkFile, self).__init__(owner)
- self.version = version
diff --git a/wlauto/common/android/workload.py b/wlauto/common/android/workload.py
deleted file mode 100644
index bee513ec..00000000
--- a/wlauto/common/android/workload.py
+++ /dev/null
@@ -1,506 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import sys
-import time
-
-from wlauto.core.plugin import Parameter
-from wlauto.core.workload import Workload
-from wlauto.core.resource import NO_ONE
-from wlauto.common.resources import PluginAsset, Executable
-from wlauto.exceptions import WorkloadError, ResourceError, ConfigError
-from wlauto.utils.android import ApkInfo, ANDROID_NORMAL_PERMISSIONS
-from wlauto.utils.types import boolean
-import wlauto.common.android.resources
-
-
-DELAY = 5
-
-
-class UiAutomatorWorkload(Workload):
- """
- Base class for all workloads that rely on a UI Automator JAR file.
-
- This class should be subclassed by workloads that rely on android UiAutomator
- to work. This class handles transferring the UI Automator JAR file to the device
- and invoking it to run the workload. By default, it will look for the JAR file in
- the same directory as the .py file for the workload (this can be changed by overriding
- the ``uiauto_file`` property in the subclassing workload).
-
- To inintiate UI Automation, the fully-qualified name of the Java class and the
- corresponding method name are needed. By default, the package part of the class name
- is derived from the class file, and class and method names are ``UiAutomation``
- and ``runUiAutomaton`` respectively. If you have generated the boilder plate for the
- UiAutomatior code using ``create_workloads`` utility, then everything should be named
- correctly. If you're creating the Java project manually, you need to make sure the names
- match what is expected, or you could override ``uiauto_package``, ``uiauto_class`` and
- ``uiauto_method`` class attributes with the value that match your Java code.
-
- You can also pass parameters to the JAR file. To do this add the parameters to
- ``self.uiauto_params`` dict inside your class's ``__init__`` or ``setup`` methods.
-
- """
-
- supported_platforms = ['android']
-
- uiauto_package = ''
- uiauto_class = 'UiAutomation'
- uiauto_method = 'runUiAutomation'
-
- # Can be overidden by subclasses to adjust to run time of specific
- # benchmarks.
- run_timeout = 4 * 60 # seconds
-
- def __init__(self, device, _call_super=True, **kwargs): # pylint: disable=W0613
- if _call_super:
- super(UiAutomatorWorkload, self).__init__(device, **kwargs)
- self.uiauto_file = None
- self.device_uiauto_file = None
- self.command = None
- self.uiauto_params = {}
-
- def init_resources(self, context):
- self.uiauto_file = context.resolver.get(wlauto.common.android.resources.JarFile(self))
- if not self.uiauto_file:
- raise ResourceError('No UI automation JAR file found for workload {}.'.format(self.name))
- self.device_uiauto_file = self.device.path.join(self.device.working_directory,
- os.path.basename(self.uiauto_file))
- if not self.uiauto_package:
- self.uiauto_package = os.path.splitext(os.path.basename(self.uiauto_file))[0]
-
- def setup(self, context):
- method_string = '{}.{}#{}'.format(self.uiauto_package, self.uiauto_class, self.uiauto_method)
- params_dict = self.uiauto_params
- params_dict['workdir'] = self.device.working_directory
- params = ''
- for k, v in self.uiauto_params.iteritems():
- params += ' -e {} {}'.format(k, v)
- self.command = 'uiautomator runtest {}{} -c {}'.format(self.device_uiauto_file, params, method_string)
- self.device.push(self.uiauto_file, self.device_uiauto_file)
- self.device.killall('uiautomator')
-
- def run(self, context):
- result = self.device.execute(self.command, self.run_timeout)
- if 'FAILURE' in result:
- raise WorkloadError(result)
- else:
- self.logger.debug(result)
- time.sleep(DELAY)
-
- def update_result(self, context):
- pass
-
- def teardown(self, context):
- self.device.remove(self.device_uiauto_file)
-
- def validate(self):
- if not self.uiauto_file:
- raise WorkloadError('No UI automation JAR file found for workload {}.'.format(self.name))
- if not self.uiauto_package:
- raise WorkloadError('No UI automation package specified for workload {}.'.format(self.name))
-
-
-class ApkWorkload(Workload):
- """
- A workload based on an APK file.
-
- Defines the following attributes:
-
- :package: The package name of the app. This is usually a Java-style name of the form
- ``com.companyname.appname``.
- :activity: This is the initial activity of the app. This will be used to launch the
- app during the setup.
- :view: The class of the main view pane of the app. This needs to be defined in order
- to collect SurfaceFlinger-derived statistics (such as FPS) for the app, but
- may otherwise be left as ``None``.
- :install_timeout: Timeout for the installation of the APK. This may vary wildly based on
- the size and nature of a specific APK, and so should be defined on
- per-workload basis.
-
- .. note:: To a lesser extent, this will also vary based on the the
- device and the nature of adb connection (USB vs Ethernet),
- so, as with all timeouts, so leeway must be included in
- the specified value.
-
- .. note:: Both package and activity for a workload may be obtained from the APK using
- the ``aapt`` tool that comes with the ADT (Android Developemnt Tools) bundle.
-
- """
- package = None
- activity = None
- view = None
- supported_platforms = ['android']
-
- parameters = [
- Parameter('install_timeout', kind=int, default=300,
- description='Timeout for the installation of the apk.'),
- Parameter('check_apk', kind=boolean, default=True,
- description='''
- Discover the APK for this workload on the host, and check that
- the version matches the one on device (if already installed).
- '''),
- Parameter('force_install', kind=boolean, default=False,
- description='''
- Always re-install the APK, even if matching version is found
- on already installed on the device.
- '''),
- Parameter('uninstall_apk', kind=boolean, default=False,
- description='If ``True``, will uninstall workload\'s APK as part of teardown.'),
- ]
-
- def __init__(self, device, _call_super=True, **kwargs):
- if _call_super:
- super(ApkWorkload, self).__init__(device, **kwargs)
- self.apk_file = None
- self.apk_version = None
- self.logcat_log = None
-
- def init_resources(self, context):
- self.apk_file = context.resolver.get(wlauto.common.android.resources.ApkFile(self),
- version=getattr(self, 'version', None),
- strict=self.check_apk)
-
- def validate(self):
- if self.check_apk:
- if not self.apk_file:
- raise WorkloadError('No APK file found for workload {}.'.format(self.name))
- else:
- if self.force_install:
- raise ConfigError('force_install cannot be "True" when check_apk is set to "False".')
-
- def setup(self, context):
- self.initialize_package(context)
- self.start_activity()
- self.device.execute('am kill-all') # kill all *background* activities
- self.device.clear_logcat()
-
- def initialize_package(self, context):
- installed_version = self.device.get_package_version(self.package)
- if self.check_apk:
- self.initialize_with_host_apk(context, installed_version)
- else:
- if not installed_version:
- message = '''{} not found found on the device and check_apk is set to "False"
- so host version was not checked.'''
- raise WorkloadError(message.format(self.package))
- message = 'Version {} installed on device; skipping host APK check.'
- self.logger.debug(message.format(installed_version))
- self.reset(context)
- self.apk_version = installed_version
-
- def initialize_with_host_apk(self, context, installed_version):
- host_version = ApkInfo(self.apk_file).version_name
- if installed_version != host_version:
- if installed_version:
- message = '{} host version: {}, device version: {}; re-installing...'
- self.logger.debug(message.format(os.path.basename(self.apk_file),
- host_version, installed_version))
- else:
- message = '{} host version: {}, not found on device; installing...'
- self.logger.debug(message.format(os.path.basename(self.apk_file),
- host_version))
- self.force_install = True # pylint: disable=attribute-defined-outside-init
- else:
- message = '{} version {} found on both device and host.'
- self.logger.debug(message.format(os.path.basename(self.apk_file),
- host_version))
- if self.force_install:
- if installed_version:
- self.device.uninstall(self.package)
- self.install_apk(context)
- else:
- self.reset(context)
- self.apk_version = host_version
-
- def start_activity(self):
- output = self.device.execute('am start -W -n {}/{}'.format(self.package, self.activity))
- if 'Error:' in output:
- self.device.execute('am force-stop {}'.format(self.package)) # this will dismiss any erro dialogs
- raise WorkloadError(output)
- self.logger.debug(output)
-
- def reset(self, context): # pylint: disable=W0613
- self.device.execute('am force-stop {}'.format(self.package))
- self.device.execute('pm clear {}'.format(self.package))
-
- # As of android API level 23, apps can request permissions at runtime,
- # this will grant all of them so requests do not pop up when running the app
- if self.device.os_version['sdk'] >= 23:
- self._grant_requested_permissions()
-
- def install_apk(self, context):
- output = self.device.install(self.apk_file, self.install_timeout)
- if 'Failure' in output:
- if 'ALREADY_EXISTS' in output:
- self.logger.warn('Using already installed APK (did not unistall properly?)')
- else:
- raise WorkloadError(output)
- else:
- self.logger.debug(output)
- self.do_post_install(context)
-
- def _grant_requested_permissions(self):
- dumpsys_output = self.device.execute(command="dumpsys package {}".format(self.package))
- permissions = []
- lines = iter(dumpsys_output.splitlines())
- for line in lines:
- if "requested permissions:" in line:
- break
-
- for line in lines:
- if "android.permission." in line:
- permissions.append(line.split(":")[0].strip())
- else:
- break
-
- for permission in permissions:
- # "Normal" Permisions are automatically granted and cannot be changed
- permission_name = permission.rsplit('.', 1)[1]
- if permission_name not in ANDROID_NORMAL_PERMISSIONS:
- self.device.execute("pm grant {} {}".format(self.package, permission))
-
- def do_post_install(self, context):
- """ May be overwritten by dervied classes."""
- pass
-
- def run(self, context):
- pass
-
- def update_result(self, context):
- self.logcat_log = os.path.join(context.output_directory, 'logcat.log')
- context.device_manager.dump_logcat(self.logcat_log)
- context.add_iteration_artifact(name='logcat',
- path='logcat.log',
- kind='log',
- description='Logact dump for the run.')
-
- def teardown(self, context):
- self.device.execute('am force-stop {}'.format(self.package))
- if self.uninstall_apk:
- self.device.uninstall(self.package)
-
-
-AndroidBenchmark = ApkWorkload # backward compatibility
-
-
-class ReventWorkload(Workload):
-
- default_setup_timeout = 5 * 60 # in seconds
- default_run_timeout = 10 * 60 # in seconds
-
- @property
- def on_device_setup_revent(self):
- return self.device.get_workpath('{}.setup.revent'.format(self.device.model))
-
- @property
- def on_device_run_revent(self):
- return self.device.get_workpath('{}.run.revent'.format(self.device.model))
-
- def __init__(self, device, _call_super=True, **kwargs):
- if _call_super:
- super(ReventWorkload, self).__init__(device, **kwargs)
- self.on_device_revent_binary = None
- self.setup_timeout = kwargs.get('setup_timeout', self.default_setup_timeout)
- self.run_timeout = kwargs.get('run_timeout', self.default_run_timeout)
- self.revent_setup_file = None
- self.revent_run_file = None
-
- def initialize(self, context):
- self.revent_setup_file = context.resolver.get(wlauto.common.android.resources.ReventFile(self, 'setup'))
- self.revent_run_file = context.resolver.get(wlauto.common.android.resources.ReventFile(self, 'run'))
-
- def setup(self, context):
- self._check_revent_files(context)
- self.device.killall('revent')
- command = '{} replay {}'.format(self.on_device_revent_binary, self.on_device_setup_revent)
- self.device.execute(command, timeout=self.setup_timeout)
-
- def run(self, context):
- command = '{} replay {}'.format(self.on_device_revent_binary, self.on_device_run_revent)
- self.logger.debug('Replaying {}'.format(os.path.basename(self.on_device_run_revent)))
- self.device.execute(command, timeout=self.run_timeout)
- self.logger.debug('Replay completed.')
-
- def update_result(self, context):
- pass
-
- def teardown(self, context):
- self.device.remove(self.on_device_setup_revent)
- self.device.remove(self.on_device_run_revent)
-
- def _check_revent_files(self, context):
- # check the revent binary
- revent_binary = context.resolver.get(Executable(NO_ONE, self.device.abi, 'revent'))
- if not os.path.isfile(revent_binary):
- message = '{} does not exist. '.format(revent_binary)
- message += 'Please build revent for your system and place it in that location'
- raise WorkloadError(message)
- if not self.revent_setup_file:
- # pylint: disable=too-few-format-args
- message = '{0}.setup.revent file does not exist, Please provide one for your device, {0}'.format(self.device.name)
- raise WorkloadError(message)
- if not self.revent_run_file:
- # pylint: disable=too-few-format-args
- message = '{0}.run.revent file does not exist, Please provide one for your device, {0}'.format(self.device.name)
- raise WorkloadError(message)
-
- self.on_device_revent_binary = self.device.install_executable(revent_binary)
- self.device.push(self.revent_run_file, self.on_device_run_revent)
- self.device.push(self.revent_setup_file, self.on_device_setup_revent)
-
-
-class AndroidUiAutoBenchmark(UiAutomatorWorkload, AndroidBenchmark):
-
- supported_platforms = ['android']
-
- def __init__(self, device, **kwargs):
- UiAutomatorWorkload.__init__(self, device, **kwargs)
- AndroidBenchmark.__init__(self, device, _call_super=False, **kwargs)
-
- def init_resources(self, context):
- UiAutomatorWorkload.init_resources(self, context)
- AndroidBenchmark.init_resources(self, context)
-
- def setup(self, context):
- UiAutomatorWorkload.setup(self, context)
- AndroidBenchmark.setup(self, context)
-
- def update_result(self, context):
- UiAutomatorWorkload.update_result(self, context)
- AndroidBenchmark.update_result(self, context)
-
- def teardown(self, context):
- UiAutomatorWorkload.teardown(self, context)
- AndroidBenchmark.teardown(self, context)
-
-
-class GameWorkload(ApkWorkload, ReventWorkload):
- """
- GameWorkload is the base class for all the workload that use revent files to
- run.
-
- For more in depth details on how to record revent files, please see
- :ref:`revent_files_creation`. To subclass this class, please refer to
- :ref:`GameWorkload`.
-
- Additionally, this class defines the following attributes:
-
- :asset_file: A tarball containing additional assets for the workload. These are the assets
- that are not part of the APK but would need to be downloaded by the workload
- (usually, on first run of the app). Since the presence of a network connection
- cannot be assumed on some devices, this provides an alternative means of obtaining
- the assets.
- :saved_state_file: A tarball containing the saved state for a workload. This tarball gets
- deployed in the same way as the asset file. The only difference being that
- it is usually much slower and re-deploying the tarball should alone be
- enough to reset the workload to a known state (without having to reinstall
- the app or re-deploy the other assets).
- :loading_time: Time it takes for the workload to load after the initial activity has been
- started.
-
- """
-
- # May be optionally overwritten by subclasses
- asset_file = None
- saved_state_file = None
- view = 'SurfaceView'
- loading_time = 10
- supported_platforms = ['android']
-
- parameters = [
- Parameter('install_timeout', default=500, override=True),
- Parameter('assets_push_timeout', kind=int, default=500,
- description='Timeout used during deployment of the assets package (if there is one).'),
- Parameter('clear_data_on_reset', kind=bool, default=True,
- description="""
- If set to ``False``, this will prevent WA from clearing package
- data for this workload prior to running it.
- """),
- ]
-
- def __init__(self, device, **kwargs): # pylint: disable=W0613
- ApkWorkload.__init__(self, device, **kwargs)
- ReventWorkload.__init__(self, device, _call_super=False, **kwargs)
- self.logcat_process = None
- self.module_dir = os.path.dirname(sys.modules[self.__module__].__file__)
- self.revent_dir = os.path.join(self.module_dir, 'revent_files')
-
- def apk_init_resources(self, context):
- ApkWorkload.init_resources(self, context)
-
- def init_resources(self, context):
- self.apk_init_resources(context)
- ReventWorkload.init_resources(self, context)
-
- def setup(self, context):
- ApkWorkload.setup(self, context)
- self.logger.debug('Waiting for the game to load...')
- time.sleep(self.loading_time)
- ReventWorkload.setup(self, context)
-
- def do_post_install(self, context):
- ApkWorkload.do_post_install(self, context)
- self._deploy_assets(context, self.assets_push_timeout)
-
- def reset(self, context):
- # If saved state exists, restore it; if not, do full
- # uninstall/install cycle.
- self.device.execute('am force-stop {}'.format(self.package))
- if self.saved_state_file:
- self._deploy_resource_tarball(context, self.saved_state_file)
- else:
- if self.clear_data_on_reset:
- self.device.execute('pm clear {}'.format(self.package))
- self._deploy_assets(context)
-
- def run(self, context):
- ReventWorkload.run(self, context)
-
- def apk_teardown(self, context):
- if not self.saved_state_file:
- ApkWorkload.teardown(self, context)
- else:
- self.device.execute('am force-stop {}'.format(self.package))
-
- def teardown(self, context):
- self.apk_teardown(context)
- ReventWorkload.teardown(self, context)
-
- def _deploy_assets(self, context, timeout=300):
- if self.asset_file:
- self._deploy_resource_tarball(context, self.asset_file, timeout)
- if self.saved_state_file: # must be deployed *after* asset tarball!
- self._deploy_resource_tarball(context, self.saved_state_file, timeout)
-
- def _deploy_resource_tarball(self, context, resource_file, timeout=300):
- kind = 'data'
- if ':' in resource_file:
- kind, resource_file = resource_file.split(':', 1)
- ondevice_cache = self.device.path.join(self.device.working_directory, '.cache', self.name, resource_file)
- if not self.device.file_exists(ondevice_cache):
- asset_tarball = context.resolver.get(PluginAsset(self, resource_file))
- if not asset_tarball:
- message = 'Could not find resource {} for workload {}.'
- raise WorkloadError(message.format(resource_file, self.name))
- # adb push will create intermediate directories if they don't
- # exist.
- self.device.push(asset_tarball, ondevice_cache, timeout=timeout)
-
- device_asset_directory = self.device.path.join(context.device_manager.external_storage_directory, 'Android', kind)
- deploy_command = 'cd {} && {} tar -xzf {}'.format(device_asset_directory,
- self.device.busybox,
- ondevice_cache)
- self.device.execute(deploy_command, timeout=timeout, as_root=True)
diff --git a/wlauto/common/bin/arm64/busybox b/wlauto/common/bin/arm64/busybox
deleted file mode 100755
index 6d09a079..00000000
--- a/wlauto/common/bin/arm64/busybox
+++ /dev/null
Binary files differ
diff --git a/wlauto/common/bin/arm64/m5 b/wlauto/common/bin/arm64/m5
deleted file mode 100755
index 45d604d5..00000000
--- a/wlauto/common/bin/arm64/m5
+++ /dev/null
Binary files differ
diff --git a/wlauto/common/bin/arm64/revent b/wlauto/common/bin/arm64/revent
deleted file mode 100755
index 1b1b5e8c..00000000
--- a/wlauto/common/bin/arm64/revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/common/bin/armeabi/busybox b/wlauto/common/bin/armeabi/busybox
deleted file mode 100755
index 1714d40a..00000000
--- a/wlauto/common/bin/armeabi/busybox
+++ /dev/null
Binary files differ
diff --git a/wlauto/common/bin/armeabi/m5 b/wlauto/common/bin/armeabi/m5
deleted file mode 100755
index 43290079..00000000
--- a/wlauto/common/bin/armeabi/m5
+++ /dev/null
Binary files differ
diff --git a/wlauto/common/bin/armeabi/revent b/wlauto/common/bin/armeabi/revent
deleted file mode 100755
index f908b4d3..00000000
--- a/wlauto/common/bin/armeabi/revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/common/bin/x86_64/busybox b/wlauto/common/bin/x86_64/busybox
deleted file mode 100644
index 9460e830..00000000
--- a/wlauto/common/bin/x86_64/busybox
+++ /dev/null
Binary files differ
diff --git a/wlauto/common/gem5/LICENSE b/wlauto/common/gem5/LICENSE
deleted file mode 100644
index 92b96dbc..00000000
--- a/wlauto/common/gem5/LICENSE
+++ /dev/null
@@ -1,6 +0,0 @@
-The gem5 simulator can be obtained from http://repo.gem5.org/gem5/ and the
-corresponding documentation can be found at http://www.gem5.org.
-
-The source for the m5 binaries bundled with Workload Automation (found at
-wlauto/common/bin/arm64/m5 and wlauto/common/bin/armeabi/m5) can be found at
-util/m5 in the gem5 source at http://repo.gem5.org/gem5/. \ No newline at end of file
diff --git a/wlauto/common/gem5/__init__.py b/wlauto/common/gem5/__init__.py
deleted file mode 100644
index 6300329b..00000000
--- a/wlauto/common/gem5/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/common/resources.py b/wlauto/common/resources.py
deleted file mode 100644
index 5f19b068..00000000
--- a/wlauto/common/resources.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import os
-
-from wlauto.core.resource import Resource
-
-
-class FileResource(Resource):
- """
- Base class for all resources that are a regular file in the
- file system.
-
- """
-
- def delete(self, instance):
- os.remove(instance)
-
-
-class File(FileResource):
-
- name = 'file'
-
- def __init__(self, owner, path, url=None):
- super(File, self).__init__(owner)
- self.path = path
- self.url = url
-
- def __str__(self):
- return '<{}\'s {} {}>'.format(self.owner, self.name, self.path or self.url)
-
-
-class PluginAsset(File):
-
- name = 'plugin_asset'
-
- def __init__(self, owner, path):
- super(PluginAsset, self).__init__(owner, os.path.join(owner.name, path))
-
-
-class Executable(FileResource):
-
- name = 'executable'
-
- def __init__(self, owner, platform, filename):
- super(Executable, self).__init__(owner)
- self.platform = platform
- self.filename = filename
-
- def __str__(self):
- return '<{}\'s {} {}>'.format(self.owner, self.platform, self.filename)
diff --git a/wlauto/config_example.py b/wlauto/config_example.py
deleted file mode 100644
index bea7ef07..00000000
--- a/wlauto/config_example.py
+++ /dev/null
@@ -1,289 +0,0 @@
-"""
-Default config for Workload Automation. DO NOT MODIFY this file. This file
-gets copied to ~/.workload_automation/config.py on initial run of run_workloads.
-Add your configuration to that file instead.
-
-"""
-# *** WARNING: ***
-# Configuration listed in this file is NOT COMPLETE. This file sets the default
-# configuration for WA and gives EXAMPLES of other configuration available. It
-# is not supposed to be an exhaustive list.
-# PLEASE REFER TO WA DOCUMENTATION FOR THE COMPLETE LIST OF AVAILABLE
-# EXTENSIONS AND THEIR configuration.
-
-
-# This defines when the device will be rebooted during Workload Automation execution. #
-# #
-# Valid policies are: #
-# never: The device will never be rebooted. #
-# as_needed: The device will only be rebooted if the need arises (e.g. if it #
-# becomes unresponsive #
-# initial: The device will be rebooted when the execution first starts, just before executing #
-# the first workload spec. #
-# each_spec: The device will be rebooted before running a new workload spec. #
-# each_iteration: The device will be rebooted before each new iteration. #
-# #
-reboot_policy = 'as_needed'
-
-# Defines the order in which the agenda spec will be executed. At the moment, #
-# the following execution orders are supported: #
-# #
-# by_iteration: The first iteration of each workload spec is executed one ofter the other, #
-# so all workloads are executed before proceeding on to the second iteration. #
-# This is the default if no order is explicitly specified. #
-# If multiple sections were specified, this will also split them up, so that specs #
-# in the same section are further apart in the execution order. #
-# by_section: Same as "by_iteration", but runn specs from the same section one after the other #
-# by_spec: All iterations of the first spec are executed before moving on to the next #
-# spec. This may also be specified as ``"classic"``, as this was the way #
-# workloads were executed in earlier versions of WA. #
-# random: Randomisizes the order in which specs run. #
-execution_order = 'by_iteration'
-
-
-# This indicates when a job will be re-run.
-# Possible values:
-# OK: This iteration has completed and no errors have been detected
-# PARTIAL: One or more instruments have failed (the iteration may still be running).
-# FAILED: The workload itself has failed.
-# ABORTED: The user interupted the workload
-#
-# If set to an empty list, a job will not be re-run ever.
-retry_on_status = ['FAILED', 'PARTIAL']
-
-# How many times a job will be re-run before giving up
-max_retries = 3
-
-####################################################################################################
-######################################### Device Settings ##########################################
-####################################################################################################
-# Specify the device you want to run workload automation on. This must be a #
-# string with the ID of the device. At the moment, only 'TC2' is supported. #
-# #
-device = 'generic_android'
-
-# Configuration options that will be passed onto the device. These are obviously device-specific, #
-# so check the documentation for the particular device to find out which options and values are #
-# valid. The settings listed below are common to all devices #
-# #
-device_config = dict(
- # The name used by adb to identify the device. Use "adb devices" in bash to list
- # the devices currently seen by adb.
- #adb_name='10.109.173.2:5555',
-
- # The directory on the device that WA will use to push files to
- #working_directory='/sdcard/wa-working',
-
- # This specifies the device's CPU cores. The order must match how they
- # appear in cpufreq. The example below is for TC2.
- # core_names = ['a7', 'a7', 'a7', 'a15', 'a15']
-
- # Specifies cluster mapping for the device's cores.
- # core_clusters = [0, 0, 0, 1, 1]
-)
-
-
-####################################################################################################
-################################### Instrumention Configuration ####################################
-####################################################################################################
-# This defines the additionnal instrumentation that will be enabled during workload execution, #
-# which in turn determines what additional data (such as /proc/interrupts content or Streamline #
-# traces) will be available in the results directory. #
-# #
-instrumentation = [
- # Records the time it took to run the workload
- 'execution_time',
-
- # Collects /proc/interrupts before and after execution and does a diff.
- 'interrupts',
-
- # Collects the contents of/sys/devices/system/cpu before and after execution and does a diff.
- 'cpufreq',
-
- # Gets energy usage from the workload form HWMON devices
- # NOTE: the hardware needs to have the right sensors in order for this to work
- #'hwmon',
-
- # Run perf in the background during workload execution and then collect the results. perf is a
- # standard Linux performance analysis tool.
- #'perf',
-
- # Collect Streamline traces during workload execution. Streamline is part of DS-5
- #'streamline',
-
- # Collects traces by interacting with Ftrace Linux kernel internal tracer
- #'trace-cmd',
-
- # Obtains the power consumption of the target device's core measured by National Instruments
- # Data Acquisition(DAQ) device.
- #'daq',
-
- # Collects CCI counter data.
- #'cci_pmu_logger',
-
- # Collects FPS (Frames Per Second) and related metrics (such as jank) from
- # the View of the workload (Note: only a single View per workload is
- # supported at the moment, so this is mainly useful for games).
- #'fps',
-]
-
-
-####################################################################################################
-################################# Result Processors Configuration ##################################
-####################################################################################################
-# Specifies how results will be processed and presented. #
-# #
-result_processors = [
- # Creates a status.txt that provides a summary status for the run
- 'status',
-
- # Creates a results.txt file for each iteration that lists all collected metrics
- # in "name = value (units)" format
- 'standard',
-
- # Creates a results.csv that contains metrics for all iterations of all workloads
- # in the .csv format.
- 'csv',
-
- # Creates a summary.csv that contains summary metrics for all iterations of all
- # all in the .csv format. Summary metrics are defined on per-worklod basis
- # are typically things like overall scores. The contents of summary.csv are
- # always a subset of the contents of results.csv (if it is generated).
- #'summary_csv',
-
- # Creates a results.csv that contains metrics for all iterations of all workloads
- # in the JSON format
- #'json',
-
- # Write results to an sqlite3 database. By default, a new database will be
- # generated for each run, however it is possible to specify a path to an
- # existing DB file (see result processor configuration below), in which
- # case results from multiple runs may be stored in the one file.
- #'sqlite',
-]
-
-
-####################################################################################################
-################################### Logging output Configuration ###################################
-####################################################################################################
-# Specify the format of logging messages. The format uses the old formatting syntax: #
-# #
-# http://docs.python.org/2/library/stdtypes.html#string-formatting-operations #
-# #
-# The attributes that can be used in formats are listested here: #
-# #
-# http://docs.python.org/2/library/logging.html#logrecord-attributes #
-# #
-logging = {
- # Log file format
- 'file format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
- # Verbose console output format
- 'verbose format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
- # Regular console output format
- 'regular format': '%(levelname)-8s %(message)s',
- # Colouring the console output
- 'colour_enabled': True,
-}
-
-
-####################################################################################################
-#################################### Instruments Configuration #####################################
-####################################################################################################
-# Instrumention Configuration is related to specific insturment's settings. Some of the #
-# instrumentations require specific settings in order for them to work. These settings are #
-# specified here. #
-# Note that these settings only take effect if the corresponding instrument is
-# enabled above.
-
-####################################################################################################
-######################################## perf configuration ########################################
-
-# The hardware events such as instructions executed, cache-misses suffered, or branches
-# mispredicted to be reported by perf. Events can be obtained from the device by tpying
-# 'perf list'.
-#perf_events = ['migrations', 'cs']
-
-# The perf options which can be obtained from man page for perf-record
-#perf_options = '-a -i'
-
-####################################################################################################
-####################################### hwmon configuration ########################################
-
-# The kinds of sensors hwmon instrument will look for
-#hwmon_sensors = ['energy', 'temp']
-
-####################################################################################################
-###################################### trace-cmd configuration #####################################
-
-# trace-cmd events to be traced. The events can be found by rooting on the device then type
-# 'trace-cmd list -e'
-#trace_events = ['power*']
-
-####################################################################################################
-######################################### DAQ configuration ########################################
-
-# The host address of the machine that runs the daq Server which the insturment communicates with
-#daq_server_host = '10.1.17.56'
-
-# The port number for daq Server in which daq insturment communicates with
-#daq_server_port = 56788
-
-# The values of resistors 1 and 2 (in Ohms) across which the voltages are measured
-#daq_resistor_values = [0.002, 0.002]
-
-####################################################################################################
-################################### cci_pmu_logger configuration ###################################
-
-# The events to be counted by PMU
-# NOTE: The number of events must not exceed the number of counters available (which is 4 for CCI-400)
-#cci_pmu_events = ['0x63', '0x83']
-
-# The name of the events which will be used when reporting PMU counts
-#cci_pmu_event_labels = ['event_0x63', 'event_0x83']
-
-# The period (in jiffies) between counter reads
-#cci_pmu_period = 15
-
-####################################################################################################
-################################### fps configuration ##############################################
-
-# Data points below this FPS will dropped as not constituting "real" gameplay. The assumption
-# being that while actually running, the FPS in the game will not drop below X frames per second,
-# except on loading screens, menus, etc, which should not contribute to FPS calculation.
-#fps_drop_threshold=5
-
-# If set to True, this will keep the raw dumpsys output in the results directory (this is maily
-# used for debugging). Note: frames.csv with collected frames data will always be generated
-# regardless of this setting.
-#fps_keep_raw=False
-
-####################################################################################################
-################################# Result Processor Configuration ###################################
-####################################################################################################
-
-# Specifies an alternative database to store results in. If the file does not
-# exist, it will be created (the directiory of the file must exist however). If
-# the file does exist, the results will be added to the existing data set (each
-# run as a UUID, so results won't clash even if identical agendas were used).
-# Note that in order for this to work, the version of the schema used to generate
-# the DB file must match that of the schema used for the current run. Please
-# see "What's new" secition in WA docs to check if the schema has changed in
-# recent releases of WA.
-#sqlite_database = '/work/results/myresults.sqlite'
-
-# If the file specified by sqlite_database exists, setting this to True will
-# cause that file to be overwritten rather than updated -- existing results in
-# the file will be lost.
-#sqlite_overwrite = False
-
-# distribution: internal
-
-####################################################################################################
-#################################### Resource Getter configuration #################################
-####################################################################################################
-
-# The location on your system where /arm/scratch is mounted. Used by
-# Scratch resource getter.
-#scratch_mount_point = '/arm/scratch'
-
-# end distribution
diff --git a/wlauto/core/__init__.py b/wlauto/core/__init__.py
deleted file mode 100644
index cd5d64d6..00000000
--- a/wlauto/core/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/core/command.py b/wlauto/core/command.py
deleted file mode 100644
index 4b2a7d93..00000000
--- a/wlauto/core/command.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import textwrap
-
-from wlauto.core.plugin import Plugin
-from wlauto.utils.doc import format_body
-from wlauto.core.version import get_wa_version
-
-
-def init_argument_parser(parser):
- parser.add_argument('-c', '--config', action='append', default=[],
- help='specify an additional config.py')
- parser.add_argument('-v', '--verbose', action='count',
- help='The scripts will produce verbose output.')
- parser.add_argument('--version', action='version',
- version='%(prog)s {}'.format(get_wa_version()))
- return parser
-
-
-class Command(Plugin):
- """
- Defines a Workload Automation command. This will be executed from the
- command line as ``wa <command> [args ...]``. This defines the name to be
- used when invoking wa, the code that will actually be executed on
- invocation and the argument parser to be used to parse the reset of the
- command line arguments.
-
- """
- kind = "command"
- help = None
- usage = None
- description = None
- epilog = None
- formatter_class = None
-
- def __init__(self, subparsers):
- super(Command, self).__init__()
- self.group = subparsers
- parser_params = dict(help=(self.help or self.description), usage=self.usage,
- description=format_body(textwrap.dedent(self.description), 80),
- epilog=self.epilog)
- if self.formatter_class:
- parser_params['formatter_class'] = self.formatter_class
- self.parser = subparsers.add_parser(self.name, **parser_params)
- init_argument_parser(self.parser) # propagate top-level options
- self.initialize(None)
-
- def initialize(self, context):
- """
- Perform command-specific initialisation (e.g. adding command-specific
- options to the command's parser). ``context`` is always ``None``.
-
- """
- pass
-
- def execute(self, state, args):
- """
- Execute this command.
-
- :state: An initialized ``ConfigManager`` that contains the current state of
- WA exeuction up to that point (processed configuraition, loaded
- plugins, etc).
- :args: An ``argparse.Namespace`` containing command line arguments (as returned by
- ``argparse.ArgumentParser.parse_args()``. This would usually be the result of
- invoking ``self.parser``.
-
- """
- raise NotImplementedError()
diff --git a/wlauto/core/configuration/__init__.py b/wlauto/core/configuration/__init__.py
deleted file mode 100644
index a3593794..00000000
--- a/wlauto/core/configuration/__init__.py
+++ /dev/null
@@ -1,19 +0,0 @@
-# Copyright 2013-2016 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-from wlauto.core.configuration.configuration import (settings,
- RunConfiguration,
- JobGenerator,
- ConfigurationPoint)
-from wlauto.core.configuration.plugin_cache import PluginCache
diff --git a/wlauto/core/configuration/configuration.py b/wlauto/core/configuration/configuration.py
deleted file mode 100644
index 9b043a0d..00000000
--- a/wlauto/core/configuration/configuration.py
+++ /dev/null
@@ -1,1036 +0,0 @@
-# Copyright 2014-2016 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-import re
-from copy import copy
-from collections import OrderedDict, defaultdict
-
-from wlauto.exceptions import ConfigError, NotFoundError
-from wlauto.utils.misc import (get_article, merge_config_values)
-from wlauto.utils.types import (identifier, integer, boolean,
- list_of_strings, toggle_set,
- obj_dict)
-from wlauto.core.configuration.tree import SectionNode
-from wlauto.utils.serializer import is_pod
-
-# Mapping for kind conversion; see docs for convert_types below
-KIND_MAP = {
- int: integer,
- bool: boolean,
- dict: OrderedDict,
-}
-
-ITERATION_STATUS = [
- 'NOT_STARTED',
- 'RUNNING',
-
- 'OK',
- 'NONCRITICAL',
- 'PARTIAL',
- 'FAILED',
- 'ABORTED',
- 'SKIPPED',
-]
-
-##########################
-### CONFIG POINT TYPES ###
-##########################
-
-
-class RebootPolicy(object):
- """
- Represents the reboot policy for the execution -- at what points the device
- should be rebooted. This, in turn, is controlled by the policy value that is
- passed in on construction and would typically be read from the user's settings.
- Valid policy values are:
-
- :never: The device will never be rebooted.
- :as_needed: Only reboot the device if it becomes unresponsive, or needs to be flashed, etc.
- :initial: The device will be rebooted when the execution first starts, just before
- executing the first workload spec.
- :each_spec: The device will be rebooted before running a new workload spec.
- :each_iteration: The device will be rebooted before each new iteration.
-
- """
-
- valid_policies = ['never', 'as_needed', 'initial', 'each_spec', 'each_iteration']
-
- def __init__(self, policy):
- policy = policy.strip().lower().replace(' ', '_')
- if policy not in self.valid_policies:
- message = 'Invalid reboot policy {}; must be one of {}'.format(policy, ', '.join(self.valid_policies))
- raise ConfigError(message)
- self.policy = policy
-
- @property
- def can_reboot(self):
- return self.policy != 'never'
-
- @property
- def perform_initial_boot(self):
- return self.policy not in ['never', 'as_needed']
-
- @property
- def reboot_on_each_spec(self):
- return self.policy in ['each_spec', 'each_iteration']
-
- @property
- def reboot_on_each_iteration(self):
- return self.policy == 'each_iteration'
-
- def __str__(self):
- return self.policy
-
- __repr__ = __str__
-
- def __cmp__(self, other):
- if isinstance(other, RebootPolicy):
- return cmp(self.policy, other.policy)
- else:
- return cmp(self.policy, other)
-
- def to_pod(self):
- return self.policy
-
- @staticmethod
- def from_pod(pod):
- return RebootPolicy(pod)
-
-
-class status_list(list):
-
- def append(self, item):
- list.append(self, str(item).upper())
-
-
-class LoggingConfig(dict):
-
- defaults = {
- 'file_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
- 'verbose_format': '%(asctime)s %(levelname)-8s %(name)s: %(message)s',
- 'regular_format': '%(levelname)-8s %(message)s',
- 'color': True,
- }
-
- def __init__(self, config=None):
- dict.__init__(self)
- if isinstance(config, dict):
- config = {identifier(k.lower()): v for k, v in config.iteritems()}
- self['regular_format'] = config.pop('regular_format', self.defaults['regular_format'])
- self['verbose_format'] = config.pop('verbose_format', self.defaults['verbose_format'])
- self['file_format'] = config.pop('file_format', self.defaults['file_format'])
- self['color'] = config.pop('colour_enabled', self.defaults['color']) # legacy
- self['color'] = config.pop('color', self.defaults['color'])
- if config:
- message = 'Unexpected logging configuation parameters: {}'
- raise ValueError(message.format(bad_vals=', '.join(config.keys())))
- elif config is None:
- for k, v in self.defaults.iteritems():
- self[k] = v
- else:
- raise ValueError(config)
-
-
-def get_type_name(kind):
- typename = str(kind)
- if '\'' in typename:
- typename = typename.split('\'')[1]
- elif typename.startswith('<function'):
- typename = typename.split()[1]
- return typename
-
-
-class ConfigurationPoint(object):
- """
- This defines a generic configuration point for workload automation. This is
- used to handle global settings, plugin parameters, etc.
-
- """
-
- def __init__(self, name,
- kind=None,
- mandatory=None,
- default=None,
- override=False,
- allowed_values=None,
- description=None,
- constraint=None,
- merge=False,
- aliases=None,
- global_alias=None):
- """
- Create a new Parameter object.
-
- :param name: The name of the parameter. This will become an instance
- member of the plugin object to which the parameter is
- applied, so it must be a valid python identifier. This
- is the only mandatory parameter.
- :param kind: The type of parameter this is. This must be a callable
- that takes an arbitrary object and converts it to the
- expected type, or raised ``ValueError`` if such conversion
- is not possible. Most Python standard types -- ``str``,
- ``int``, ``bool``, etc. -- can be used here. This
- defaults to ``str`` if not specified.
- :param mandatory: If set to ``True``, then a non-``None`` value for
- this parameter *must* be provided on plugin
- object construction, otherwise ``ConfigError``
- will be raised.
- :param default: The default value for this parameter. If no value
- is specified on plugin construction, this value
- will be used instead. (Note: if this is specified
- and is not ``None``, then ``mandatory`` parameter
- will be ignored).
- :param override: A ``bool`` that specifies whether a parameter of
- the same name further up the hierarchy should
- be overridden. If this is ``False`` (the
- default), an exception will be raised by the
- ``AttributeCollection`` instead.
- :param allowed_values: This should be the complete list of allowed
- values for this parameter. Note: ``None``
- value will always be allowed, even if it is
- not in this list. If you want to disallow
- ``None``, set ``mandatory`` to ``True``.
- :param constraint: If specified, this must be a callable that takes
- the parameter value as an argument and return a
- boolean indicating whether the constraint has been
- satisfied. Alternatively, can be a two-tuple with
- said callable as the first element and a string
- describing the constraint as the second.
- :param merge: The default behaviour when setting a value on an object
- that already has that attribute is to overrided with
- the new value. If this is set to ``True`` then the two
- values will be merged instead. The rules by which the
- values are merged will be determined by the types of
- the existing and new values -- see
- ``merge_config_values`` documentation for details.
- :param aliases: Alternative names for the same configuration point.
- These are largely for backwards compatibility.
- :param global_alias: An alias for this parameter that can be specified at
- the global level. A global_alias can map onto many
- ConfigurationPoints.
- """
- self.name = identifier(name)
- if kind in KIND_MAP:
- kind = KIND_MAP[kind]
- if kind is not None and not callable(kind):
- raise ValueError('Kind must be callable.')
- self.kind = kind
- self.mandatory = mandatory
- if not is_pod(default):
- msg = "The default for '{}' must be a Plain Old Data type, but it is of type '{}' instead."
- raise TypeError(msg.format(self.name, type(default)))
- self.default = default
- self.override = override
- self.allowed_values = allowed_values
- self.description = description
- if self.kind is None and not self.override:
- self.kind = str
- if constraint is not None and not callable(constraint) and not isinstance(constraint, tuple):
- raise ValueError('Constraint must be callable or a (callable, str) tuple.')
- self.constraint = constraint
- self.merge = merge
- self.aliases = aliases or []
- self.global_alias = global_alias
-
- if self.default is not None:
- try:
- self.validate_value("init", self.default)
- except ConfigError:
- raise ValueError('Default value "{}" is not valid'.format(self.default))
-
- def match(self, name):
- if name == self.name or name in self.aliases:
- return True
- elif name == self.global_alias:
- return True
- return False
-
- def set_value(self, obj, value=None, check_mandatory=True):
- if value is None:
- if self.default is not None:
- value = self.default
- elif check_mandatory and self.mandatory:
- msg = 'No values specified for mandatory parameter "{}" in {}'
- raise ConfigError(msg.format(self.name, obj.name))
- else:
- try:
- value = self.kind(value)
- except (ValueError, TypeError):
- typename = get_type_name(self.kind)
- msg = 'Bad value "{}" for {}; must be {} {}'
- article = get_article(typename)
- raise ConfigError(msg.format(value, self.name, article, typename))
- if value is not None:
- self.validate_value(obj.name, value)
- if self.merge and hasattr(obj, self.name):
- value = merge_config_values(getattr(obj, self.name), value)
- setattr(obj, self.name, value)
-
- def validate(self, obj):
- value = getattr(obj, self.name, None)
- if value is not None:
- self.validate_value(obj.name, value)
- else:
- if self.mandatory:
- msg = 'No value specified for mandatory parameter "{}" in {}.'
- raise ConfigError(msg.format(self.name, obj.name))
-
- def validate_value(self, name, value):
- if self.allowed_values:
- self.validate_allowed_values(name, value)
- if self.constraint:
- self.validate_constraint(name, value)
-
- def validate_allowed_values(self, name, value):
- if 'list' in str(self.kind):
- for v in value:
- if v not in self.allowed_values:
- msg = 'Invalid value {} for {} in {}; must be in {}'
- raise ConfigError(msg.format(v, self.name, name, self.allowed_values))
- else:
- if value not in self.allowed_values:
- msg = 'Invalid value {} for {} in {}; must be in {}'
- raise ConfigError(msg.format(value, self.name, name, self.allowed_values))
-
- def validate_constraint(self, name, value):
- msg_vals = {'value': value, 'param': self.name, 'plugin': name}
- if isinstance(self.constraint, tuple) and len(self.constraint) == 2:
- constraint, msg = self.constraint # pylint: disable=unpacking-non-sequence
- elif callable(self.constraint):
- constraint = self.constraint
- msg = '"{value}" failed constraint validation for "{param}" in "{plugin}".'
- else:
- raise ValueError('Invalid constraint for "{}": must be callable or a 2-tuple'.format(self.name))
- if not constraint(value):
- raise ConfigError(value, msg.format(**msg_vals))
-
- def __repr__(self):
- d = copy(self.__dict__)
- del d['description']
- return 'ConfigurationPoint({})'.format(d)
-
- __str__ = __repr__
-
-
-class RuntimeParameter(object):
-
- def __init__(self, name,
- kind=None,
- description=None,
- merge=False):
-
- self.name = re.compile(name)
- if kind is not None:
- if kind in KIND_MAP:
- kind = KIND_MAP[kind]
- if not callable(kind):
- raise ValueError('Kind must be callable.')
- else:
- kind = str
- self.kind = kind
- self.description = description
- self.merge = merge
-
- def validate_kind(self, value, name):
- try:
- value = self.kind(value)
- except (ValueError, TypeError):
- typename = get_type_name(self.kind)
- msg = 'Bad value "{}" for {}; must be {} {}'
- article = get_article(typename)
- raise ConfigError(msg.format(value, name, article, typename))
-
- def match(self, name):
- if self.name.match(name):
- return True
- return False
-
- def update_value(self, name, new_value, source, dest):
- self.validate_kind(new_value, name)
-
- if name in dest:
- old_value, sources = dest[name]
- else:
- old_value = None
- sources = {}
- sources[source] = new_value
-
- if self.merge:
- new_value = merge_config_values(old_value, new_value)
-
- dest[name] = (new_value, sources)
-
-
-class RuntimeParameterManager(object):
-
- runtime_parameters = []
-
- def __init__(self, target_manager):
- self.state = {}
- self.target_manager = target_manager
-
- def get_initial_state(self):
- """
- Should be used to load the starting state from the device. This state
- should be updated if any changes are made to the device, and they are successful.
- """
- pass
-
- def match(self, name):
- for rtp in self.runtime_parameters:
- if rtp.match(name):
- return True
- return False
-
- def update_value(self, name, value, source, dest):
- for rtp in self.runtime_parameters:
- if rtp.match(name):
- rtp.update_value(name, value, source, dest)
- break
- else:
- msg = 'Unknown runtime parameter "{}"'
- raise ConfigError(msg.format(name))
-
- def static_validation(self, params):
- """
- Validate values that do not require a active device connection.
- This method should also pop all runtime parameters meant for this manager
- from params, even if they are not beign statically validated.
- """
- pass
-
- def dynamic_validation(self, params):
- """
- Validate values that require an active device connection
- """
- pass
-
- def commit(self):
- """
- All values have been validated, this will now actually set values
- """
- pass
-
-################################
-### RuntimeParameterManagers ###
-################################
-
-
-class CpuFreqParameters(object):
-
- runtime_parameters = {
- "cores": RuntimeParameter("(.+)_cores"),
- "min_frequency": RuntimeParameter("(.+)_min_frequency", kind=int),
- "max_frequency": RuntimeParameter("(.+)_max_frequency", kind=int),
- "frequency": RuntimeParameter("(.+)_frequency", kind=int),
- "governor": RuntimeParameter("(.+)_governor"),
- "governor_tunables": RuntimeParameter("(.+)_governor_tunables"),
- }
-
- def __init__(self, target):
- super(CpuFreqParameters, self).__init__(target)
- self.core_names = set(target.core_names)
-
- def match(self, name):
- for param in self.runtime_parameters.itervalues():
- if param.match(name):
- return True
- return False
-
- def update_value(self, name, value, source):
- for param in self.runtime_parameters.iteritems():
- core_name_match = param.name.match(name)
- if not core_name_match:
- continue
-
- core_name = core_name_match.groups()[0]
- if core_name not in self.core_names:
- msg = '"{}" in {} is not a valid core name, must be in: {}'
- raise ConfigError(msg.format(core_name, name, ", ".join(self.core_names)))
-
- param.update_value(name, value, source)
- break
- else:
- RuntimeError('"{}" does not belong to CpuFreqParameters'.format(name))
-
- def _get_merged_value(self, core, param_name):
- return self.runtime_parameters[param_name].merged_values["{}_{}".format(core, param_name)]
-
- def _cross_validate(self, core):
- min_freq = self._get_merged_value(core, "min_frequency")
- max_frequency = self._get_merged_value(core, "max_frequency")
- if max_frequency < min_freq:
- msg = "{core}_max_frequency must be larger than {core}_min_frequency"
- raise ConfigError(msg.format(core=core))
- frequency = self._get_merged_value(core, "frequency")
- if not min_freq < frequency < max_frequency:
- msg = "{core}_frequency must be between {core}_min_frequency and {core}_max_frequency"
- raise ConfigError(msg.format(core=core))
- #TODO: more checks
-
- def commit_to_device(self, target):
- pass
- # TODO: Write values to device is correct order ect
-
-#####################
-### Configuration ###
-#####################
-
-
-def _to_pod(cfg_point, value):
- if is_pod(value):
- return value
- if hasattr(cfg_point.kind, 'to_pod'):
- return value.to_pod()
- msg = '{} value "{}" is not serializable'
- raise ValueError(msg.format(cfg_point.name, value))
-
-
-class Configuration(object):
-
- config_points = []
- name = ''
-
- # The below line must be added to all subclasses
- configuration = {cp.name: cp for cp in config_points}
-
- @classmethod
- def from_pod(cls, pod):
- instance = cls()
- for cfg_point in cls.config_points:
- if name in pod:
- value = pod.pop(name)
- if hasattr(cfg_point.kind, 'from_pod'):
- value = cfg_point.kind.from_pod(value)
- cfg_point.set_value(instance, value)
- if pod:
- msg = 'Invalid entry(ies) for "{}": "{}"'
- raise ValueError(msg.format(cls.name, '", "'.join(pod.keys())))
- return instance
-
- def __init__(self):
- for confpoint in self.config_points:
- confpoint.set_value(self, check_mandatory=False)
-
- def set(self, name, value, check_mandatory=True):
- if name not in self.configuration:
- raise ConfigError('Unknown {} configuration "{}"'.format(self.name,
- name))
- self.configuration[name].set_value(self, value,
- check_mandatory=check_mandatory)
-
- def update_config(self, values, check_mandatory=True):
- for k, v in values.iteritems():
- self.set(k, v, check_mandatory=check_mandatory)
-
- def validate(self):
- for cfg_point in self.config_points:
- cfg_point.validate(self)
-
- def to_pod(self):
- pod = {}
- for cfg_point in self.config_points:
- value = getattr(self, cfg_point.name, None)
- pod[cfg_point.name] = _to_pod(cfg_point, value)
- return pod
-
-
-# This configuration for the core WA framework
-class MetaConfiguration(Configuration):
-
- name = "Meta Configuration"
-
- plugin_packages = [
- 'wlauto.commands',
- 'wlauto.workloads',
- 'wlauto.instrumentation',
- 'wlauto.result_processors',
- 'wlauto.managers',
- 'wlauto.resource_getters',
- ]
-
- config_points = [
- ConfigurationPoint(
- 'user_directory',
- description="""
- Path to the user directory. This is the location WA will look for
- user configuration, additional plugins and plugin dependencies.
- """,
- kind=str,
- default=os.path.join(os.path.expanduser('~'), '.workload_automation'),
- ),
- ConfigurationPoint(
- 'assets_repository',
- description="""
- The local mount point for the filer hosting WA assets.
- """,
- ),
- ConfigurationPoint(
- 'logging',
- kind=LoggingConfig,
- default=LoggingConfig.defaults,
- description="""
- WA logging configuration. This should be a dict with a subset
- of the following keys::
-
- :normal_format: Logging format used for console output
- :verbose_format: Logging format used for verbose console output
- :file_format: Logging format used for run.log
- :color: If ``True`` (the default), console logging output will
- contain bash color escape codes. Set this to ``False`` if
- console output will be piped somewhere that does not know
- how to handle those.
- """,
- ),
- ConfigurationPoint(
- 'verbosity',
- kind=int,
- default=0,
- description="""
- Verbosity of console output.
- """,
- ),
- ConfigurationPoint( # TODO: Needs some format for dates etc/ comes from cfg
- 'default_output_directory',
- default="wa_output",
- description="""
- The default output directory that will be created if not
- specified when invoking a run.
- """,
- ),
- ]
- configuration = {cp.name: cp for cp in config_points}
-
- @property
- def dependencies_directory(self):
- return os.path.join(self.user_directory, 'dependencies')
-
- @property
- def plugins_directory(self):
- return os.path.join(self.user_directory, 'plugins')
-
- @property
- def user_config_file(self):
- return os.path.join(self.user_directory, 'config.yaml')
-
- def __init__(self, environ):
- super(MetaConfiguration, self).__init__()
- user_directory = environ.pop('WA_USER_DIRECTORY', '')
- if user_directory:
- self.set('user_directory', user_directory)
-
-
-# This is generic top-level configuration for WA runs.
-class RunConfiguration(Configuration):
-
- name = "Run Configuration"
-
- # Metadata is separated out because it is not loaded into the auto generated config file
- meta_data = [
- ConfigurationPoint('run_name', kind=str,
- description='''
- A string that labels the WA run that is being performed. This would typically
- be set in the ``config`` section of an agenda (see
- :ref:`configuration in an agenda <configuration_in_agenda>`) rather than in the config file.
-
- .. _old-style format strings: http://docs.python.org/2/library/stdtypes.html#string-formatting-operations
- .. _log record attributes: http://docs.python.org/2/library/logging.html#logrecord-attributes
- '''),
- ConfigurationPoint('project', kind=str,
- description='''
- A string naming the project for which data is being collected. This may be
- useful, e.g. when uploading data to a shared database that is populated from
- multiple projects.
- '''),
- ConfigurationPoint('project_stage', kind=dict,
- description='''
- A dict or a string that allows adding additional identifier. This is may be
- useful for long-running projects.
- '''),
- ]
- config_points = [
- ConfigurationPoint('execution_order', kind=str, default='by_iteration',
- allowed_values=['by_iteration', 'by_spec', 'by_section', 'random'],
- description='''
- Defines the order in which the agenda spec will be executed. At the moment,
- the following execution orders are supported:
-
- ``"by_iteration"``
- The first iteration of each workload spec is executed one after the other,
- so all workloads are executed before proceeding on to the second iteration.
- E.g. A1 B1 C1 A2 C2 A3. This is the default if no order is explicitly specified.
-
- In case of multiple sections, this will spread them out, such that specs
- from the same section are further part. E.g. given sections X and Y, global
- specs A and B, and two iterations, this will run ::
-
- X.A1, Y.A1, X.B1, Y.B1, X.A2, Y.A2, X.B2, Y.B2
-
- ``"by_section"``
- Same as ``"by_iteration"``, however this will group specs from the same
- section together, so given sections X and Y, global specs A and B, and two iterations,
- this will run ::
-
- X.A1, X.B1, Y.A1, Y.B1, X.A2, X.B2, Y.A2, Y.B2
-
- ``"by_spec"``
- All iterations of the first spec are executed before moving on to the next
- spec. E.g. A1 A2 A3 B1 C1 C2 This may also be specified as ``"classic"``,
- as this was the way workloads were executed in earlier versions of WA.
-
- ``"random"``
- Execution order is entirely random.
- '''),
- ConfigurationPoint('reboot_policy', kind=RebootPolicy, default='as_needed',
- allowed_values=RebootPolicy.valid_policies,
- description='''
- This defines when during execution of a run the Device will be rebooted. The
- possible values are:
-
- ``"never"``
- The device will never be rebooted.
- ``"initial"``
- The device will be rebooted when the execution first starts, just before
- executing the first workload spec.
- ``"each_spec"``
- The device will be rebooted before running a new workload spec.
- Note: this acts the same as each_iteration when execution order is set to by_iteration
- ``"each_iteration"``
- The device will be rebooted before each new iteration.
- '''),
- ConfigurationPoint('device', kind=str, mandatory=True,
- description='''
- This setting defines what specific Device subclass will be used to interact
- the connected device. Obviously, this must match your setup.
- '''),
- ConfigurationPoint('retry_on_status', kind=status_list,
- default=['FAILED', 'PARTIAL'],
- allowed_values=ITERATION_STATUS,
- description='''
- This is list of statuses on which a job will be cosidered to have failed and
- will be automatically retried up to ``max_retries`` times. This defaults to
- ``["FAILED", "PARTIAL"]`` if not set. Possible values are:
-
- ``"OK"``
- This iteration has completed and no errors have been detected
-
- ``"PARTIAL"``
- One or more instruments have failed (the iteration may still be running).
-
- ``"FAILED"``
- The workload itself has failed.
-
- ``"ABORTED"``
- The user interupted the workload
- '''),
- ConfigurationPoint('max_retries', kind=int, default=3,
- description='''
- The maximum number of times failed jobs will be retried before giving up. If
- not set, this will default to ``3``.
-
- .. note:: this number does not include the original attempt
- '''),
- ]
- configuration = {cp.name: cp for cp in config_points + meta_data}
-
- def __init__(self):
- super(RunConfiguration, self).__init__()
- for confpoint in self.meta_data:
- confpoint.set_value(self, check_mandatory=False)
- self.device_config = None
-
- def merge_device_config(self, plugin_cache):
- """
- Merges global device config and validates that it is correct for the
- selected device.
- """
- # pylint: disable=no-member
- if self.device is None:
- msg = 'Attemting to merge device config with unspecified device'
- raise RuntimeError(msg)
- self.device_config = plugin_cache.get_plugin_config(self.device,
- generic_name="device_config")
-
- def to_pod(self):
- pod = super(RunConfiguration, self).to_pod()
- pod['device_config'] = dict(self.device_config or {})
- return pod
-
- @classmethod
- def from_pod(cls, pod):
- meta_pod = {}
- for cfg_point in cls.meta_data:
- meta_pod[cfg_point.name] = pod.pop(cfg_point.name, None)
-
- instance = super(RunConfiguration, cls).from_pod(pod)
- for cfg_point in cls.meta_data:
- cfg_point.set_value(instance, meta_pod[cfg_point.name])
-
- return instance
-
-
-class JobSpec(Configuration):
-
- name = "Job Spec"
-
- config_points = [
- ConfigurationPoint('iterations', kind=int, default=1,
- description='''
- How many times to repeat this workload spec
- '''),
- ConfigurationPoint('workload_name', kind=str, mandatory=True,
- aliases=["name"],
- description='''
- The name of the workload to run.
- '''),
- ConfigurationPoint('workload_parameters', kind=obj_dict,
- aliases=["params", "workload_params"],
- description='''
- Parameter to be passed to the workload
- '''),
- ConfigurationPoint('runtime_parameters', kind=obj_dict,
- aliases=["runtime_params"],
- description='''
- Runtime parameters to be set prior to running
- the workload.
- '''),
- ConfigurationPoint('boot_parameters', kind=obj_dict,
- aliases=["boot_params"],
- description='''
- Parameters to be used when rebooting the target
- prior to running the workload.
- '''),
- ConfigurationPoint('label', kind=str,
- description='''
- Similar to IDs but do not have the uniqueness restriction.
- If specified, labels will be used by some result
- processes instead of (or in addition to) the workload
- name. For example, the csv result processor will put
- the label in the "workload" column of the CSV file.
- '''),
- ConfigurationPoint('instrumentation', kind=toggle_set, merge=True,
- aliases=["instruments"],
- description='''
- The instruments to enable (or disabled using a ~)
- during this workload spec.
- '''),
- ConfigurationPoint('flash', kind=dict, merge=True,
- description='''
-
- '''),
- ConfigurationPoint('classifiers', kind=dict, merge=True,
- description='''
- Classifiers allow you to tag metrics from this workload
- spec to help in post processing them. Theses are often
- used to help identify what runtime_parameters were used
- for results when post processing.
- '''),
- ]
- configuration = {cp.name: cp for cp in config_points}
-
- @classmethod
- def from_pod(cls, pod):
- job_id = pod.pop('id')
- instance = super(JobSpec, cls).from_pod(pod)
- instance['id'] = job_id
- return instance
-
- @property
- def section_id(self):
- if self.id is not None:
- self.id.rsplit('-', 1)[0]
-
- @property
- def workload_id(self):
- if self.id is not None:
- self.id.rsplit('-', 1)[-1]
-
- def __init__(self):
- super(JobSpec, self).__init__()
- self.to_merge = defaultdict(OrderedDict)
- self._sources = []
- self.id = None
-
- def to_pod(self):
- pod = super(JobSpec, self).to_pod()
- pod['id'] = self.id
- return pod
-
- def update_config(self, source, check_mandatory=True):
- self._sources.append(source)
- values = source.config
- for k, v in values.iteritems():
- if k == "id":
- continue
- elif k.endswith('_parameters'):
- if v:
- self.to_merge[k][source] = copy(v)
- else:
- try:
- self.set(k, v, check_mandatory=check_mandatory)
- except ConfigError as e:
- msg = 'Error in {}:\n\t{}'
- raise ConfigError(msg.format(source.name, e.message))
-
- def merge_workload_parameters(self, plugin_cache):
- # merge global generic and specific config
- workload_params = plugin_cache.get_plugin_config(self.workload_name,
- generic_name="workload_parameters")
-
- cfg_points = plugin_cache.get_plugin_parameters(self.workload_name)
- for source in self._sources:
- config = self.to_merge["workload_parameters"].get(source)
- if config is None:
- continue
-
- for name, cfg_point in cfg_points.iteritems():
- if name in config:
- value = config.pop(name)
- cfg_point.set_value(workload_params, value,
- check_mandatory=False)
- if config:
- msg = 'conflicting entry(ies) for "{}" in {}: "{}"'
- msg = msg.format(self.workload_name, source.name,
- '", "'.join(workload_params[source]))
-
- self.workload_parameters = workload_params
-
- def merge_runtime_parameters(self, plugin_cache, target_manager):
-
- # Order global runtime parameters
- runtime_parameters = OrderedDict()
- try:
- global_runtime_params = plugin_cache.get_plugin_config("runtime_parameters")
- except NotFoundError:
- global_runtime_params = {}
- for source in plugin_cache.sources:
- runtime_parameters[source] = global_runtime_params[source]
-
- # Add runtime parameters from JobSpec
- for source, values in self.to_merge['runtime_parameters'].iteritems():
- runtime_parameters[source] = values
-
- # Merge
- self.runtime_parameters = target_manager.merge_runtime_parameters(runtime_parameters)
-
- def finalize(self):
- self.id = "-".join([source.config['id'] for source in self._sources[1:]]) # ignore first id, "global"
-
-
-# This is used to construct the list of Jobs WA will run
-class JobGenerator(object):
-
- name = "Jobs Configuration"
-
- @property
- def enabled_instruments(self):
- self._read_enabled_instruments = True
- return self._enabled_instruments
-
- def __init__(self, plugin_cache):
- self.plugin_cache = plugin_cache
- self.ids_to_run = []
- self.sections = []
- self.workloads = []
- self._enabled_instruments = set()
- self._read_enabled_instruments = False
- self.disabled_instruments = []
-
- self.job_spec_template = obj_dict(not_in_dict=['name'])
- self.job_spec_template.name = "globally specified job spec configuration"
- self.job_spec_template.id = "global"
- # Load defaults
- for cfg_point in JobSpec.configuration.itervalues():
- cfg_point.set_value(self.job_spec_template, check_mandatory=False)
-
- self.root_node = SectionNode(self.job_spec_template)
-
- def set_global_value(self, name, value):
- JobSpec.configuration[name].set_value(self.job_spec_template, value,
- check_mandatory=False)
- if name == "instrumentation":
- self.update_enabled_instruments(value)
-
- def add_section(self, section, workloads):
- new_node = self.root_node.add_section(section)
- for workload in workloads:
- new_node.add_workload(workload)
-
- def add_workload(self, workload):
- self.root_node.add_workload(workload)
-
- def disable_instruments(self, instruments):
- #TODO: Validate
- self.disabled_instruments = ["~{}".format(i) for i in instruments]
-
- def update_enabled_instruments(self, value):
- if self._read_enabled_instruments:
- msg = "'enabled_instruments' cannot be updated after it has been accessed"
- raise RuntimeError(msg)
- self._enabled_instruments.update(value)
-
- def only_run_ids(self, ids):
- if isinstance(ids, str):
- ids = [ids]
- self.ids_to_run = ids
-
- def generate_job_specs(self, target_manager):
- specs = []
- for leaf in self.root_node.leaves():
- workload_entries = leaf.workload_entries
- sections = [leaf]
- for ancestor in leaf.ancestors():
- workload_entries = ancestor.workload_entries + workload_entries
- sections.insert(0, ancestor)
-
- for workload_entry in workload_entries:
- job_spec = create_job_spec(workload_entry, sections,
- target_manager, self.plugin_cache,
- self.disabled_instruments)
- if self.ids_to_run:
- for job_id in self.ids_to_run:
- if job_id in job_spec.id:
- break
- else:
- continue
- self.update_enabled_instruments(job_spec.instrumentation.values())
- specs.append(job_spec)
- return specs
-
-
-def create_job_spec(workload_entry, sections, target_manager, plugin_cache,
- disabled_instruments):
- job_spec = JobSpec()
-
- # PHASE 2.1: Merge general job spec configuration
- for section in sections:
- job_spec.update_config(section, check_mandatory=False)
- job_spec.update_config(workload_entry, check_mandatory=False)
-
- # PHASE 2.2: Merge global, section and workload entry "workload_parameters"
- job_spec.merge_workload_parameters(plugin_cache)
-
- # TODO: PHASE 2.3: Validate device runtime/boot paramerers
- job_spec.merge_runtime_parameters(plugin_cache, target_manager)
- target_manager.validate_runtime_parameters(job_spec.runtime_parameters)
-
- # PHASE 2.4: Disable globally disabled instrumentation
- job_spec.set("instrumentation", disabled_instruments)
- job_spec.finalize()
-
- return job_spec
-
-
-settings = MetaConfiguration(os.environ)
diff --git a/wlauto/core/configuration/default.py b/wlauto/core/configuration/default.py
deleted file mode 100644
index 5145a6b4..00000000
--- a/wlauto/core/configuration/default.py
+++ /dev/null
@@ -1,42 +0,0 @@
-from wlauto.core.configuration.configuration import MetaConfiguration, RunConfiguration
-from wlauto.core.configuration.plugin_cache import PluginCache
-from wlauto.utils.serializer import yaml
-from wlauto.utils.doc import strip_inlined_text
-
-DEFAULT_INSTRUMENTS = ['execution_time',
- 'interrupts',
- 'cpufreq',
- 'status',
- 'standard',
- 'csv']
-
-
-def _format_yaml_comment(param, short_description=False):
- comment = param.description
- comment = strip_inlined_text(comment)
- if short_description:
- comment = comment.split('\n\n')[0]
- comment = comment.replace('\n', '\n# ')
- comment = "# {}\n".format(comment)
- return comment
-
-
-def _format_instruments(output):
- plugin_cache = PluginCache()
- output.write("instrumentation:\n")
- for plugin in DEFAULT_INSTRUMENTS:
- plugin_cls = plugin_cache.loader.get_plugin_class(plugin)
- output.writelines(_format_yaml_comment(plugin_cls, short_description=True))
- output.write(" - {}\n".format(plugin))
- output.write("\n")
-
-
-def generate_default_config(path):
- with open(path, 'w') as output:
- for param in MetaConfiguration.config_points + RunConfiguration.config_points:
- entry = {param.name: param.default}
- comment = _format_yaml_comment(param)
- output.writelines(comment)
- yaml.dump(entry, output, default_flow_style=False)
- output.write("\n")
- _format_instruments(output)
diff --git a/wlauto/core/configuration/manager.py b/wlauto/core/configuration/manager.py
deleted file mode 100644
index b8bacd38..00000000
--- a/wlauto/core/configuration/manager.py
+++ /dev/null
@@ -1,213 +0,0 @@
-import random
-from itertools import izip_longest, groupby, chain
-
-from wlauto.core import pluginloader
-from wlauto.core.configuration.configuration import (MetaConfiguration,
- RunConfiguration,
- JobGenerator, settings)
-from wlauto.core.configuration.parsers import ConfigParser
-from wlauto.core.configuration.plugin_cache import PluginCache
-
-
-class CombinedConfig(object):
-
- @staticmethod
- def from_pod(pod):
- instance = CombinedConfig()
- instance.settings = MetaConfiguration.from_pod(pod.get('settings', {}))
- instance.run_config = RunConfiguration.from_pod(pod.get('run_config', {}))
- return instance
-
- def __init__(self, settings=None, run_config=None):
- self.settings = settings
- self.run_config = run_config
-
- def to_pod(self):
- return {'settings': self.settings.to_pod(),
- 'run_config': self.run_config.to_pod()}
-
-
-class Job(object):
-
- def __init__(self, spec, iteration, context):
- self.spec = spec
- self.iteration = iteration
- self.context = context
- self.status = 'new'
- self.workload = None
- self.output = None
-
- def load(self, target, loader=pluginloader):
- self.workload = loader.get_workload(self.spec.workload_name,
- target,
- **self.spec.workload_parameters)
- self.workload.init_resources(self.context)
- self.workload.validate()
-
-
-class ConfigManager(object):
- """
- Represents run-time state of WA. Mostly used as a container for loaded
- configuration and discovered plugins.
-
- This exists outside of any command or run and is associated with the running
- instance of wA itself.
- """
-
- @property
- def enabled_instruments(self):
- return self.jobs_config.enabled_instruments
-
- @property
- def job_specs(self):
- if not self._jobs_generated:
- msg = 'Attempting to access job specs before '\
- 'jobs have been generated'
- raise RuntimeError(msg)
- return [j.spec for j in self._jobs]
-
- @property
- def jobs(self):
- if not self._jobs_generated:
- msg = 'Attempting to access jobs before '\
- 'they have been generated'
- raise RuntimeError(msg)
- return self._jobs
-
- def __init__(self, settings=settings):
- self.settings = settings
- self.run_config = RunConfiguration()
- self.plugin_cache = PluginCache()
- self.jobs_config = JobGenerator(self.plugin_cache)
- self.loaded_config_sources = []
- self._config_parser = ConfigParser()
- self._jobs = []
- self._jobs_generated = False
- self.agenda = None
-
- def load_config_file(self, filepath):
- self._config_parser.load_from_path(self, filepath)
- self.loaded_config_sources.append(filepath)
-
- def load_config(self, values, source, wrap_exceptions=True):
- self._config_parser.load(self, values, source)
- self.loaded_config_sources.append(source)
-
- def get_plugin(self, name=None, kind=None, *args, **kwargs):
- return self.plugin_cache.get_plugin(name, kind, *args, **kwargs)
-
- def get_instruments(self, target):
- instruments = []
- for name in self.enabled_instruments:
- instruments.append(self.get_plugin(name, kind='instrument',
- target=target))
- return instruments
-
- def finalize(self):
- if not self.agenda:
- msg = 'Attempting to finalize config before agenda has been set'
- raise RuntimeError(msg)
- self.run_config.merge_device_config(self.plugin_cache)
- return CombinedConfig(self.settings, self.run_config)
-
- def generate_jobs(self, context):
- job_specs = self.jobs_config.generate_job_specs(context.tm)
- exec_order = self.run_config.execution_order
- for spec, i in permute_iterations(job_specs, exec_order):
- job = Job(spec, i, context)
- job.load(context.tm.target)
- self._jobs.append(job)
- self._jobs_generated = True
-
-
-def permute_by_job(specs):
- """
- This is that "classic" implementation that executes all iterations of a
- workload spec before proceeding onto the next spec.
-
- """
- for spec in specs:
- for i in range(1, spec.iterations + 1):
- yield (spec, i)
-
-
-def permute_by_iteration(specs):
- """
- Runs the first iteration for all benchmarks first, before proceeding to the
- next iteration, i.e. A1, B1, C1, A2, B2, C2... instead of A1, A1, B1, B2,
- C1, C2...
-
- If multiple sections where specified in the agenda, this will run all
- sections for the first global spec first, followed by all sections for the
- second spec, etc.
-
- e.g. given sections X and Y, and global specs A and B, with 2 iterations,
- this will run
-
- X.A1, Y.A1, X.B1, Y.B1, X.A2, Y.A2, X.B2, Y.B2
-
- """
- groups = [list(g) for k, g in groupby(specs, lambda s: s.workload_id)]
-
- all_tuples = []
- for spec in chain(*groups):
- all_tuples.append([(spec, i + 1)
- for i in xrange(spec.iterations)])
- for t in chain(*map(list, izip_longest(*all_tuples))):
- if t is not None:
- yield t
-
-
-def permute_by_section(specs):
- """
- Runs the first iteration for all benchmarks first, before proceeding to the
- next iteration, i.e. A1, B1, C1, A2, B2, C2... instead of A1, A1, B1, B2,
- C1, C2...
-
- If multiple sections where specified in the agenda, this will run all specs
- for the first section followed by all specs for the seciod section, etc.
-
- e.g. given sections X and Y, and global specs A and B, with 2 iterations,
- this will run
-
- X.A1, X.B1, Y.A1, Y.B1, X.A2, X.B2, Y.A2, Y.B2
-
- """
- groups = [list(g) for k, g in groupby(specs, lambda s: s.section_id)]
-
- all_tuples = []
- for spec in chain(*groups):
- all_tuples.append([(spec, i + 1)
- for i in xrange(spec.iterations)])
- for t in chain(*map(list, izip_longest(*all_tuples))):
- if t is not None:
- yield t
-
-
-def permute_randomly(specs):
- """
- This will generate a random permutation of specs/iteration tuples.
-
- """
- result = []
- for spec in specs:
- for i in xrange(1, spec.iterations + 1):
- result.append((spec, i))
- random.shuffle(result)
- for t in result:
- yield t
-
-
-permute_map = {
- 'by_iteration': permute_by_iteration,
- 'by_job': permute_by_job,
- 'by_section': permute_by_section,
- 'random': permute_randomly,
-}
-
-
-def permute_iterations(specs, exec_order):
- if exec_order not in permute_map:
- msg = 'Unknown execution order "{}"; must be in: {}'
- raise ValueError(msg.format(exec_order, permute_map.keys()))
- return permute_map[exec_order](specs)
diff --git a/wlauto/core/configuration/parsers.py b/wlauto/core/configuration/parsers.py
deleted file mode 100644
index df6d019e..00000000
--- a/wlauto/core/configuration/parsers.py
+++ /dev/null
@@ -1,308 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-
-from wlauto.exceptions import ConfigError
-from wlauto.utils.serializer import read_pod, SerializerSyntaxError
-from wlauto.utils.types import toggle_set, counter
-from wlauto.core.configuration.configuration import JobSpec
-
-
-###############
-### Parsers ###
-###############
-
-class ConfigParser(object):
-
- def load_from_path(self, state, filepath):
- self.load(state, _load_file(filepath, "Config"), filepath)
-
- def load(self, state, raw, source, wrap_exceptions=True): # pylint: disable=too-many-branches
- try:
- if 'run_name' in raw:
- msg = '"run_name" can only be specified in the config '\
- 'section of an agenda'
- raise ConfigError(msg)
-
- if 'id' in raw:
- raise ConfigError('"id" cannot be set globally')
-
- merge_result_processors_instruments(raw)
-
- # Get WA core configuration
- for cfg_point in state.settings.configuration.itervalues():
- value = get_aliased_param(cfg_point, raw)
- if value is not None:
- state.settings.set(cfg_point.name, value)
-
- # Get run specific configuration
- for cfg_point in state.run_config.configuration.itervalues():
- value = get_aliased_param(cfg_point, raw)
- if value is not None:
- state.run_config.set(cfg_point.name, value)
-
- # Get global job spec configuration
- for cfg_point in JobSpec.configuration.itervalues():
- value = get_aliased_param(cfg_point, raw)
- if value is not None:
- state.jobs_config.set_global_value(cfg_point.name, value)
-
- for name, values in raw.iteritems():
- # Assume that all leftover config is for a plug-in or a global
- # alias it is up to PluginCache to assert this assumption
- state.plugin_cache.add_configs(name, values, source)
-
- except ConfigError as e:
- if wrap_exceptions:
- raise ConfigError('Error in "{}":\n{}'.format(source, str(e)))
- else:
- raise e
-
-
-class AgendaParser(object):
-
- def load_from_path(self, state, filepath):
- raw = _load_file(filepath, 'Agenda')
- self.load(state, raw, filepath)
-
- def load(self, state, raw, source):
- try:
- if not isinstance(raw, dict):
- raise ConfigError('Invalid agenda, top level entry must be a dict')
-
- self._populate_and_validate_config(state, raw, source)
- sections = self._pop_sections(raw)
- global_workloads = self._pop_workloads(raw)
-
- if raw:
- msg = 'Invalid top level agenda entry(ies): "{}"'
- raise ConfigError(msg.format('", "'.join(raw.keys())))
-
- sect_ids, wkl_ids = self._collect_ids(sections, global_workloads)
- self._process_global_workloads(state, global_workloads, wkl_ids)
- self._process_sections(state, sections, sect_ids, wkl_ids)
-
- state.agenda = source
-
- except (ConfigError, SerializerSyntaxError) as e:
- raise ConfigError('Error in "{}":\n\t{}'.format(source, str(e)))
-
- def _populate_and_validate_config(self, state, raw, source):
- for name in ['config', 'global']:
- entry = raw.pop(name, None)
- if entry is None:
- continue
-
- if not isinstance(entry, dict):
- msg = 'Invalid entry "{}" - must be a dict'
- raise ConfigError(msg.format(name))
-
- if 'run_name' in entry:
- state.run_config.set('run_name', entry.pop('run_name'))
-
- state.load_config(entry, source, wrap_exceptions=False)
-
- def _pop_sections(self, raw):
- sections = raw.pop("sections", [])
- if not isinstance(sections, list):
- raise ConfigError('Invalid entry "sections" - must be a list')
- return sections
-
- def _pop_workloads(self, raw):
- workloads = raw.pop("workloads", [])
- if not isinstance(workloads, list):
- raise ConfigError('Invalid entry "workloads" - must be a list')
- return workloads
-
- def _collect_ids(self, sections, global_workloads):
- seen_section_ids = set()
- seen_workload_ids = set()
-
- for workload in global_workloads:
- workload = _get_workload_entry(workload)
- _collect_valid_id(workload.get("id"), seen_workload_ids, "workload")
-
- for section in sections:
- _collect_valid_id(section.get("id"), seen_section_ids, "section")
- for workload in section["workloads"] if "workloads" in section else []:
- workload = _get_workload_entry(workload)
- _collect_valid_id(workload.get("id"), seen_workload_ids,
- "workload")
-
- return seen_section_ids, seen_workload_ids
-
- def _process_global_workloads(self, state, global_workloads, seen_wkl_ids):
- for workload_entry in global_workloads:
- workload = _process_workload_entry(workload_entry, seen_wkl_ids,
- state.jobs_config)
- state.jobs_config.add_workload(workload)
-
- def _process_sections(self, state, sections, seen_sect_ids, seen_wkl_ids):
- for section in sections:
- workloads = []
- for workload_entry in section.pop("workloads", []):
- workload = _process_workload_entry(workload_entry, seen_workload_ids,
- state.jobs_config)
- workloads.append(workload)
-
- section = _construct_valid_entry(section, seen_sect_ids,
- "s", state.jobs_config)
- state.jobs_config.add_section(section, workloads)
-
-
-########################
-### Helper functions ###
-########################
-
-def get_aliased_param(cfg_point, d, default=None, pop=True):
- """
- Given a ConfigurationPoint and a dict, this function will search the dict for
- the ConfigurationPoint's name/aliases. If more than one is found it will raise
- a ConfigError. If one (and only one) is found then it will return the value
- for the ConfigurationPoint. If the name or aliases are present in the dict it will
- return the "default" parameter of this function.
- """
- aliases = [cfg_point.name] + cfg_point.aliases
- alias_map = [a for a in aliases if a in d]
- if len(alias_map) > 1:
- raise ConfigError(DUPLICATE_ENTRY_ERROR.format(aliases))
- elif alias_map:
- if pop:
- return d.pop(alias_map[0])
- else:
- return d[alias_map[0]]
- else:
- return default
-
-
-def _load_file(filepath, error_name):
- if not os.path.isfile(filepath):
- raise ValueError("{} does not exist".format(filepath))
- try:
- raw = read_pod(filepath)
- except SerializerSyntaxError as e:
- raise ConfigError('Error parsing {} {}: {}'.format(error_name, filepath, e))
- if not isinstance(raw, dict):
- message = '{} does not contain a valid {} structure; top level must be a dict.'
- raise ConfigError(message.format(filepath, error_name))
- return raw
-
-
-def merge_result_processors_instruments(raw):
- instr_config = JobSpec.configuration['instrumentation']
- instruments = toggle_set(get_aliased_param(instr_config, raw, default=[]))
- result_processors = toggle_set(raw.pop('result_processors', []))
- if instruments and result_processors:
- conflicts = instruments.conflicts_with(result_processors)
- if conflicts:
- msg = '"instrumentation" and "result_processors" have '\
- 'conflicting entries: {}'
- entires = ', '.join('"{}"'.format(c.strip("~")) for c in conflicts)
- raise ConfigError(msg.format(entires))
- raw['instrumentation'] = instruments.merge_with(result_processors)
-
-
-def _pop_aliased(d, names, entry_id):
- name_count = sum(1 for n in names if n in d)
- if name_count > 1:
- names_list = ', '.join(names)
- msg = 'Inivalid workload entry "{}": at moust one of ({}}) must be specified.'
- raise ConfigError(msg.format(workload_entry['id'], names_list))
- for name in names:
- if name in d:
- return d.pop(name)
- return None
-
-
-def _construct_valid_entry(raw, seen_ids, prefix, jobs_config):
- workload_entry = {}
-
- # Generate an automatic ID if the entry doesn't already have one
- if 'id' not in raw:
- while True:
- new_id = '{}{}'.format(prefix, counter(name=prefix))
- if new_id not in seen_ids:
- break
- workload_entry['id'] = new_id
- seen_ids.add(new_id)
- else:
- workload_entry['id'] = raw.pop('id')
-
- # Process instrumentation
- merge_result_processors_instruments(raw)
-
- # Validate all workload_entry
- for name, cfg_point in JobSpec.configuration.iteritems():
- value = get_aliased_param(cfg_point, raw)
- if value is not None:
- value = cfg_point.kind(value)
- cfg_point.validate_value(name, value)
- workload_entry[name] = value
-
- wk_id = workload_entry['id']
- param_names = ['workload_params', 'workload_parameters']
- if prefix == 'wk':
- param_names += ['params', 'parameters']
- workload_entry["workload_parameters"] = _pop_aliased(raw, param_names, wk_id)
-
- param_names = ['runtime_parameters', 'runtime_params']
- if prefix == 's':
- param_names += ['params', 'parameters']
- workload_entry["runtime_parameters"] = _pop_aliased(raw, param_names, wk_id)
-
- param_names = ['boot_parameters', 'boot_params']
- workload_entry["boot_parameters"] = _pop_aliased(raw, param_names, wk_id)
-
- if "instrumentation" in workload_entry:
- jobs_config.update_enabled_instruments(workload_entry["instrumentation"])
-
- # error if there are unknown workload_entry
- if raw:
- msg = 'Invalid entry(ies) in "{}": "{}"'
- raise ConfigError(msg.format(workload_entry['id'], ', '.join(raw.keys())))
-
- return workload_entry
-
-
-def _collect_valid_id(entry_id, seen_ids, entry_type):
- if entry_id is None:
- return
- if entry_id in seen_ids:
- raise ConfigError('Duplicate {} ID "{}".'.format(entry_type, entry_id))
- # "-" is reserved for joining section and workload IDs
- if "-" in entry_id:
- msg = 'Invalid {} ID "{}"; IDs cannot contain a "-"'
- raise ConfigError(msg.format(entry_type, entry_id))
- if entry_id == "global":
- msg = 'Invalid {} ID "global"; is a reserved ID'
- raise ConfigError(msg.format(entry_type))
- seen_ids.add(entry_id)
-
-
-def _get_workload_entry(workload):
- if isinstance(workload, basestring):
- workload = {'name': workload}
- elif not isinstance(workload, dict):
- raise ConfigError('Invalid workload entry: "{}"')
- return workload
-
-
-def _process_workload_entry(workload, seen_workload_ids, jobs_config):
- workload = _get_workload_entry(workload)
- workload = _construct_valid_entry(workload, seen_workload_ids,
- "wk", jobs_config)
- return workload
-
diff --git a/wlauto/core/configuration/plugin_cache.py b/wlauto/core/configuration/plugin_cache.py
deleted file mode 100644
index fe403843..00000000
--- a/wlauto/core/configuration/plugin_cache.py
+++ /dev/null
@@ -1,210 +0,0 @@
-# Copyright 2016 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from copy import copy
-from collections import defaultdict
-
-from wlauto.core import pluginloader
-from wlauto.exceptions import ConfigError
-from wlauto.utils.types import obj_dict
-from devlib.utils.misc import memoized
-
-GENERIC_CONFIGS = ["device_config", "workload_parameters",
- "boot_parameters", "runtime_parameters"]
-
-
-class PluginCache(object):
- """
- The plugin cache is used to store configuration that cannot be processed at
- this stage, whether thats because it is unknown if its needed
- (in the case of disabled plug-ins) or it is not know what it belongs to (in
- the case of "device-config" ect.). It also maintains where configuration came
- from, and the priority order of said sources.
- """
-
- def __init__(self, loader=pluginloader):
- self.loader = loader
- self.sources = []
- self.plugin_configs = defaultdict(lambda: defaultdict(dict))
- self.global_alias_values = defaultdict(dict)
-
- # Generate a mapping of what global aliases belong to
- self._global_alias_map = defaultdict(dict)
- self._list_of_global_aliases = set()
- for plugin in self.loader.list_plugins():
- for param in plugin.parameters:
- if param.global_alias:
- self._global_alias_map[plugin.name][param.global_alias] = param
- self._list_of_global_aliases.add(param.global_alias)
-
- def add_source(self, source):
- if source in self.sources:
- raise Exception("Source has already been added.")
- self.sources.append(source)
-
- def add_global_alias(self, alias, value, source):
- if source not in self.sources:
- msg = "Source '{}' has not been added to the plugin cache."
- raise RuntimeError(msg.format(source))
-
- if not self.is_global_alias(alias):
- msg = "'{} is not a valid global alias'"
- raise RuntimeError(msg.format(alias))
-
- self.global_alias_values[alias][source] = value
-
- def add_configs(self, plugin_name, values, source):
- if self.is_global_alias(plugin_name):
- self.add_global_alias(plugin_name, values, source)
- return
- for name, value in values.iteritems():
- self.add_config(plugin_name, name, value, source)
-
- def add_config(self, plugin_name, name, value, source):
- if source not in self.sources:
- msg = "Source '{}' has not been added to the plugin cache."
- raise RuntimeError(msg.format(source))
-
- if (not self.loader.has_plugin(plugin_name) and
- plugin_name not in GENERIC_CONFIGS):
- msg = 'configuration provided for unknown plugin "{}"'
- raise ConfigError(msg.format(plugin_name))
-
- if (plugin_name not in GENERIC_CONFIGS and
- name not in self.get_plugin_parameters(plugin_name)):
- msg = "'{}' is not a valid parameter for '{}'"
- raise ConfigError(msg.format(name, plugin_name))
-
- self.plugin_configs[plugin_name][source][name] = value
-
- def is_global_alias(self, name):
- return name in self._list_of_global_aliases
-
- def get_plugin_config(self, plugin_name, generic_name=None):
- config = obj_dict(not_in_dict=['name'])
- config.name = plugin_name
-
- # Load plugin defaults
- cfg_points = self.get_plugin_parameters(plugin_name)
- for cfg_point in cfg_points.itervalues():
- cfg_point.set_value(config, check_mandatory=False)
-
- # Merge global aliases
- for alias, param in self._global_alias_map[plugin_name].iteritems():
- if alias in self.global_alias_values:
- for source in self.sources:
- if source not in self.global_alias_values[alias]:
- continue
- val = self.global_alias_values[alias][source]
- param.set_value(config, value=val)
-
- # Merge user config
- # Perform a simple merge with the order of sources representing priority
- if generic_name is None:
- plugin_config = self.plugin_configs[plugin_name]
- for source in self.sources:
- if source not in plugin_config:
- continue
- for name, value in plugin_config[source].iteritems():
- cfg_points[name].set_value(config, value=value)
- # A more complicated merge that involves priority of sources and specificity
- else:
- self._merge_using_priority_specificity(plugin_name, generic_name, config)
-
- return config
-
- def get_plugin(self, name, kind=None, *args, **kwargs):
- config = self.get_plugin_config(name)
- kwargs = dict(config.items() + kwargs.items())
- return self.loader.get_plugin(name, kind=kind, *args, **kwargs)
-
- @memoized
- def get_plugin_parameters(self, name):
- params = self.loader.get_plugin_class(name).parameters
- return {param.name: param for param in params}
-
- # pylint: disable=too-many-nested-blocks, too-many-branches
- def _merge_using_priority_specificity(self, specific_name,
- generic_name, final_config):
- """
- WA configuration can come from various sources of increasing priority,
- as well as being specified in a generic and specific manner (e.g.
- ``device_config`` and ``nexus10`` respectivly). WA has two rules for
- the priority of configuration:
-
- - Configuration from higher priority sources overrides
- configuration from lower priority sources.
- - More specific configuration overrides less specific configuration.
-
- There is a situation where these two rules come into conflict. When a
- generic configuration is given in config source of high priority and a
- specific configuration is given in a config source of lower priority.
- In this situation it is not possible to know the end users intention
- and WA will error.
-
- :param generic_name: The name of the generic configuration
- e.g ``device_config``
- :param specific_name: The name of the specific configuration used
- e.g ``nexus10``
- :param cfg_point: A dict of ``ConfigurationPoint``s to be used when
- merging configuration. keys=config point name,
- values=config point
-
- :rtype: A fully merged and validated configuration in the form of a
- obj_dict.
- """
- generic_config = copy(self.plugin_configs[generic_name])
- specific_config = copy(self.plugin_configs[specific_name])
- cfg_points = self.get_plugin_parameters(specific_name)
- sources = self.sources
- seen_specific_config = defaultdict(list)
-
- # set_value uses the 'name' attribute of the passed object in it error
- # messages, to ensure these messages make sense the name will have to be
- # changed several times during this function.
- final_config.name = specific_name
-
- # pylint: disable=too-many-nested-blocks
- for source in sources:
- try:
- if source in generic_config:
- final_config.name = generic_name
- for name, cfg_point in cfg_points.iteritems():
- if name in generic_config[source]:
- if name in seen_specific_config:
- msg = ('"{generic_name}" configuration "{config_name}" has already been '
- 'specified more specifically for {specific_name} in:\n\t\t{sources}')
- msg = msg.format(generic_name=generic_name,
- config_name=name,
- specific_name=specific_name,
- sources=", ".join(seen_specific_config[name]))
- raise ConfigError(msg)
- value = generic_config[source][name]
- cfg_point.set_value(final_config, value, check_mandatory=False)
-
- if source in specific_config:
- final_config.name = specific_name
- for name, cfg_point in cfg_points.iteritems():
- if name in specific_config[source]:
- seen_specific_config[name].append(str(source))
- value = specific_config[source][name]
- cfg_point.set_value(final_config, value, check_mandatory=False)
-
- except ConfigError as e:
- raise ConfigError('Error in "{}":\n\t{}'.format(source, str(e)))
-
- # Validate final configuration
- final_config.name = specific_name
- for cfg_point in cfg_points.itervalues():
- cfg_point.validate(final_config)
diff --git a/wlauto/core/configuration/tree.py b/wlauto/core/configuration/tree.py
deleted file mode 100644
index 1cec5d38..00000000
--- a/wlauto/core/configuration/tree.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2016 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-
-class JobSpecSource(object):
-
- kind = ""
-
- def __init__(self, config, parent=None):
- self.config = config
- self.parent = parent
-
- @property
- def id(self):
- return self.config['id']
-
- def name(self):
- raise NotImplementedError()
-
-
-class WorkloadEntry(JobSpecSource):
- kind = "workload"
-
- @property
- def name(self):
- if self.parent.id == "global":
- return 'workload "{}"'.format(self.id)
- else:
- return 'workload "{}" from section "{}"'.format(self.id, self.parent.id)
-
-
-class SectionNode(JobSpecSource):
-
- kind = "section"
-
- @property
- def name(self):
- if self.id == "global":
- return "globally specified configuration"
- else:
- return 'section "{}"'.format(self.id)
-
- @property
- def is_leaf(self):
- return not bool(self.children)
-
- def __init__(self, config, parent=None):
- super(SectionNode, self).__init__(config, parent=parent)
- self.workload_entries = []
- self.children = []
-
- def add_section(self, section):
- new_node = SectionNode(section, parent=self)
- self.children.append(new_node)
- return new_node
-
- def add_workload(self, workload_config):
- self.workload_entries.append(WorkloadEntry(workload_config, self))
-
- def descendants(self):
- for child in self.children:
- for n in child.descendants():
- yield n
- yield child
-
- def ancestors(self):
- if self.parent is not None:
- yield self.parent
- for ancestor in self.parent.ancestors():
- yield ancestor
-
- def leaves(self):
- if self.is_leaf:
- yield self
- else:
- for n in self.descendants():
- if n.is_leaf:
- yield n
diff --git a/wlauto/core/device_manager.py b/wlauto/core/device_manager.py
deleted file mode 100644
index ca0c3a68..00000000
--- a/wlauto/core/device_manager.py
+++ /dev/null
@@ -1,198 +0,0 @@
-import string
-from copy import copy
-
-from wlauto.core.plugin import Plugin, Parameter
-from wlauto.core.configuration.configuration import RuntimeParameter
-from wlauto.exceptions import ConfigError
-from wlauto.utils.types import list_of_integers, list_of, caseless_string
-
-from devlib.platform import Platform
-from devlib.target import AndroidTarget, Cpuinfo, KernelVersion, KernelConfig
-
-__all__ = ['RuntimeParameter', 'CoreParameter', 'DeviceManager', 'TargetInfo']
-
-UNKOWN_RTP = 'Unknown runtime parameter "{}"'
-
-
-class TargetInfo(object):
-
- @staticmethod
- def from_pod(pod):
- instance = TargetInfo()
- instance.target = pod['target']
- instance.abi = pod['abi']
- instance.cpuinfo = Cpuinfo(pod['cpuinfo'])
- instance.os = pod['os']
- instance.os_version = pod['os_version']
- instance.abi = pod['abi']
- instance.is_rooted = pod['is_rooted']
- instance.kernel_version = KernelVersion(pod['kernel_release'],
- pod['kernel_version'])
- instance.kernel_config = KernelConfig(pod['kernel_config'])
-
- if pod["target"] == "AndroidTarget":
- instance.screen_resolution = pod['screen_resolution']
- instance.prop = pod['prop']
- instance.prop = pod['android_id']
-
- return instance
-
- def __init__(self, target=None):
- if target:
- self.target = target.__class__.__name__
- self.cpuinfo = target.cpuinfo
- self.os = target.os
- self.os_version = target.os_version
- self.abi = target.abi
- self.is_rooted = target.is_rooted
- self.kernel_version = target.kernel_version
- self.kernel_config = target.config
-
- if isinstance(target, AndroidTarget):
- self.screen_resolution = target.screen_resolution
- self.prop = target.getprop()
- self.android_id = target.android_id
-
- else:
- self.target = None
- self.cpuinfo = None
- self.os = None
- self.os_version = None
- self.abi = None
- self.is_rooted = None
- self.kernel_version = None
- self.kernel_config = None
-
- if isinstance(target, AndroidTarget):
- self.screen_resolution = None
- self.prop = None
- self.android_id = None
-
- def to_pod(self):
- pod = {}
- pod['target'] = self.target
- pod['abi'] = self.abi
- pod['cpuinfo'] = self.cpuinfo.sections
- pod['os'] = self.os
- pod['os_version'] = self.os_version
- pod['abi'] = self.abi
- pod['is_rooted'] = self.is_rooted
- pod['kernel_release'] = self.kernel_version.release
- pod['kernel_version'] = self.kernel_version.version
- pod['kernel_config'] = dict(self.kernel_config.iteritems())
-
- if self.target == "AndroidTarget":
- pod['screen_resolution'] = self.screen_resolution
- pod['prop'] = self.prop
- pod['android_id'] = self.android_id
-
- return pod
-
-
-class DeviceManager(Plugin):
-
- kind = "manager"
- name = None
- target_type = None
- platform_type = Platform
- has_gpu = None
- path_module = None
- info = None
-
- parameters = [
- Parameter('core_names', kind=list_of(caseless_string),
- description="""
- This is a list of all cpu cores on the device with each
- element being the core type, e.g. ``['a7', 'a7', 'a15']``. The
- order of the cores must match the order they are listed in
- ``'/sys/devices/system/cpu'``. So in this case, ``'cpu0'`` must
- be an A7 core, and ``'cpu2'`` an A15.'
- """),
- Parameter('core_clusters', kind=list_of_integers,
- description="""
- This is a list indicating the cluster affinity of the CPU cores,
- each element correponding to the cluster ID of the core coresponding
- to its index. E.g. ``[0, 0, 1]`` indicates that cpu0 and cpu1 are on
- cluster 0, while cpu2 is on cluster 1. If this is not specified, this
- will be inferred from ``core_names`` if possible (assuming all cores with
- the same name are on the same cluster).
- """),
- Parameter('working_directory',
- description='''
- Working directory to be used by WA. This must be in a location where the specified user
- has write permissions. This will default to /home/<username>/wa (or to /root/wa, if
- username is 'root').
- '''),
- Parameter('binaries_directory',
- description='Location of executable binaries on this device (must be in PATH).'),
- ]
- modules = []
-
- runtime_parameter_managers = [
- ]
-
- def __init__(self):
- super(DeviceManager, self).__init__()
- self.runtime_parameter_values = None
-
- # Framework
-
- def connect(self):
- raise NotImplementedError("connect method must be implemented for device managers")
-
- def initialize(self, context):
- super(DeviceManager, self).initialize(context)
- self.info = TargetInfo(self.target)
- self.target.setup()
-
- def start(self):
- pass
-
- def stop(self):
- pass
-
- def validate(self):
- pass
-
- # Runtime Parameters
-
- def merge_runtime_parameters(self, params):
- merged_values = {}
- for source, values in params.iteritems():
- for name, value in values:
- for rtpm in self.runtime_parameter_managers:
- if rtpm.match(name):
- rtpm.update_value(name, value, source, merged_values)
- break
- else:
- msg = 'Unknown runtime parameter "{}" in "{}"'
- raise ConfigError(msg.format(name, source))
- return merged_values
-
- def static_runtime_parameter_validation(self, params):
- params = copy(params)
- for rtpm in self.runtime_parameters_managers:
- rtpm.static_validation(params)
- if params:
- msg = 'Unknown runtime_parameters for "{}": "{}"'
- raise ConfigError(msg.format(self.name, '", "'.join(params.iterkeys())))
-
- def dynamic_runtime_parameter_validation(self, params):
- for rtpm in self.runtime_parameters_managers:
- rtpm.dynamic_validation(params)
-
- def commit_runtime_parameters(self, params):
- params = copy(params)
- for rtpm in self.runtime_parameters_managers:
- rtpm.commit(params)
-
- #Runtime parameter getters/setters
- def get_sysfile_values(self):
- return self._written_sysfiles
-
- def set_sysfile_values(self, params):
- for sysfile, value in params.iteritems():
- verify = not sysfile.endswith('!')
- sysfile = sysfile.rstrip('!')
- self._written_sysfiles.append((sysfile, value))
- self.target.write_value(sysfile, value, verify=verify)
diff --git a/wlauto/core/entry_point.py b/wlauto/core/entry_point.py
deleted file mode 100644
index d3dea0f0..00000000
--- a/wlauto/core/entry_point.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import sys
-import argparse
-import logging
-import os
-import subprocess
-import warnings
-
-from wlauto.core import pluginloader
-from wlauto.core.command import init_argument_parser
-from wlauto.core.configuration import settings
-from wlauto.core.configuration.manager import ConfigManager
-from wlauto.core.host import init_user_directory
-from wlauto.exceptions import WAError, DevlibError, ConfigError
-from wlauto.utils.doc import format_body
-from wlauto.utils.log import init_logging
-from wlauto.utils.misc import get_traceback
-
-warnings.filterwarnings(action='ignore', category=UserWarning, module='zope')
-
-
-logger = logging.getLogger('command_line')
-
-
-def load_commands(subparsers):
- commands = {}
- for command in pluginloader.list_commands():
- commands[command.name] = pluginloader.get_command(command.name,
- subparsers=subparsers)
- return commands
-
-
-def main():
- config = ConfigManager()
-
- if not os.path.exists(settings.user_directory):
- init_user_directory()
-
- try:
-
- description = ("Execute automated workloads on a remote device and process "
- "the resulting output.\n\nUse \"wa <subcommand> -h\" to see "
- "help for individual subcommands.")
- parser = argparse.ArgumentParser(description=format_body(description, 80),
- prog='wa',
- formatter_class=argparse.RawDescriptionHelpFormatter,
- )
- init_argument_parser(parser)
- # each command will add its own subparser
- commands = load_commands(parser.add_subparsers(dest='command'))
-
- args = parser.parse_args()
-
- settings.set("verbosity", args.verbose)
-
- config.load_config_file(settings.user_config_file)
- for config_file in args.config:
- if not os.path.exists(config_file):
- raise ConfigError("Config file {} not found".format(config_file))
- config.load_config_file(config_file)
-
- init_logging(settings.verbosity)
-
- command = commands[args.command]
- sys.exit(command.execute(config, args))
-
- except KeyboardInterrupt:
- logging.info('Got CTRL-C. Aborting.')
- sys.exit(3)
- except (WAError, DevlibError) as e:
- logging.critical(e)
- sys.exit(1)
- except subprocess.CalledProcessError as e:
- tb = get_traceback()
- logging.critical(tb)
- command = e.cmd
- if e.args:
- command = '{} {}'.format(command, ' '.join(e.args))
- message = 'Command \'{}\' returned non-zero exit status {}\nOUTPUT:\n{}\n'
- logging.critical(message.format(command, e.returncode, e.output))
- sys.exit(2)
- except SyntaxError as e:
- tb = get_traceback()
- logging.critical(tb)
- message = 'Syntax Error in {}, line {}, offset {}:'
- logging.critical(message.format(e.filename, e.lineno, e.offset))
- logging.critical('\t{}'.format(e.msg))
- sys.exit(2)
- except Exception as e: # pylint: disable=broad-except
- tb = get_traceback()
- logging.critical(tb)
- logging.critical('{}({})'.format(e.__class__.__name__, e))
- sys.exit(2)
diff --git a/wlauto/core/execution.py b/wlauto/core/execution.py
deleted file mode 100644
index 3ac3a2dd..00000000
--- a/wlauto/core/execution.py
+++ /dev/null
@@ -1,875 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=no-member
-
-"""
-This module contains the execution logic for Workload Automation. It defines the
-following actors:
-
- WorkloadSpec: Identifies the workload to be run and defines parameters under
- which it should be executed.
-
- Executor: Responsible for the overall execution process. It instantiates
- and/or intialises the other actors, does any necessary vaidation
- and kicks off the whole process.
-
- Execution Context: Provides information about the current state of run
- execution to instrumentation.
-
- RunInfo: Information about the current run.
-
- Runner: This executes workload specs that are passed to it. It goes through
- stages of execution, emitting an appropriate signal at each step to
- allow instrumentation to do its stuff.
-
-"""
-import logging
-import os
-import random
-import subprocess
-import uuid
-from collections import Counter, defaultdict, OrderedDict
-from contextlib import contextmanager
-from copy import copy
-from datetime import datetime
-from itertools import izip_longest
-
-import wlauto.core.signal as signal
-from wlauto.core import instrumentation
-from wlauto.core import pluginloader
-from wlauto.core.configuration import settings
-from wlauto.core.device_manager import TargetInfo
-from wlauto.core.plugin import Artifact
-from wlauto.core.resolver import ResourceResolver
-from wlauto.core.result import ResultManager, IterationResult, RunResult
-from wlauto.exceptions import (WAError, ConfigError, TimeoutError, InstrumentError,
- DeviceError, DeviceNotRespondingError)
-from wlauto.utils.misc import (ensure_directory_exists as _d,
- get_traceback, format_duration)
-from wlauto.utils.serializer import json
-
-
-# The maximum number of reboot attempts for an iteration.
-MAX_REBOOT_ATTEMPTS = 3
-
-# If something went wrong during device initialization, wait this
-# long (in seconds) before retrying. This is necessary, as retrying
-# immediately may not give the device enough time to recover to be able
-# to reboot.
-REBOOT_DELAY = 3
-
-
-class ExecutionContext(object):
-
-
- def __init__(self, cm, tm, output):
- self.logger = logging.getLogger('ExecContext')
- self.cm = cm
- self.tm = tm
- self.output = output
- self.logger.debug('Loading resource discoverers')
- self.resolver = ResourceResolver(cm)
- self.resolver.load()
-
-
-class OldExecutionContext(object):
- """
- Provides a context for instrumentation. Keeps track of things like
- current workload and iteration.
-
- This class also provides two status members that can be used by workloads
- and instrumentation to keep track of arbitrary state. ``result``
- is reset on each new iteration of a workload; run_status is maintained
- throughout a Workload Automation run.
-
- """
-
- # These are the artifacts generated by the core framework.
- default_run_artifacts = [
- Artifact('runlog', 'run.log', 'log', mandatory=True,
- description='The log for the entire run.'),
- ]
-
- @property
- def current_iteration(self):
- if self.current_job:
- spec_id = self.current_job.spec.id
- return self.job_iteration_counts[spec_id]
- else:
- return None
-
- @property
- def job_status(self):
- if not self.current_job:
- return None
- return self.current_job.result.status
-
- @property
- def workload(self):
- return getattr(self.spec, 'workload', None)
-
- @property
- def spec(self):
- return getattr(self.current_job, 'spec', None)
-
- @property
- def result(self):
- return getattr(self.current_job, 'result', self.run_result)
-
- def __init__(self, device_manager, config):
- self.device_manager = device_manager
- self.device = self.device_manager.target
- self.config = config
- self.reboot_policy = config.reboot_policy
- self.output_directory = None
- self.current_job = None
- self.resolver = None
- self.last_error = None
- self.run_info = None
- self.run_result = None
- self.run_output_directory = self.config.output_directory
- self.host_working_directory = self.config.meta_directory
- self.iteration_artifacts = None
- self.run_artifacts = copy(self.default_run_artifacts)
- self.job_iteration_counts = defaultdict(int)
- self.aborted = False
- self.runner = None
- if config.agenda.filepath:
- self.run_artifacts.append(Artifact('agenda',
- os.path.join(self.host_working_directory,
- os.path.basename(config.agenda.filepath)),
- 'meta',
- mandatory=True,
- description='Agenda for this run.'))
- for i, filepath in enumerate(settings.config_paths, 1):
- name = 'config_{}'.format(i)
- path = os.path.join(self.host_working_directory,
- name + os.path.splitext(filepath)[1])
- self.run_artifacts.append(Artifact(name,
- path,
- kind='meta',
- mandatory=True,
- description='Config file used for the run.'))
-
- def initialize(self):
- if not os.path.isdir(self.run_output_directory):
- os.makedirs(self.run_output_directory)
- self.output_directory = self.run_output_directory
- self.resolver = ResourceResolver(self.config)
- self.run_info = RunInfo(self.config)
- self.run_result = RunResult(self.run_info, self.run_output_directory)
-
- def next_job(self, job):
- """Invoked by the runner when starting a new iteration of workload execution."""
- self.current_job = job
- self.job_iteration_counts[self.spec.id] += 1
- if not self.aborted:
- outdir_name = '_'.join(map(str, [self.spec.label, self.spec.id, self.current_iteration]))
- self.output_directory = _d(os.path.join(self.run_output_directory, outdir_name))
- self.iteration_artifacts = [wa for wa in self.workload.artifacts]
- self.current_job.result.iteration = self.current_iteration
- self.current_job.result.output_directory = self.output_directory
-
- def end_job(self):
- if self.current_job.result.status == IterationResult.ABORTED:
- self.aborted = True
- self.current_job = None
- self.output_directory = self.run_output_directory
-
- def add_metric(self, *args, **kwargs):
- self.result.add_metric(*args, **kwargs)
-
- def add_artifact(self, name, path, kind, *args, **kwargs):
- if self.current_job is None:
- self.add_run_artifact(name, path, kind, *args, **kwargs)
- else:
- self.add_iteration_artifact(name, path, kind, *args, **kwargs)
-
- def add_run_artifact(self, name, path, kind, *args, **kwargs):
- path = _check_artifact_path(path, self.run_output_directory)
- self.run_artifacts.append(Artifact(name, path, kind, Artifact.ITERATION, *args, **kwargs))
-
- def add_iteration_artifact(self, name, path, kind, *args, **kwargs):
- path = _check_artifact_path(path, self.output_directory)
- self.iteration_artifacts.append(Artifact(name, path, kind, Artifact.RUN, *args, **kwargs))
-
- def get_artifact(self, name):
- if self.iteration_artifacts:
- for art in self.iteration_artifacts:
- if art.name == name:
- return art
- for art in self.run_artifacts:
- if art.name == name:
- return art
- return None
-
-
-def _check_artifact_path(path, rootpath):
- if path.startswith(rootpath):
- return os.path.abspath(path)
- rootpath = os.path.abspath(rootpath)
- full_path = os.path.join(rootpath, path)
- if not os.path.isfile(full_path):
- raise ValueError('Cannot add artifact because {} does not exist.'.format(full_path))
- return full_path
-
-
-class FakeTargetManager(object):
- # TODO: this is a FAKE
-
- def __init__(self, name, config):
- self.device_name = name
- self.device_config = config
-
- from devlib import LocalLinuxTarget
- self.target = LocalLinuxTarget({'unrooted': True})
-
- def get_target_info(self):
- return TargetInfo(self.target)
-
- def validate_runtime_parameters(self, params):
- pass
-
- def merge_runtime_parameters(self, params):
- pass
-
-
-def init_target_manager(config):
- return FakeTargetManager(config.device, config.device_config)
-
-
-class Executor(object):
- """
- The ``Executor``'s job is to set up the execution context and pass to a
- ``Runner`` along with a loaded run specification. Once the ``Runner`` has
- done its thing, the ``Executor`` performs some final reporint before
- returning.
-
- The initial context set up involves combining configuration from various
- sources, loading of requided workloads, loading and installation of
- instruments and result processors, etc. Static validation of the combined
- configuration is also performed.
-
- """
- # pylint: disable=R0915
-
- def __init__(self):
- self.logger = logging.getLogger('Executor')
- self.error_logged = False
- self.warning_logged = False
- pluginloader = None
- self.device_manager = None
- self.device = None
- self.context = None
-
- def execute(self, config_manager, output):
- """
- Execute the run specified by an agenda. Optionally, selectors may be
- used to only selecute a subset of the specified agenda.
-
- Params::
-
- :state: a ``ConfigManager`` containing processed configuraiton
- :output: an initialized ``RunOutput`` that will be used to
- store the results.
-
- """
- signal.connect(self._error_signalled_callback, signal.ERROR_LOGGED)
- signal.connect(self._warning_signalled_callback, signal.WARNING_LOGGED)
-
- self.logger.info('Initializing run')
- self.logger.debug('Finalizing run configuration.')
- config = config_manager.finalize()
- output.write_config(config)
-
- self.logger.info('Connecting to target')
- target_manager = init_target_manager(config.run_config)
- output.write_target_info(target_manager.get_target_info())
-
- self.logger.info('Initializing execution conetext')
- context = ExecutionContext(config_manager, target_manager, output)
-
- self.logger.info('Generating jobs')
- config_manager.generate_jobs(context)
- output.write_job_specs(config_manager.job_specs)
-
- self.logger.info('Installing instrumentation')
- for instrument in config_manager.get_instruments(target_manager.target):
- instrumentation.install(instrument)
- instrumentation.validate()
-
- def old_exec(self, agenda, selectors={}):
- self.config.set_agenda(agenda, selectors)
- self.config.finalize()
- config_outfile = os.path.join(self.config.meta_directory, 'run_config.json')
- with open(config_outfile, 'w') as wfh:
- json.dump(self.config, wfh)
-
- self.logger.debug('Initialising device configuration.')
- if not self.config.device:
- raise ConfigError('Make sure a device is specified in the config.')
- self.device_manager = pluginloader.get_manager(self.config.device,
- **self.config.device_config)
- self.device_manager.validate()
- self.device = self.device_manager.target
-
- self.context = ExecutionContext(self.device_manager, self.config)
-
- self.logger.debug('Loading resource discoverers.')
- self.context.initialize()
- self.context.resolver.load()
- self.context.add_artifact('run_config', config_outfile, 'meta')
-
- self.logger.debug('Installing instrumentation')
- for name, params in self.config.instrumentation.iteritems():
- instrument = pluginloader.get_instrument(name, self.device, **params)
- instrumentation.install(instrument)
- instrumentation.validate()
-
- self.logger.debug('Installing result processors')
- result_manager = ResultManager()
- for name, params in self.config.result_processors.iteritems():
- processor = pluginloader.get_result_processor(name, **params)
- result_manager.install(processor)
- result_manager.validate()
-
- self.logger.debug('Loading workload specs')
- for workload_spec in self.config.workload_specs:
- workload_spec.load(self.device, pluginloader)
- workload_spec.workload.init_resources(self.context)
- workload_spec.workload.validate()
-
- if self.config.flashing_config:
- if not self.device.flasher:
- msg = 'flashing_config specified for {} device that does not support flashing.'
- raise ConfigError(msg.format(self.device.name))
- self.logger.debug('Flashing the device')
- self.device.flasher.flash(self.device)
-
- self.logger.info('Running workloads')
- runner = self._get_runner(result_manager)
- runner.init_queue(self.config.workload_specs)
- runner.run()
- self.execute_postamble()
-
- def execute_postamble(self):
- """
- This happens after the run has completed. The overall results of the run are
- summarised to the user.
-
- """
- result = self.context.run_result
- counter = Counter()
- for ir in result.iteration_results:
- counter[ir.status] += 1
- self.logger.info('Done.')
- self.logger.info('Run duration: {}'.format(format_duration(self.context.run_info.duration)))
- status_summary = 'Ran a total of {} iterations: '.format(sum(self.context.job_iteration_counts.values()))
- parts = []
- for status in IterationResult.values:
- if status in counter:
- parts.append('{} {}'.format(counter[status], status))
- self.logger.info(status_summary + ', '.join(parts))
- self.logger.info('Results can be found in {}'.format(self.config.output_directory))
-
- if self.error_logged:
- self.logger.warn('There were errors during execution.')
- self.logger.warn('Please see {}'.format(self.config.log_file))
- elif self.warning_logged:
- self.logger.warn('There were warnings during execution.')
- self.logger.warn('Please see {}'.format(self.config.log_file))
-
- def _get_runner(self, result_manager):
- if not self.config.execution_order or self.config.execution_order == 'by_iteration':
- if self.config.reboot_policy == 'each_spec':
- self.logger.info('each_spec reboot policy with the default by_iteration execution order is '
- 'equivalent to each_iteration policy.')
- runnercls = ByIterationRunner
- elif self.config.execution_order in ['classic', 'by_spec']:
- runnercls = BySpecRunner
- elif self.config.execution_order == 'by_section':
- runnercls = BySectionRunner
- elif self.config.execution_order == 'random':
- runnercls = RandomRunner
- else:
- raise ConfigError('Unexpected execution order: {}'.format(self.config.execution_order))
- return runnercls(self.device_manager, self.context, result_manager)
-
- def _error_signalled_callback(self):
- self.error_logged = True
- signal.disconnect(self._error_signalled_callback, signal.ERROR_LOGGED)
-
- def _warning_signalled_callback(self):
- self.warning_logged = True
- signal.disconnect(self._warning_signalled_callback, signal.WARNING_LOGGED)
-
-
-class Runner(object):
- """
-
- """
-
-
-class RunnerJob(object):
- """
- Represents a single execution of a ``RunnerJobDescription``. There will be one created for each iteration
- specified by ``RunnerJobDescription.number_of_iterations``.
-
- """
-
- def __init__(self, spec, retry=0):
- self.spec = spec
- self.retry = retry
- self.iteration = None
- self.result = IterationResult(self.spec)
-
-
-class OldRunner(object):
- """
- This class is responsible for actually performing a workload automation
- run. The main responsibility of this class is to emit appropriate signals
- at the various stages of the run to allow things like traces an other
- instrumentation to hook into the process.
-
- This is an abstract base class that defines each step of the run, but not
- the order in which those steps are executed, which is left to the concrete
- derived classes.
-
- """
- class _RunnerError(Exception):
- """Internal runner error."""
- pass
-
- @property
- def config(self):
- return self.context.config
-
- @property
- def current_job(self):
- if self.job_queue:
- return self.job_queue[0]
- return None
-
- @property
- def previous_job(self):
- if self.completed_jobs:
- return self.completed_jobs[-1]
- return None
-
- @property
- def next_job(self):
- if self.job_queue:
- if len(self.job_queue) > 1:
- return self.job_queue[1]
- return None
-
- @property
- def spec_changed(self):
- if self.previous_job is None and self.current_job is not None: # Start of run
- return True
- if self.previous_job is not None and self.current_job is None: # End of run
- return True
- return self.current_job.spec.id != self.previous_job.spec.id
-
- @property
- def spec_will_change(self):
- if self.current_job is None and self.next_job is not None: # Start of run
- return True
- if self.current_job is not None and self.next_job is None: # End of run
- return True
- return self.current_job.spec.id != self.next_job.spec.id
-
- def __init__(self, device_manager, context, result_manager):
- self.device_manager = device_manager
- self.device = device_manager.target
- self.context = context
- self.result_manager = result_manager
- self.logger = logging.getLogger('Runner')
- self.job_queue = []
- self.completed_jobs = []
- self._initial_reset = True
-
- def init_queue(self, specs):
- raise NotImplementedError()
-
- def run(self): # pylint: disable=too-many-branches
- self._send(signal.RUN_START)
- self._initialize_run()
-
- try:
- while self.job_queue:
- try:
- self._init_job()
- self._run_job()
- except KeyboardInterrupt:
- self.current_job.result.status = IterationResult.ABORTED
- raise
- except Exception, e: # pylint: disable=broad-except
- self.current_job.result.status = IterationResult.FAILED
- self.current_job.result.add_event(e.message)
- if isinstance(e, DeviceNotRespondingError):
- self.logger.info('Device appears to be unresponsive.')
- if self.context.reboot_policy.can_reboot and self.device.can('reset_power'):
- self.logger.info('Attempting to hard-reset the device...')
- try:
- self.device.boot(hard=True)
- self.device.connect()
- except DeviceError: # hard_boot not implemented for the device.
- raise e
- else:
- raise e
- else: # not a DeviceNotRespondingError
- self.logger.error(e)
- finally:
- self._finalize_job()
- except KeyboardInterrupt:
- self.logger.info('Got CTRL-C. Finalizing run... (CTRL-C again to abort).')
- # Skip through the remaining jobs.
- while self.job_queue:
- self.context.next_job(self.current_job)
- self.current_job.result.status = IterationResult.ABORTED
- self._finalize_job()
- except DeviceNotRespondingError:
- self.logger.info('Device unresponsive and recovery not possible. Skipping the rest of the run.')
- self.context.aborted = True
- while self.job_queue:
- self.context.next_job(self.current_job)
- self.current_job.result.status = IterationResult.SKIPPED
- self._finalize_job()
-
- instrumentation.enable_all()
- self._finalize_run()
- self._process_results()
-
- self.result_manager.finalize(self.context)
- self._send(signal.RUN_END)
-
- def _initialize_run(self):
- self.context.runner = self
- self.context.run_info.start_time = datetime.utcnow()
- self._connect_to_device()
- self.logger.info('Initializing device')
- self.device_manager.initialize(self.context)
-
- self.logger.info('Initializing workloads')
- for workload_spec in self.context.config.workload_specs:
- workload_spec.workload.initialize(self.context)
-
- self.context.run_info.device_properties = self.device_manager.info
- self.result_manager.initialize(self.context)
- self._send(signal.RUN_INIT)
-
- if instrumentation.check_failures():
- raise InstrumentError('Detected failure(s) during instrumentation initialization.')
-
- def _connect_to_device(self):
- if self.context.reboot_policy.perform_initial_boot:
- try:
- self.device_manager.connect()
- except DeviceError: # device may be offline
- if self.device.can('reset_power'):
- with self._signal_wrap('INITIAL_BOOT'):
- self.device.boot(hard=True)
- else:
- raise DeviceError('Cannot connect to device for initial reboot; '
- 'and device does not support hard reset.')
- else: # successfully connected
- self.logger.info('\tBooting device')
- with self._signal_wrap('INITIAL_BOOT'):
- self._reboot_device()
- else:
- self.logger.info('Connecting to device')
- self.device_manager.connect()
-
- def _init_job(self):
- self.current_job.result.status = IterationResult.RUNNING
- self.context.next_job(self.current_job)
-
- def _run_job(self): # pylint: disable=too-many-branches
- spec = self.current_job.spec
- if not spec.enabled:
- self.logger.info('Skipping workload %s (iteration %s)', spec, self.context.current_iteration)
- self.current_job.result.status = IterationResult.SKIPPED
- return
-
- self.logger.info('Running workload %s (iteration %s)', spec, self.context.current_iteration)
- if spec.flash:
- if not self.context.reboot_policy.can_reboot:
- raise ConfigError('Cannot flash as reboot_policy does not permit rebooting.')
- if not self.device.can('flash'):
- raise DeviceError('Device does not support flashing.')
- self._flash_device(spec.flash)
- elif not self.completed_jobs:
- # Never reboot on the very fist job of a run, as we would have done
- # the initial reboot if a reboot was needed.
- pass
- elif self.context.reboot_policy.reboot_on_each_spec and self.spec_changed:
- self.logger.debug('Rebooting on spec change.')
- self._reboot_device()
- elif self.context.reboot_policy.reboot_on_each_iteration:
- self.logger.debug('Rebooting on iteration.')
- self._reboot_device()
-
- instrumentation.disable_all()
- instrumentation.enable(spec.instrumentation)
- self.device_manager.start()
-
- if self.spec_changed:
- self._send(signal.WORKLOAD_SPEC_START)
- self._send(signal.ITERATION_START)
-
- try:
- setup_ok = False
- with self._handle_errors('Setting up device parameters'):
- self.device_manager.set_runtime_parameters(spec.runtime_parameters)
- setup_ok = True
-
- if setup_ok:
- with self._handle_errors('running {}'.format(spec.workload.name)):
- self.current_job.result.status = IterationResult.RUNNING
- self._run_workload_iteration(spec.workload)
- else:
- self.logger.info('\tSkipping the rest of the iterations for this spec.')
- spec.enabled = False
- except KeyboardInterrupt:
- self._send(signal.ITERATION_END)
- self._send(signal.WORKLOAD_SPEC_END)
- raise
- else:
- self._send(signal.ITERATION_END)
- if self.spec_will_change or not spec.enabled:
- self._send(signal.WORKLOAD_SPEC_END)
- finally:
- self.device_manager.stop()
-
- def _finalize_job(self):
- self.context.run_result.iteration_results.append(self.current_job.result)
- job = self.job_queue.pop(0)
- job.iteration = self.context.current_iteration
- if job.result.status in self.config.retry_on_status:
- if job.retry >= self.config.max_retries:
- self.logger.error('Exceeded maxium number of retries. Abandoning job.')
- else:
- self.logger.info('Job status was {}. Retrying...'.format(job.result.status))
- retry_job = RunnerJob(job.spec, job.retry + 1)
- self.job_queue.insert(0, retry_job)
- self.completed_jobs.append(job)
- self.context.end_job()
-
- def _finalize_run(self):
- self.logger.info('Finalizing workloads')
- for workload_spec in self.context.config.workload_specs:
- workload_spec.workload.finalize(self.context)
-
- self.logger.info('Finalizing.')
- self._send(signal.RUN_FIN)
-
- with self._handle_errors('Disconnecting from the device'):
- self.device.disconnect()
-
- info = self.context.run_info
- info.end_time = datetime.utcnow()
- info.duration = info.end_time - info.start_time
-
- def _process_results(self):
- self.logger.info('Processing overall results')
- with self._signal_wrap('OVERALL_RESULTS_PROCESSING'):
- if instrumentation.check_failures():
- self.context.run_result.non_iteration_errors = True
- self.result_manager.process_run_result(self.context.run_result, self.context)
-
- def _run_workload_iteration(self, workload):
- self.logger.info('\tSetting up')
- with self._signal_wrap('WORKLOAD_SETUP'):
- try:
- workload.setup(self.context)
- except:
- self.logger.info('\tSkipping the rest of the iterations for this spec.')
- self.current_job.spec.enabled = False
- raise
- try:
-
- self.logger.info('\tExecuting')
- with self._handle_errors('Running workload'):
- with self._signal_wrap('WORKLOAD_EXECUTION'):
- workload.run(self.context)
-
- self.logger.info('\tProcessing result')
- self._send(signal.BEFORE_WORKLOAD_RESULT_UPDATE)
- try:
- if self.current_job.result.status != IterationResult.FAILED:
- with self._handle_errors('Processing workload result',
- on_error_status=IterationResult.PARTIAL):
- workload.update_result(self.context)
- self._send(signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE)
-
- if self.current_job.result.status == IterationResult.RUNNING:
- self.current_job.result.status = IterationResult.OK
- finally:
- self._send(signal.AFTER_WORKLOAD_RESULT_UPDATE)
-
- finally:
- self.logger.info('\tTearing down')
- with self._handle_errors('Tearing down workload',
- on_error_status=IterationResult.NONCRITICAL):
- with self._signal_wrap('WORKLOAD_TEARDOWN'):
- workload.teardown(self.context)
- self.result_manager.add_result(self.current_job.result, self.context)
-
- def _flash_device(self, flashing_params):
- with self._signal_wrap('FLASHING'):
- self.device.flash(**flashing_params)
- self.device.connect()
-
- def _reboot_device(self):
- with self._signal_wrap('BOOT'):
- for reboot_attempts in xrange(MAX_REBOOT_ATTEMPTS):
- if reboot_attempts:
- self.logger.info('\tRetrying...')
- with self._handle_errors('Rebooting device'):
- self.device.boot(**self.current_job.spec.boot_parameters)
- break
- else:
- raise DeviceError('Could not reboot device; max reboot attempts exceeded.')
- self.device.connect()
-
- def _send(self, s):
- signal.send(s, self, self.context)
-
- def _take_screenshot(self, filename):
- if self.context.output_directory:
- filepath = os.path.join(self.context.output_directory, filename)
- else:
- filepath = os.path.join(settings.output_directory, filename)
- self.device.capture_screen(filepath)
-
- @contextmanager
- def _handle_errors(self, action, on_error_status=IterationResult.FAILED):
- try:
- if action is not None:
- self.logger.debug(action)
- yield
- except (KeyboardInterrupt, DeviceNotRespondingError):
- raise
- except (WAError, TimeoutError), we:
- self.device.check_responsive()
- if self.current_job:
- self.current_job.result.status = on_error_status
- self.current_job.result.add_event(str(we))
- try:
- self._take_screenshot('error.png')
- except Exception, e: # pylint: disable=W0703
- # We're already in error state, so the fact that taking a
- # screenshot failed is not surprising...
- pass
- if action:
- action = action[0].lower() + action[1:]
- self.logger.error('Error while {}:\n\t{}'.format(action, we))
- except Exception, e: # pylint: disable=W0703
- error_text = '{}("{}")'.format(e.__class__.__name__, e)
- if self.current_job:
- self.current_job.result.status = on_error_status
- self.current_job.result.add_event(error_text)
- self.logger.error('Error while {}'.format(action))
- self.logger.error(error_text)
- if isinstance(e, subprocess.CalledProcessError):
- self.logger.error('Got:')
- self.logger.error(e.output)
- tb = get_traceback()
- self.logger.error(tb)
-
- @contextmanager
- def _signal_wrap(self, signal_name):
- """Wraps the suite in before/after signals, ensuring
- that after signal is always sent."""
- before_signal = getattr(signal, 'BEFORE_' + signal_name)
- success_signal = getattr(signal, 'SUCCESSFUL_' + signal_name)
- after_signal = getattr(signal, 'AFTER_' + signal_name)
- try:
- self._send(before_signal)
- yield
- self._send(success_signal)
- finally:
- self._send(after_signal)
-
-
-class BySpecRunner(Runner):
- """
- This is that "classic" implementation that executes all iterations of a workload
- spec before proceeding onto the next spec.
-
- """
-
- def init_queue(self, specs):
- jobs = [[RunnerJob(s) for _ in xrange(s.number_of_iterations)] for s in specs] # pylint: disable=unused-variable
- self.job_queue = [j for spec_jobs in jobs for j in spec_jobs]
-
-
-class BySectionRunner(Runner):
- """
- Runs the first iteration for all benchmarks first, before proceeding to the next iteration,
- i.e. A1, B1, C1, A2, B2, C2... instead of A1, A1, B1, B2, C1, C2...
-
- If multiple sections where specified in the agenda, this will run all specs for the first section
- followed by all specs for the seciod section, etc.
-
- e.g. given sections X and Y, and global specs A and B, with 2 iterations, this will run
-
- X.A1, X.B1, Y.A1, Y.B1, X.A2, X.B2, Y.A2, Y.B2
-
- """
-
- def init_queue(self, specs):
- jobs = [[RunnerJob(s) for _ in xrange(s.number_of_iterations)] for s in specs]
- self.job_queue = [j for spec_jobs in izip_longest(*jobs) for j in spec_jobs if j]
-
-
-class ByIterationRunner(Runner):
- """
- Runs the first iteration for all benchmarks first, before proceeding to the next iteration,
- i.e. A1, B1, C1, A2, B2, C2... instead of A1, A1, B1, B2, C1, C2...
-
- If multiple sections where specified in the agenda, this will run all sections for the first global
- spec first, followed by all sections for the second spec, etc.
-
- e.g. given sections X and Y, and global specs A and B, with 2 iterations, this will run
-
- X.A1, Y.A1, X.B1, Y.B1, X.A2, Y.A2, X.B2, Y.B2
-
- """
-
- def init_queue(self, specs):
- sections = OrderedDict()
- for s in specs:
- if s.section_id not in sections:
- sections[s.section_id] = []
- sections[s.section_id].append(s)
- specs = [s for section_specs in izip_longest(*sections.values()) for s in section_specs if s]
- jobs = [[RunnerJob(s) for _ in xrange(s.number_of_iterations)] for s in specs]
- self.job_queue = [j for spec_jobs in izip_longest(*jobs) for j in spec_jobs if j]
-
-
-class RandomRunner(Runner):
- """
- This will run specs in a random order.
-
- """
-
- def init_queue(self, specs):
- jobs = [[RunnerJob(s) for _ in xrange(s.number_of_iterations)] for s in specs] # pylint: disable=unused-variable
- all_jobs = [j for spec_jobs in jobs for j in spec_jobs]
- random.shuffle(all_jobs)
- self.job_queue = all_jobs
diff --git a/wlauto/core/exttype.py b/wlauto/core/exttype.py
deleted file mode 100644
index 5d7a7617..00000000
--- a/wlauto/core/exttype.py
+++ /dev/null
@@ -1,32 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# Separate module to avoid circular dependencies
-from wlauto.core.configuration import settings
-from wlauto.core.plugin import Plugin
-from wlauto.utils.misc import load_class
-from wlauto.core import pluginloader
-
-
-def get_plugin_type(ext):
- """Given an instance of ``wlauto.core.Plugin``, return a string representing
- the type of the plugin (e.g. ``'workload'`` for a Workload subclass instance)."""
- if not isinstance(ext, Plugin):
- raise ValueError('{} is not an instance of Plugin'.format(ext))
- for name, cls in pluginloaderkind_map.iteritems():
- if isinstance(ext, cls):
- return name
- raise ValueError('Unknown plugin type: {}'.format(ext.__class__.__name__))
diff --git a/wlauto/core/host.py b/wlauto/core/host.py
deleted file mode 100644
index 33810b93..00000000
--- a/wlauto/core/host.py
+++ /dev/null
@@ -1,33 +0,0 @@
-import os
-
-from wlauto.core.configuration import settings
-
-def init_user_directory(overwrite_existing=False): # pylint: disable=R0914
- """
- Initialise a fresh user directory.
- """
- if os.path.exists(settings.user_directory):
- if not overwrite_existing:
- raise RuntimeError('Environment {} already exists.'.format(settings.user_directory))
- shutil.rmtree(settings.user_directory)
-
- os.makedirs(settings.user_directory)
- os.makedirs(settings.dependencies_directory)
- os.makedirs(settings.plugins_directory)
-
- # TODO: generate default config.yaml here
-
- if os.getenv('USER') == 'root':
- # If running with sudo on POSIX, change the ownership to the real user.
- real_user = os.getenv('SUDO_USER')
- if real_user:
- import pwd # done here as module won't import on win32
- user_entry = pwd.getpwnam(real_user)
- uid, gid = user_entry.pw_uid, user_entry.pw_gid
- os.chown(settings.user_directory, uid, gid)
- # why, oh why isn't there a recusive=True option for os.chown?
- for root, dirs, files in os.walk(settings.user_directory):
- for d in dirs:
- os.chown(os.path.join(root, d), uid, gid)
- for f in files:
- os.chown(os.path.join(root, f), uid, gid)
diff --git a/wlauto/core/instrumentation.py b/wlauto/core/instrumentation.py
deleted file mode 100644
index 6bba95c5..00000000
--- a/wlauto/core/instrumentation.py
+++ /dev/null
@@ -1,399 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-Adding New Instrument
-=====================
-
-Any new instrument should be a subclass of Instrument and it must have a name.
-When a new instrument is added to Workload Automation, the methods of the new
-instrument will be found automatically and hooked up to the supported signals.
-Once a signal is broadcasted, the corresponding registered method is invoked.
-
-Each method in Instrument must take two arguments, which are self and context.
-Supported signals can be found in [... link to signals ...] To make
-implementations easier and common, the basic steps to add new instrument is
-similar to the steps to add new workload.
-
-Hence, the following methods are sufficient to implement to add new instrument:
-
- - setup: This method is invoked after the workload is setup. All the
- necessary setups should go inside this method. Setup, includes operations
- like, pushing the files to the target device, install them, clear logs,
- etc.
- - start: It is invoked just before the workload start execution. Here is
- where instrument measures start being registered/taken.
- - stop: It is invoked just after the workload execution stops. The measures
- should stop being taken/registered.
- - update_result: It is invoked after the workload updated its result.
- update_result is where the taken measures are added to the result so it
- can be processed by Workload Automation.
- - teardown is invoked after the workload is teared down. It is a good place
- to clean any logs generated by the instrument.
-
-For example, to add an instrument which will trace device errors, we subclass
-Instrument and overwrite the variable name.::
-
- #BINARY_FILE = os.path.join(os.path.dirname(__file__), 'trace')
- class TraceErrorsInstrument(Instrument):
-
- name = 'trace-errors'
-
- def __init__(self, device):
- super(TraceErrorsInstrument, self).__init__(device)
- self.trace_on_device = os.path.join(self.device.working_directory, 'trace')
-
-We then declare and implement the aforementioned methods. For the setup method,
-we want to push the file to the target device and then change the file mode to
-755 ::
-
- def setup(self, context):
- self.device.push(BINARY_FILE, self.device.working_directory)
- self.device.execute('chmod 755 {}'.format(self.trace_on_device))
-
-Then we implemented the start method, which will simply run the file to start
-tracing. ::
-
- def start(self, context):
- self.device.execute('{} start'.format(self.trace_on_device))
-
-Lastly, we need to stop tracing once the workload stops and this happens in the
-stop method::
-
- def stop(self, context):
- self.device.execute('{} stop'.format(self.trace_on_device))
-
-The generated result can be updated inside update_result, or if it is trace, we
-just pull the file to the host device. context has a result variable which
-has add_metric method. It can be used to add the instrumentation results metrics
-to the final result for the workload. The method can be passed 4 params, which
-are metric key, value, unit and lower_is_better, which is a boolean. ::
-
- def update_result(self, context):
- # pull the trace file to the device
- result = os.path.join(self.device.working_directory, 'trace.txt')
- self.device.pull(result, context.working_directory)
-
- # parse the file if needs to be parsed, or add result to
- # context.result
-
-At the end, we might want to delete any files generated by the instrumentation
-and the code to clear these file goes in teardown method. ::
-
- def teardown(self, context):
- self.device.remove(os.path.join(self.device.working_directory, 'trace.txt'))
-
-"""
-
-import logging
-import inspect
-from collections import OrderedDict
-
-import wlauto.core.signal as signal
-from wlauto.core.plugin import Plugin
-from wlauto.exceptions import WAError, DeviceNotRespondingError, TimeoutError
-from wlauto.utils.misc import get_traceback, isiterable
-from wlauto.utils.types import identifier
-
-
-logger = logging.getLogger('instrumentation')
-
-
-# Maps method names onto signals the should be registered to.
-# Note: the begin/end signals are paired -- if a begin_ signal is sent,
-# then the corresponding end_ signal is guaranteed to also be sent.
-# Note: using OrderedDict to preserve logical ordering for the table generated
-# in the documentation
-SIGNAL_MAP = OrderedDict([
- # Below are "aliases" for some of the more common signals to allow
- # instrumentation to have similar structure to workloads
- ('initialize', signal.RUN_INIT),
- ('setup', signal.SUCCESSFUL_WORKLOAD_SETUP),
- ('start', signal.BEFORE_WORKLOAD_EXECUTION),
- ('stop', signal.AFTER_WORKLOAD_EXECUTION),
- ('process_workload_result', signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE),
- ('update_result', signal.AFTER_WORKLOAD_RESULT_UPDATE),
- ('teardown', signal.AFTER_WORKLOAD_TEARDOWN),
- ('finalize', signal.RUN_FIN),
-
- ('on_run_start', signal.RUN_START),
- ('on_run_end', signal.RUN_END),
- ('on_workload_spec_start', signal.WORKLOAD_SPEC_START),
- ('on_workload_spec_end', signal.WORKLOAD_SPEC_END),
- ('on_iteration_start', signal.ITERATION_START),
- ('on_iteration_end', signal.ITERATION_END),
-
- ('before_initial_boot', signal.BEFORE_INITIAL_BOOT),
- ('on_successful_initial_boot', signal.SUCCESSFUL_INITIAL_BOOT),
- ('after_initial_boot', signal.AFTER_INITIAL_BOOT),
- ('before_first_iteration_boot', signal.BEFORE_FIRST_ITERATION_BOOT),
- ('on_successful_first_iteration_boot', signal.SUCCESSFUL_FIRST_ITERATION_BOOT),
- ('after_first_iteration_boot', signal.AFTER_FIRST_ITERATION_BOOT),
- ('before_boot', signal.BEFORE_BOOT),
- ('on_successful_boot', signal.SUCCESSFUL_BOOT),
- ('after_boot', signal.AFTER_BOOT),
-
- ('on_spec_init', signal.SPEC_INIT),
- ('on_run_init', signal.RUN_INIT),
- ('on_iteration_init', signal.ITERATION_INIT),
-
- ('before_workload_setup', signal.BEFORE_WORKLOAD_SETUP),
- ('on_successful_workload_setup', signal.SUCCESSFUL_WORKLOAD_SETUP),
- ('after_workload_setup', signal.AFTER_WORKLOAD_SETUP),
- ('before_workload_execution', signal.BEFORE_WORKLOAD_EXECUTION),
- ('on_successful_workload_execution', signal.SUCCESSFUL_WORKLOAD_EXECUTION),
- ('after_workload_execution', signal.AFTER_WORKLOAD_EXECUTION),
- ('before_workload_result_update', signal.BEFORE_WORKLOAD_RESULT_UPDATE),
- ('on_successful_workload_result_update', signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE),
- ('after_workload_result_update', signal.AFTER_WORKLOAD_RESULT_UPDATE),
- ('before_workload_teardown', signal.BEFORE_WORKLOAD_TEARDOWN),
- ('on_successful_workload_teardown', signal.SUCCESSFUL_WORKLOAD_TEARDOWN),
- ('after_workload_teardown', signal.AFTER_WORKLOAD_TEARDOWN),
-
- ('before_overall_results_processing', signal.BEFORE_OVERALL_RESULTS_PROCESSING),
- ('on_successful_overall_results_processing', signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING),
- ('after_overall_results_processing', signal.AFTER_OVERALL_RESULTS_PROCESSING),
-
- ('on_error', signal.ERROR_LOGGED),
- ('on_warning', signal.WARNING_LOGGED),
-])
-
-PRIORITY_MAP = OrderedDict([
- ('very_fast_', 20),
- ('fast_', 10),
- ('normal_', 0),
- ('slow_', -10),
- ('very_slow_', -20),
-])
-
-installed = []
-
-
-def is_installed(instrument):
- if isinstance(instrument, Instrument):
- if instrument in installed:
- return True
- if instrument.name in [i.name for i in installed]:
- return True
- elif isinstance(instrument, type):
- if instrument in [i.__class__ for i in installed]:
- return True
- else: # assume string
- if identifier(instrument) in [identifier(i.name) for i in installed]:
- return True
- return False
-
-
-def is_enabled(instrument):
- if isinstance(instrument, Instrument) or isinstance(instrument, type):
- name = instrument.name
- else: # assume string
- name = instrument
- try:
- installed_instrument = get_instrument(name)
- return installed_instrument.is_enabled
- except ValueError:
- return False
-
-
-failures_detected = False
-
-
-def reset_failures():
- global failures_detected # pylint: disable=W0603
- failures_detected = False
-
-
-def check_failures():
- result = failures_detected
- reset_failures()
- return result
-
-
-class ManagedCallback(object):
- """
- This wraps instruments' callbacks to ensure that errors do interfer
- with run execution.
-
- """
-
- def __init__(self, instrument, callback):
- self.instrument = instrument
- self.callback = callback
-
- def __call__(self, context):
- if self.instrument.is_enabled:
- try:
- self.callback(context)
- except (KeyboardInterrupt, DeviceNotRespondingError, TimeoutError): # pylint: disable=W0703
- raise
- except Exception as e: # pylint: disable=W0703
- logger.error('Error in insturment {}'.format(self.instrument.name))
- global failures_detected # pylint: disable=W0603
- failures_detected = True
- if isinstance(e, WAError):
- logger.error(e)
- else:
- tb = get_traceback()
- logger.error(tb)
- logger.error('{}({})'.format(e.__class__.__name__, e))
- if not context.current_iteration:
- # Error occureed outside of an iteration (most likely
- # during intial setup or teardown). Since this would affect
- # the rest of the run, mark the instument as broken so that
- # it doesn't get re-enabled for subsequent iterations.
- self.instrument.is_broken = True
- disable(self.instrument)
-
-
-# Need this to keep track of callbacks, because the dispatcher only keeps
-# weak references, so if the callbacks aren't referenced elsewhere, they will
-# be deallocated before they've had a chance to be invoked.
-_callbacks = []
-
-
-def install(instrument):
- """
- This will look for methods (or any callable members) with specific names
- in the instrument and hook them up to the corresponding signals.
-
- :param instrument: Instrument instance to install.
-
- """
- logger.debug('Installing instrument %s.', instrument)
- if is_installed(instrument):
- raise ValueError('Instrument {} is already installed.'.format(instrument.name))
- for attr_name in dir(instrument):
- priority = 0
- stripped_attr_name = attr_name
- for key, value in PRIORITY_MAP.iteritems():
- if attr_name.startswith(key):
- stripped_attr_name = attr_name[len(key):]
- priority = value
- break
- if stripped_attr_name in SIGNAL_MAP:
- attr = getattr(instrument, attr_name)
- if not callable(attr):
- raise ValueError('Attribute {} not callable in {}.'.format(attr_name, instrument))
- argspec = inspect.getargspec(attr)
- arg_num = len(argspec.args)
- # Instrument callbacks will be passed exactly two arguments: self
- # (the instrument instance to which the callback is bound) and
- # context. However, we also allow callbacks to capture the context
- # in variable arguments (declared as "*args" in the definition).
- if arg_num > 2 or (arg_num < 2 and argspec.varargs is None):
- message = '{} must take exactly 2 positional arguments; {} given.'
- raise ValueError(message.format(attr_name, arg_num))
-
- logger.debug('\tConnecting %s to %s', attr.__name__, SIGNAL_MAP[stripped_attr_name])
- mc = ManagedCallback(instrument, attr)
- _callbacks.append(mc)
- signal.connect(mc, SIGNAL_MAP[stripped_attr_name], priority=priority)
- installed.append(instrument)
-
-
-def uninstall(instrument):
- instrument = get_instrument(instrument)
- installed.remove(instrument)
-
-
-def validate():
- for instrument in installed:
- instrument.validate()
-
-
-def get_instrument(inst):
- if isinstance(inst, Instrument):
- return inst
- for installed_inst in installed:
- if identifier(installed_inst.name) == identifier(inst):
- return installed_inst
- raise ValueError('Instrument {} is not installed'.format(inst))
-
-
-def disable_all():
- for instrument in installed:
- _disable_instrument(instrument)
-
-
-def enable_all():
- for instrument in installed:
- _enable_instrument(instrument)
-
-
-def enable(to_enable):
- if isiterable(to_enable):
- for inst in to_enable:
- _enable_instrument(inst)
- else:
- _enable_instrument(to_enable)
-
-
-def disable(to_disable):
- if isiterable(to_disable):
- for inst in to_disable:
- _disable_instrument(inst)
- else:
- _disable_instrument(to_disable)
-
-
-def _enable_instrument(inst):
- inst = get_instrument(inst)
- if not inst.is_broken:
- logger.debug('Enabling instrument {}'.format(inst.name))
- inst.is_enabled = True
- else:
- logger.debug('Not enabling broken instrument {}'.format(inst.name))
-
-
-def _disable_instrument(inst):
- inst = get_instrument(inst)
- if inst.is_enabled:
- logger.debug('Disabling instrument {}'.format(inst.name))
- inst.is_enabled = False
-
-
-def get_enabled():
- return [i for i in installed if i.is_enabled]
-
-
-def get_disabled():
- return [i for i in installed if not i.is_enabled]
-
-
-class Instrument(Plugin):
- """
- Base class for instrumentation implementations.
- """
- kind = "instrument"
-
- def __init__(self, target, **kwargs):
- super(Instrument, self).__init__(**kwargs)
- self.target = target
- self.is_enabled = True
- self.is_broken = False
-
- def initialize(self, context):
- pass
-
- def finalize(self, context):
- pass
-
- def __str__(self):
- return self.name
-
- def __repr__(self):
- return 'Instrument({})'.format(self.name)
diff --git a/wlauto/core/output.py b/wlauto/core/output.py
deleted file mode 100644
index 77d5853e..00000000
--- a/wlauto/core/output.py
+++ /dev/null
@@ -1,188 +0,0 @@
-import logging
-import os
-import shutil
-import string
-import sys
-import uuid
-from copy import copy
-
-from wlauto.core.configuration.configuration import JobSpec
-from wlauto.core.configuration.manager import ConfigManager
-from wlauto.core.device_manager import TargetInfo
-from wlauto.utils.misc import touch
-from wlauto.utils.serializer import write_pod, read_pod
-
-
-logger = logging.getLogger('output')
-
-
-class RunInfo(object):
- """
- Information about the current run, such as its unique ID, run
- time, etc.
-
- """
- @staticmethod
- def from_pod(pod):
- uid = pod.pop('uuid')
- if uid is not None:
- uid = uuid.UUID(uid)
- instance = RunInfo(**pod)
- instance.uuid = uid
- return instance
-
- def __init__(self, run_name=None, project=None, project_stage=None,
- start_time=None, end_time=None, duration=None):
- self.uuid = uuid.uuid4()
- self.run_name = None
- self.project = None
- self.project_stage = None
- self.start_time = None
- self.end_time = None
- self.duration = None
-
- def to_pod(self):
- d = copy(self.__dict__)
- d['uuid'] = str(self.uuid)
- return d
-
-
-class RunState(object):
- """
- Represents the state of a WA run.
-
- """
- @staticmethod
- def from_pod(pod):
- return RunState()
-
- def __init__(self):
- pass
-
- def to_pod(self):
- return {}
-
-
-class RunOutput(object):
-
- @property
- def logfile(self):
- return os.path.join(self.basepath, 'run.log')
-
- @property
- def metadir(self):
- return os.path.join(self.basepath, '__meta')
-
- @property
- def infofile(self):
- return os.path.join(self.metadir, 'run_info.json')
-
- @property
- def statefile(self):
- return os.path.join(self.basepath, '.run_state.json')
-
- @property
- def configfile(self):
- return os.path.join(self.metadir, 'config.json')
-
- @property
- def targetfile(self):
- return os.path.join(self.metadir, 'target_info.json')
-
- @property
- def jobsfile(self):
- return os.path.join(self.metadir, 'jobs.json')
-
- @property
- def raw_config_dir(self):
- return os.path.join(self.metadir, 'raw_config')
-
- def __init__(self, path):
- self.basepath = path
- self.info = None
- self.state = None
- if (not os.path.isfile(self.statefile) or
- not os.path.isfile(self.infofile)):
- msg = '"{}" does not exist or is not a valid WA output directory.'
- raise ValueError(msg.format(self.basepath))
- self.reload()
-
- def reload(self):
- self.info = RunInfo.from_pod(read_pod(self.infofile))
- self.state = RunState.from_pod(read_pod(self.statefile))
-
- def write_info(self):
- write_pod(self.info.to_pod(), self.infofile)
-
- def write_state(self):
- write_pod(self.state.to_pod(), self.statefile)
-
- def write_config(self, config):
- write_pod(config.to_pod(), self.configfile)
-
- def read_config(self):
- if not os.path.isfile(self.configfile):
- return None
- return ConfigManager.from_pod(read_pod(self.configfile))
-
- def write_target_info(self, ti):
- write_pod(ti.to_pod(), self.targetfile)
-
- def read_config(self):
- if not os.path.isfile(self.targetfile):
- return None
- return TargetInfo.from_pod(read_pod(self.targetfile))
-
- def write_job_specs(self, job_specs):
- job_specs[0].to_pod()
- js_pod = {'jobs': [js.to_pod() for js in job_specs]}
- write_pod(js_pod, self.jobsfile)
-
- def read_job_specs(self):
- if not os.path.isfile(self.jobsfile):
- return None
- pod = read_pod(self.jobsfile)
- return [JobSpec.from_pod(jp) for jp in pod['jobs']]
-
-
-def init_wa_output(path, wa_state, force=False):
- if os.path.exists(path):
- if force:
- logger.info('Removing existing output directory.')
- shutil.rmtree(os.path.abspath(path))
- else:
- raise RuntimeError('path exists: {}'.format(path))
-
- logger.info('Creating output directory.')
- os.makedirs(path)
- meta_dir = os.path.join(path, '__meta')
- os.makedirs(meta_dir)
- _save_raw_config(meta_dir, wa_state)
- touch(os.path.join(path, 'run.log'))
-
- info = RunInfo(
- run_name=wa_state.run_config.run_name,
- project=wa_state.run_config.project,
- project_stage=wa_state.run_config.project_stage,
- )
- write_pod(info.to_pod(), os.path.join(meta_dir, 'run_info.json'))
-
- with open(os.path.join(path, '.run_state.json'), 'w') as wfh:
- wfh.write('{}')
-
- return RunOutput(path)
-
-
-def _save_raw_config(meta_dir, state):
- raw_config_dir = os.path.join(meta_dir, 'raw_config')
- os.makedirs(raw_config_dir)
-
- for i, source in enumerate(state.loaded_config_sources):
- if not os.path.isfile(source):
- continue
- basename = os.path.basename(source)
- dest_path = os.path.join(raw_config_dir, 'cfg{}-{}'.format(i, basename))
- shutil.copy(source, dest_path)
-
-
-
diff --git a/wlauto/core/plugin.py b/wlauto/core/plugin.py
deleted file mode 100644
index ccf2dece..00000000
--- a/wlauto/core/plugin.py
+++ /dev/null
@@ -1,793 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E1101
-import os
-import sys
-import inspect
-import imp
-import string
-import logging
-from collections import OrderedDict, defaultdict
-from itertools import chain
-from copy import copy
-
-from wlauto.exceptions import NotFoundError, LoaderError, ValidationError, ConfigError, HostError
-from wlauto.utils.misc import (ensure_directory_exists as _d,
- walk_modules, load_class, merge_dicts_simple, get_article)
-from wlauto.core.configuration import settings
-from wlauto.utils.types import identifier, boolean
-from wlauto.core.configuration.configuration import ConfigurationPoint as Parameter
-
-
-MODNAME_TRANS = string.maketrans(':/\\.', '____')
-
-
-class AttributeCollection(object):
- """
- Accumulator for plugin attribute objects (such as Parameters or Artifacts). This will
- replace any class member list accumulating such attributes through the magic of
- metaprogramming\ [*]_.
-
- .. [*] which is totally safe and not going backfire in any way...
-
- """
-
- @property
- def values(self):
- return self._attrs.values()
-
- def __init__(self, attrcls):
- self._attrcls = attrcls
- self._attrs = OrderedDict()
-
- def add(self, p):
- p = self._to_attrcls(p)
- if p.name in self._attrs:
- if p.override:
- newp = copy(self._attrs[p.name])
- for a, v in p.__dict__.iteritems():
- if v is not None:
- setattr(newp, a, v)
- if not hasattr(newp, "_overridden"):
- newp._overridden = p._owner
- self._attrs[p.name] = newp
- else:
- # Duplicate attribute condition is check elsewhere.
- pass
- else:
- self._attrs[p.name] = p
-
- append = add
-
- def __str__(self):
- return 'AC({})'.format(map(str, self._attrs.values()))
-
- __repr__ = __str__
-
- def _to_attrcls(self, p):
- old_owner = getattr(p, "_owner", None)
- if isinstance(p, basestring):
- p = self._attrcls(p)
- elif isinstance(p, tuple) or isinstance(p, list):
- p = self._attrcls(*p)
- elif isinstance(p, dict):
- p = self._attrcls(**p)
- elif not isinstance(p, self._attrcls):
- raise ValueError('Invalid parameter value: {}'.format(p))
- if (p.name in self._attrs and not p.override and
- p.name != 'modules'): # TODO: HACK due to "diamond dependecy" in workloads...
- raise ValueError('Attribute {} has already been defined.'.format(p.name))
- p._owner = old_owner
- return p
-
- def __iadd__(self, other):
- for p in other:
- self.add(p)
- return self
-
- def __iter__(self):
- return iter(self.values)
-
- def __contains__(self, p):
- return p in self._attrs
-
- def __getitem__(self, i):
- return self._attrs[i]
-
- def __len__(self):
- return len(self._attrs)
-
-
-class AliasCollection(AttributeCollection):
-
- def __init__(self):
- super(AliasCollection, self).__init__(Alias)
-
- def _to_attrcls(self, p):
- if isinstance(p, tuple) or isinstance(p, list):
- # must be in the form (name, {param: value, ...})
- p = self._attrcls(p[1], **p[1])
- elif not isinstance(p, self._attrcls):
- raise ValueError('Invalid parameter value: {}'.format(p))
- if p.name in self._attrs:
- raise ValueError('Attribute {} has already been defined.'.format(p.name))
- return p
-
-
-class ListCollection(list):
-
- def __init__(self, attrcls): # pylint: disable=unused-argument
- super(ListCollection, self).__init__()
-
-
-class Artifact(object):
- """
- This is an artifact generated during execution/post-processing of a workload.
- Unlike metrics, this represents an actual artifact, such as a file, generated.
- This may be "result", such as trace, or it could be "meta data" such as logs.
- These are distinguished using the ``kind`` attribute, which also helps WA decide
- how it should be handled. Currently supported kinds are:
-
- :log: A log file. Not part of "results" as such but contains information about the
- run/workload execution that be useful for diagnostics/meta analysis.
- :meta: A file containing metadata. This is not part of "results", but contains
- information that may be necessary to reproduce the results (contrast with
- ``log`` artifacts which are *not* necessary).
- :data: This file contains new data, not available otherwise and should be considered
- part of the "results" generated by WA. Most traces would fall into this category.
- :export: Exported version of results or some other artifact. This signifies that
- this artifact does not contain any new data that is not available
- elsewhere and that it may be safely discarded without losing information.
- :raw: Signifies that this is a raw dump/log that is normally processed to extract
- useful information and is then discarded. In a sense, it is the opposite of
- ``export``, but in general may also be discarded.
-
- .. note:: whether a file is marked as ``log``/``data`` or ``raw`` depends on
- how important it is to preserve this file, e.g. when archiving, vs
- how much space it takes up. Unlike ``export`` artifacts which are
- (almost) always ignored by other exporters as that would never result
- in data loss, ``raw`` files *may* be processed by exporters if they
- decided that the risk of losing potentially (though unlikely) useful
- data is greater than the time/space cost of handling the artifact (e.g.
- a database uploader may choose to ignore ``raw`` artifacts, where as a
- network filer archiver may choose to archive them).
-
- .. note: The kind parameter is intended to represent the logical function of a particular
- artifact, not its intended means of processing -- this is left entirely up to the
- result processors.
-
- """
-
- RUN = 'run'
- ITERATION = 'iteration'
-
- valid_kinds = ['log', 'meta', 'data', 'export', 'raw']
-
- def __init__(self, name, path, kind, level=RUN, mandatory=False, description=None):
- """"
- :param name: Name that uniquely identifies this artifact.
- :param path: The *relative* path of the artifact. Depending on the ``level``
- must be either relative to the run or iteration output directory.
- Note: this path *must* be delimited using ``/`` irrespective of the
- operating system.
- :param kind: The type of the artifact this is (e.g. log file, result, etc.) this
- will be used a hit to result processors. This must be one of ``'log'``,
- ``'meta'``, ``'data'``, ``'export'``, ``'raw'``.
- :param level: The level at which the artifact will be generated. Must be either
- ``'iteration'`` or ``'run'``.
- :param mandatory: Boolean value indicating whether this artifact must be present
- at the end of result processing for its level.
- :param description: A free-form description of what this artifact is.
-
- """
- if kind not in self.valid_kinds:
- raise ValueError('Invalid Artifact kind: {}; must be in {}'.format(kind, self.valid_kinds))
- self.name = name
- self.path = path.replace('/', os.sep) if path is not None else path
- self.kind = kind
- self.level = level
- self.mandatory = mandatory
- self.description = description
-
- def exists(self, context):
- """Returns ``True`` if artifact exists within the specified context, and
- ``False`` otherwise."""
- fullpath = os.path.join(context.output_directory, self.path)
- return os.path.exists(fullpath)
-
- def to_dict(self):
- return copy(self.__dict__)
-
-
-class Alias(object):
- """
- This represents a configuration alias for an plugin, mapping an alternative name to
- a set of parameter values, effectively providing an alternative set of default values.
-
- """
-
- def __init__(self, name, **kwargs):
- self.name = name
- self.params = kwargs
- self.plugin_name = None # gets set by the MetaClass
-
- def validate(self, ext):
- ext_params = set(p.name for p in ext.parameters)
- for param in self.params:
- if param not in ext_params:
- # Raising config error because aliases might have come through
- # the config.
- msg = 'Parameter {} (defined in alias {}) is invalid for {}'
- raise ConfigError(msg.format(param, self.name, ext.name))
-
-
-class PluginMeta(type):
- """
- This basically adds some magic to plugins to make implementing new plugins, such as
- workloads less complicated.
-
- It ensures that certain class attributes (specified by the ``to_propagate``
- attribute of the metaclass) get propagated down the inheritance hierarchy. The assumption
- is that the values of the attributes specified in the class are iterable; if that is not met,
- Bad Things (tm) will happen.
-
- This also provides virtual method implementation, similar to those in C-derived OO languages,
- and alias specifications.
-
- """
-
- to_propagate = [
- ('parameters', Parameter, AttributeCollection),
- ('artifacts', Artifact, AttributeCollection),
- ('core_modules', str, ListCollection),
- ]
-
- virtual_methods = ['validate', 'initialize', 'finalize']
- global_virtuals = ['initialize', 'finalize']
-
- def __new__(mcs, clsname, bases, attrs):
- mcs._propagate_attributes(bases, attrs, clsname)
- cls = type.__new__(mcs, clsname, bases, attrs)
- mcs._setup_aliases(cls)
- mcs._implement_virtual(cls, bases)
- return cls
-
- @classmethod
- def _propagate_attributes(mcs, bases, attrs, clsname):
- """
- For attributes specified by to_propagate, their values will be a union of
- that specified for cls and its bases (cls values overriding those of bases
- in case of conflicts).
-
- """
- for prop_attr, attr_cls, attr_collector_cls in mcs.to_propagate:
- should_propagate = False
- propagated = attr_collector_cls(attr_cls)
- for base in bases:
- if hasattr(base, prop_attr):
- propagated += getattr(base, prop_attr) or []
- should_propagate = True
- if prop_attr in attrs:
- pattrs = attrs[prop_attr] or []
- for pa in pattrs:
- if not isinstance(pa, basestring):
- pa._owner = clsname
- propagated += pattrs
- should_propagate = True
- if should_propagate:
- for p in propagated:
- override = bool(getattr(p, "override", None))
- overridden = bool(getattr(p, "_overridden", None))
- if override != overridden:
- msg = "Overriding non existing parameter '{}' inside '{}'"
- raise ValueError(msg.format(p.name, p._owner))
- attrs[prop_attr] = propagated
-
- @classmethod
- def _setup_aliases(mcs, cls):
- if hasattr(cls, 'aliases'):
- aliases, cls.aliases = cls.aliases, AliasCollection()
- for alias in aliases:
- if isinstance(alias, basestring):
- alias = Alias(alias)
- alias.validate(cls)
- alias.plugin_name = cls.name
- cls.aliases.add(alias)
-
- @classmethod
- def _implement_virtual(mcs, cls, bases):
- """
- This implements automatic method propagation to the bases, so
- that you don't have to do something like
-
- super(cls, self).vmname()
-
- This also ensures that the methods that have beend identified as
- "globally virtual" are executed exactly once per WA execution, even if
- invoked through instances of different subclasses
-
- """
- methods = {}
- called_globals = set()
- for vmname in mcs.virtual_methods:
- clsmethod = getattr(cls, vmname, None)
- if clsmethod:
- basemethods = [getattr(b, vmname) for b in bases if hasattr(b, vmname)]
- methods[vmname] = [bm for bm in basemethods if bm != clsmethod]
- methods[vmname].append(clsmethod)
-
- def generate_method_wrapper(vname): # pylint: disable=unused-argument
- # this creates a closure with the method name so that it
- # does not need to be passed to the wrapper as an argument,
- # leaving the wrapper to accept exactly the same set of
- # arguments as the method it is wrapping.
- name__ = vmname # pylint: disable=cell-var-from-loop
-
- def wrapper(self, *args, **kwargs):
- for dm in methods[name__]:
- if name__ in mcs.global_virtuals:
- if dm not in called_globals:
- dm(self, *args, **kwargs)
- called_globals.add(dm)
- else:
- dm(self, *args, **kwargs)
- return wrapper
-
- setattr(cls, vmname, generate_method_wrapper(vmname))
-
-
-class Plugin(object):
- """
- Base class for all WA plugins. An plugin is basically a plug-in.
- It extends the functionality of WA in some way. Plugins are discovered
- and loaded dynamically by the plugin loader upon invocation of WA scripts.
- Adding an plugin is a matter of placing a class that implements an appropriate
- interface somewhere it would be discovered by the loader. That "somewhere" is
- typically one of the plugin subdirectories under ``~/.workload_automation/``.
-
- """
- __metaclass__ = PluginMeta
-
- kind = None
- name = None
- parameters = [
- Parameter('modules', kind=list,
- description="""
- Lists the modules to be loaded by this plugin. A module is a plug-in that
- further extends functionality of an plugin.
- """),
- ]
- artifacts = []
- aliases = []
- core_modules = []
-
- @classmethod
- def get_default_config(cls):
- return {p.name: p.default for p in cls.parameters}
-
- @property
- def dependencies_directory(self):
- return _d(os.path.join(settings.dependencies_directory, self.name))
-
- @property
- def _classname(self):
- return self.__class__.__name__
-
- def __init__(self, **kwargs):
- self.logger = logging.getLogger(self._classname)
- self._modules = []
- self.capabilities = getattr(self.__class__, 'capabilities', [])
- for param in self.parameters:
- param.set_value(self, kwargs.get(param.name))
- for key in kwargs:
- if key not in self.parameters:
- message = 'Unexpected parameter "{}" for {}'
- raise ConfigError(message.format(key, self.name))
-
- def get_config(self):
- """
- Returns current configuration (i.e. parameter values) of this plugin.
-
- """
- config = {}
- for param in self.parameters:
- config[param.name] = getattr(self, param.name, None)
- return config
-
- def validate(self):
- """
- Perform basic validation to ensure that this plugin is capable of running.
- This is intended as an early check to ensure the plugin has not been mis-configured,
- rather than a comprehensive check (that may, e.g., require access to the execution
- context).
-
- This method may also be used to enforce (i.e. set as well as check) inter-parameter
- constraints for the plugin (e.g. if valid values for parameter A depend on the value
- of parameter B -- something that is not possible to enfroce using ``Parameter``\ 's
- ``constraint`` attribute.
-
- """
- if self.name is None:
- raise ValidationError('Name not set for {}'.format(self._classname))
- for param in self.parameters:
- param.validate(self)
-
- def initialize(self, context):
- pass
-
- def finalize(self, context):
- pass
-
- def check_artifacts(self, context, level):
- """
- Make sure that all mandatory artifacts have been generated.
-
- """
- for artifact in self.artifacts:
- if artifact.level != level or not artifact.mandatory:
- continue
- fullpath = os.path.join(context.output_directory, artifact.path)
- if not os.path.exists(fullpath):
- message = 'Mandatory "{}" has not been generated for {}.'
- raise ValidationError(message.format(artifact.path, self.name))
-
- def __getattr__(self, name):
- if name == '_modules':
- raise ValueError('_modules accessed too early!')
- for module in self._modules:
- if hasattr(module, name):
- return getattr(module, name)
- raise AttributeError(name)
-
- def load_modules(self, loader):
- """
- Load the modules specified by the "modules" Parameter using the provided loader. A loader
- can be any object that has an atribute called "get_module" that implements the following
- signature::
-
- get_module(name, owner, **kwargs)
-
- and returns an instance of :class:`wlauto.core.plugin.Module`. If the module with the
- specified name is not found, the loader must raise an appropriate exception.
-
- """
- modules = list(reversed(self.core_modules)) + list(reversed(self.modules or []))
- if not modules:
- return
- for module_spec in modules:
- if not module_spec:
- continue
- module = self._load_module(loader, module_spec)
- self._install_module(module)
-
- def has(self, capability):
- """Check if this plugin has the specified capability. The alternative method ``can`` is
- identical to this. Which to use is up to the caller depending on what makes semantic sense
- in the context of the capability, e.g. ``can('hard_reset')`` vs ``has('active_cooling')``."""
- return capability in self.capabilities
-
- can = has
-
- def _load_module(self, loader, module_spec):
- if isinstance(module_spec, basestring):
- name = module_spec
- params = {}
- elif isinstance(module_spec, dict):
- if len(module_spec) != 1:
- message = 'Invalid module spec: {}; dict must have exctly one key -- the module name.'
- raise ValueError(message.format(module_spec))
- name, params = module_spec.items()[0]
- else:
- message = 'Invalid module spec: {}; must be a string or a one-key dict.'
- raise ValueError(message.format(module_spec))
-
- if not isinstance(params, dict):
- message = 'Invalid module spec: {}; dict value must also be a dict.'
- raise ValueError(message.format(module_spec))
-
- module = loader.get_module(name, owner=self, **params)
- module.initialize(None)
- return module
-
- def _install_module(self, module):
- for capability in module.capabilities:
- if capability not in self.capabilities:
- self.capabilities.append(capability)
- self._modules.append(module)
-
-
-class PluginLoaderItem(object):
-
- def __init__(self, ext_tuple):
- self.name = ext_tuple.name
- self.default_package = ext_tuple.default_package
- self.default_path = ext_tuple.default_path
- self.cls = load_class(ext_tuple.cls)
-
-
-class PluginLoader(object):
- """
- Discovers, enumerates and loads available devices, configs, etc.
- The loader will attempt to discover things on construction by looking
- in predetermined set of locations defined by default_paths. Optionally,
- additional locations may specified through paths parameter that must
- be a list of additional Python module paths (i.e. dot-delimited).
-
- """
-
- def __init__(self, packages=None, paths=None, ignore_paths=None, keep_going=False):
- """
- params::
-
- :packages: List of packages to load plugins from.
- :paths: List of paths to be searched for Python modules containing
- WA plugins.
- :ignore_paths: List of paths to ignore when search for WA plugins (these would
- typically be subdirectories of one or more locations listed in
- ``paths`` parameter.
- :keep_going: Specifies whether to keep going if an error occurs while loading
- plugins.
- """
- self.logger = logging.getLogger('pluginloader')
- self.keep_going = keep_going
- self.packages = packages or []
- self.paths = paths or []
- self.ignore_paths = ignore_paths or []
- self.plugins = {}
- self.kind_map = defaultdict(dict)
- self.aliases = {}
- self.global_param_aliases = {}
- self._discover_from_packages(self.packages)
- self._discover_from_paths(self.paths, self.ignore_paths)
-
- def update(self, packages=None, paths=None, ignore_paths=None):
- """ Load plugins from the specified paths/packages
- without clearing or reloading existing plugin. """
- msg = 'Updating from: packages={} paths={}'
- self.logger.debug(msg.format(packages, paths))
- if packages:
- self.packages.extend(packages)
- self._discover_from_packages(packages)
- if paths:
- self.paths.extend(paths)
- self.ignore_paths.extend(ignore_paths or [])
- self._discover_from_paths(paths, ignore_paths or [])
-
- def clear(self):
- """ Clear all discovered items. """
- self.plugins = []
- self.kind_map.clear()
-
- def reload(self):
- """ Clear all discovered items and re-run the discovery. """
- self.logger.debug('Reloading')
- self.clear()
- self._discover_from_packages(self.packages)
- self._discover_from_paths(self.paths, self.ignore_paths)
-
- def get_plugin_class(self, name, kind=None):
- """
- Return the class for the specified plugin if found or raises ``ValueError``.
-
- """
- name, _ = self.resolve_alias(name)
- if kind is None:
- try:
- return self.plugins[name]
- except KeyError:
- raise NotFoundError('plugins {} not found.'.format(name))
- if kind not in self.kind_map:
- raise ValueError('Unknown plugin type: {}'.format(kind))
- store = self.kind_map[kind]
- if name not in store:
- msg = 'plugins {} is not {} {}.'
- raise NotFoundError(msg.format(name, get_article(kind), kind))
- return store[name]
-
- def get_plugin(self, name=None, kind=None, *args, **kwargs):
- """
- Return plugin of the specified kind with the specified name. Any
- additional parameters will be passed to the plugin's __init__.
-
- """
- name, base_kwargs = self.resolve_alias(name)
- kwargs = OrderedDict(chain(base_kwargs.iteritems(), kwargs.iteritems()))
- cls = self.get_plugin_class(name, kind)
- plugin = cls(*args, **kwargs)
- return plugin
-
- def get_default_config(self, name):
- """
- Returns the default configuration for the specified plugin name. The
- name may be an alias, in which case, the returned config will be
- augmented with appropriate alias overrides.
-
- """
- real_name, alias_config = self.resolve_alias(name)
- base_default_config = self.get_plugin_class(real_name).get_default_config()
- return merge_dicts_simple(base_default_config, alias_config)
-
- def list_plugins(self, kind=None):
- """
- List discovered plugin classes. Optionally, only list plugins of a
- particular type.
-
- """
- if kind is None:
- return self.plugins.values()
- if kind not in self.kind_map:
- raise ValueError('Unknown plugin type: {}'.format(kind))
- return self.kind_map[kind].values()
-
- def has_plugin(self, name, kind=None):
- """
- Returns ``True`` if an plugins with the specified ``name`` has been
- discovered by the loader. If ``kind`` was specified, only returns ``True``
- if the plugin has been found, *and* it is of the specified kind.
-
- """
- try:
- self.get_plugin_class(name, kind)
- return True
- except NotFoundError:
- return False
-
- def resolve_alias(self, alias_name):
- """
- Try to resolve the specified name as an plugin alias. Returns a
- two-tuple, the first value of which is actual plugin name, and the
- iisecond is a dict of parameter values for this alias. If the name passed
- is already an plugin name, then the result is ``(alias_name, {})``.
-
- """
- alias_name = identifier(alias_name.lower())
- if alias_name in self.plugins:
- return (alias_name, {})
- if alias_name in self.aliases:
- alias = self.aliases[alias_name]
- return (alias.plugin_name, alias.params)
- raise NotFoundError('Could not find plugin or alias "{}"'.format(alias_name))
-
- # Internal methods.
-
- def __getattr__(self, name):
- """
- This resolves methods for specific plugins types based on corresponding
- generic plugin methods. So it's possible to say things like ::
-
- loader.get_device('foo')
-
- instead of ::
-
- loader.get_plugin('foo', kind='device')
-
- """
- if name.startswith('get_'):
- name = name.replace('get_', '', 1)
- if name in self.kind_map:
- def __wrapper(pname, *args, **kwargs):
- return self.get_plugin(pname, name, *args, **kwargs)
- return __wrapper
- if name.startswith('list_'):
- name = name.replace('list_', '', 1).rstrip('s')
- if name in self.kind_map:
- def __wrapper(*args, **kwargs): # pylint: disable=E0102
- return self.list_plugins(name, *args, **kwargs)
- return __wrapper
- if name.startswith('has_'):
- name = name.replace('has_', '', 1)
- if name in self.kind_map:
- def __wrapper(pname, *args, **kwargs): # pylint: disable=E0102
- return self.has_plugin(pname, name, *args, **kwargs)
- return __wrapper
- raise AttributeError(name)
-
- def _discover_from_packages(self, packages):
- self.logger.debug('Discovering plugins in packages')
- try:
- for package in packages:
- for module in walk_modules(package):
- self._discover_in_module(module)
- except HostError as e:
- message = 'Problem loading plugins from {}: {}'
- raise LoaderError(message.format(e.module, str(e.orig_exc)))
-
- def _discover_from_paths(self, paths, ignore_paths):
- paths = paths or []
- ignore_paths = ignore_paths or []
-
- self.logger.debug('Discovering plugins in paths')
- for path in paths:
- self.logger.debug('Checking path %s', path)
- if os.path.isfile(path):
- self._discover_from_file(path)
- for root, _, files in os.walk(path, followlinks=True):
- should_skip = False
- for igpath in ignore_paths:
- if root.startswith(igpath):
- should_skip = True
- break
- if should_skip:
- continue
- for fname in files:
- if os.path.splitext(fname)[1].lower() != '.py':
- continue
- filepath = os.path.join(root, fname)
- self._discover_from_file(filepath)
-
- def _discover_from_file(self, filepath):
- try:
- modname = os.path.splitext(filepath[1:])[0].translate(MODNAME_TRANS)
- module = imp.load_source(modname, filepath)
- self._discover_in_module(module)
- except (SystemExit, ImportError), e:
- if self.keep_going:
- self.logger.warning('Failed to load {}'.format(filepath))
- self.logger.warning('Got: {}'.format(e))
- else:
- raise LoaderError('Failed to load {}'.format(filepath), sys.exc_info())
- except Exception as e:
- message = 'Problem loading plugins from {}: {}'
- raise LoaderError(message.format(filepath, e))
-
- def _discover_in_module(self, module): # NOQA pylint: disable=too-many-branches
- self.logger.debug('Checking module %s', module.__name__)
- #log.indent()
- try:
- for obj in vars(module).itervalues():
- if inspect.isclass(obj):
- if not issubclass(obj, Plugin):
- continue
- if not obj.kind:
- message = 'Skipping plugin {} as it does not define a kind'
- self.logger.debug(message.format(obj.__name__))
- continue
- if not obj.name:
- message = 'Skipping {} {} as it does not define a name'
- self.logger.debug(message.format(obj.kind, obj.__name__))
- continue
- try:
- self._add_found_plugin(obj)
- except LoaderError as e:
- if self.keep_going:
- self.logger.warning(e)
- else:
- raise e
- finally:
- # log.dedent()
- pass
-
- def _add_found_plugin(self, obj):
- """
- :obj: Found plugin class
- :ext: matching plugin item.
- """
- self.logger.debug('Adding %s %s', obj.kind, obj.name)
- key = identifier(obj.name.lower())
- if key in self.plugins or key in self.aliases:
- raise LoaderError('{} "{}" already exists.'.format(obj.kind, obj.name))
- # plugins are tracked both, in a common plugins
- # dict, and in per-plugin kind dict (as retrieving
- # plugins by kind is a common use case.
- self.plugins[key] = obj
- self.kind_map[obj.kind][key] = obj
-
- for alias in obj.aliases:
- alias_id = identifier(alias.name.lower())
- if alias_id in self.plugins or alias_id in self.aliases:
- raise LoaderError('{} "{}" already exists.'.format(obj.kind, obj.name))
- self.aliases[alias_id] = alias
diff --git a/wlauto/core/pluginloader.py b/wlauto/core/pluginloader.py
deleted file mode 100644
index dde6b828..00000000
--- a/wlauto/core/pluginloader.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-import sys
-
-
-class __LoaderWrapper(object):
-
- @property
- def kinds(self):
- if not self._loader:
- self.reset()
- return self._loader.kind_map.keys()
-
- @property
- def kind_map(self):
- if not self._loader:
- self.reset()
- return self._loader.kind_map
-
- def __init__(self):
- self._loader = None
-
- def reset(self):
- # These imports cannot be done at top level, because of
- # sys.modules manipulation below
- from wlauto.core.plugin import PluginLoader
- from wlauto.core.configuration import settings
- self._loader = PluginLoader(settings.plugin_packages,
- [settings.plugins_directory], [])
-
- def update(self, packages=None, paths=None, ignore_paths=None):
- if not self._loader:
- self.reset()
- self._loader.update(packages, paths, ignore_paths)
-
- def reload(self):
- if not self._loader:
- self.reset()
- self._loader.reload()
-
- def list_plugins(self, kind=None):
- if not self._loader:
- self.reset()
- return self._loader.list_plugins(kind)
-
- def has_plugin(self, name, kind=None):
- if not self._loader:
- self.reset()
- return self._loader.has_plugin(name, kind)
-
- def get_plugin_class(self, name, kind=None):
- if not self._loader:
- self.reset()
- return self._loader.get_plugin_class(name, kind)
-
- def get_plugin(self, name=None, kind=None, *args, **kwargs):
- if not self._loader:
- self.reset()
- return self._loader.get_plugin(name=name, kind=kind, *args, **kwargs)
-
- def get_default_config(self, name):
- if not self._loader:
- self.reset()
- return self._loader.get_default_config(name)
-
- def resolve_alias(self, name):
- if not self._loader:
- self.reset()
- return self._loader.resolve_alias(name)
-
- def __getattr__(self, name):
- if not self._loader:
- self.reset()
- return getattr(self._loader, name)
-
-
-sys.modules[__name__] = __LoaderWrapper()
diff --git a/wlauto/core/resolver.py b/wlauto/core/resolver.py
deleted file mode 100644
index ba643b0d..00000000
--- a/wlauto/core/resolver.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-Defines infrastructure for resource resolution. This is used to find
-various dependencies/assets/etc that WA objects rely on in a flexible way.
-
-"""
-import logging
-from collections import defaultdict
-
-# Note: this is the modified louie library in wlauto/external.
-# prioritylist does not exist in vanilla louie.
-from wlauto.utils.types import prioritylist # pylint: disable=E0611,F0401
-
-from wlauto.exceptions import ResourceError
-from wlauto.core import pluginloader
-
-class ResourceResolver(object):
- """
- Discovers and registers getters, and then handles requests for
- resources using registered getters.
-
- """
-
- def __init__(self, config):
- self.logger = logging.getLogger(self.__class__.__name__)
- self.getters = defaultdict(prioritylist)
- self.config = config
-
- def load(self):
- """
- Discover getters under the specified source. The source could
- be either a python package/module or a path.
-
- """
-
- for rescls in pluginloader.list_resource_getters():
- getter = self.config.get_plugin(name=rescls.name, kind="resource_getter", resolver=self)
- getter.register()
-
- def get(self, resource, strict=True, *args, **kwargs):
- """
- Uses registered getters to attempt to discover a resource of the specified
- kind and matching the specified criteria. Returns path to the resource that
- has been discovered. If a resource has not been discovered, this will raise
- a ``ResourceError`` or, if ``strict`` has been set to ``False``, will return
- ``None``.
-
- """
- self.logger.debug('Resolving {}'.format(resource))
- for getter in self.getters[resource.name]:
- self.logger.debug('Trying {}'.format(getter))
- result = getter.get(resource, *args, **kwargs)
- if result is not None:
- self.logger.debug('Resource {} found using {}:'.format(resource, getter))
- self.logger.debug('\t{}'.format(result))
- return result
- if strict:
- raise ResourceError('{} could not be found'.format(resource))
- self.logger.debug('Resource {} not found.'.format(resource))
- return None
-
- def register(self, getter, kind, priority=0):
- """
- Register the specified resource getter as being able to discover a resource
- of the specified kind with the specified priority.
-
- This method would typically be invoked by a getter inside its __init__.
- The idea being that getters register themselves for resources they know
- they can discover.
-
- *priorities*
-
- getters that are registered with the highest priority will be invoked first. If
- multiple getters are registered under the same priority, they will be invoked
- in the order they were registered (i.e. in the order they were discovered). This is
- essentially non-deterministic.
-
- Generally getters that are more likely to find a resource, or would find a
- "better" version of the resource should register with higher (positive) priorities.
- Fall-back getters that should only be invoked if a resource is not found by usual
- means should register with lower (negative) priorities.
-
- """
- self.logger.debug('Registering {} for {} resources'.format(getter.name, kind))
- self.getters[kind].add(getter, priority)
-
- def unregister(self, getter, kind):
- """
- Unregister a getter that has been registered earlier.
-
- """
- self.logger.debug('Unregistering {}'.format(getter.name))
- try:
- self.getters[kind].remove(getter)
- except ValueError:
- raise ValueError('Resource getter {} is not installed.'.format(getter.name))
diff --git a/wlauto/core/resource.py b/wlauto/core/resource.py
deleted file mode 100644
index 24e0ae19..00000000
--- a/wlauto/core/resource.py
+++ /dev/null
@@ -1,185 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from wlauto.core.configuration import settings
-from wlauto.core.plugin import Plugin
-
-
-class GetterPriority(object):
- """
- Enumerates standard ResourceGetter priorities. In general, getters should register
- under one of these, rather than specifying other priority values.
-
-
- :cached: The cached version of the resource. Look here first. This priority also implies
- that the resource at this location is a "cache" and is not the only version of the
- resource, so it may be cleared without losing access to the resource.
- :preferred: Take this resource in favour of the environment resource.
- :environment: Found somewhere under ~/.workload_automation/ or equivalent, or
- from environment variables, external configuration files, etc.
- These will override resource supplied with the package.
- :external_package: Resource provided by another package.
- :package: Resource provided with the package.
- :remote: Resource will be downloaded from a remote location (such as an HTTP server
- or a samba share). Try this only if no other getter was successful.
-
- """
- cached = 20
- preferred = 10
- remote = 5
- environment = 0
- external_package = -5
- package = -10
-
-
-class Resource(object):
- """
- Represents a resource that needs to be resolved. This can be pretty much
- anything: a file, environment variable, a Python object, etc. The only thing
- a resource *has* to have is an owner (which would normally be the
- Workload/Instrument/Device/etc object that needs the resource). In addition,
- a resource have any number of attributes to identify, but all of them are resource
- type specific.
-
- """
-
- name = None
-
- def __init__(self, owner):
- self.owner = owner
-
- def delete(self, instance):
- """
- Delete an instance of this resource type. This must be implemented by the concrete
- subclasses based on what the resource looks like, e.g. deleting a file or a directory
- tree, or removing an entry from a database.
-
- :note: Implementation should *not* contain any logic for deciding whether or not
- a resource should be deleted, only the actual deletion. The assumption is
- that if this method is invoked, then the decision has already been made.
-
- """
- raise NotImplementedError()
-
- def __str__(self):
- return '<{}\'s {}>'.format(self.owner, self.name)
-
-
-class ResourceGetter(Plugin):
- """
- Base class for implementing resolvers. Defines resolver interface. Resolvers are
- responsible for discovering resources (such as particular kinds of files) they know
- about based on the parameters that are passed to them. Each resolver also has a dict of
- attributes that describe its operation, and may be used to determine which get invoked.
- There is no pre-defined set of attributes and resolvers may define their own.
-
- Class attributes:
-
- :name: Name that uniquely identifies this getter. Must be set by any concrete subclass.
- :resource_type: Identifies resource type(s) that this getter can handle. This must
- be either a string (for a single type) or a list of strings for
- multiple resource types. This must be set by any concrete subclass.
- :priority: Priority with which this getter will be invoked. This should be one of
- the standard priorities specified in ``GetterPriority`` enumeration. If not
- set, this will default to ``GetterPriority.environment``.
-
- """
-
- kind = "resource_getter"
- name = None
- resource_type = None
- priority = GetterPriority.environment
-
- def __init__(self, resolver=None, **kwargs):
- super(ResourceGetter, self).__init__(**kwargs)
- self.resolver = resolver
-
- def register(self):
- """
- Registers with a resource resolver. Concrete implementations must override this
- to invoke ``self.resolver.register()`` method to register ``self`` for specific
- resource types.
-
- """
- if self.resource_type is None:
- raise ValueError('No resource type specified for {}'.format(self.name))
- elif isinstance(self.resource_type, list):
- for rt in self.resource_type:
- self.resolver.register(self, rt, self.priority)
- else:
- self.resolver.register(self, self.resource_type, self.priority)
-
- def unregister(self):
- """Unregister from a resource resolver."""
- if self.resource_type is None:
- raise ValueError('No resource type specified for {}'.format(self.name))
- elif isinstance(self.resource_type, list):
- for rt in self.resource_type:
- self.resolver.unregister(self, rt)
- else:
- self.resolver.unregister(self, self.resource_type)
-
- def get(self, resource, **kwargs):
- """
- This will get invoked by the resolver when attempting to resolve a resource, passing
- in the resource to be resolved as the first parameter. Any additional parameters would
- be specific to a particular resource type.
-
- This method will only be invoked for resource types that the getter has registered for.
-
- :param resource: an instance of :class:`wlauto.core.resource.Resource`.
-
- :returns: Implementations of this method must return either the discovered resource or
- ``None`` if the resource could not be discovered.
-
- """
- raise NotImplementedError()
-
- def delete(self, resource, *args, **kwargs):
- """
- Delete the resource if it is discovered. All arguments are passed to a call
- to``self.get()``. If that call returns a resource, it is deleted.
-
- :returns: ``True`` if the specified resource has been discovered and deleted,
- and ``False`` otherwise.
-
- """
- discovered = self.get(resource, *args, **kwargs)
- if discovered:
- resource.delete(discovered)
- return True
- else:
- return False
-
- def __str__(self):
- return '<ResourceGetter {}>'.format(self.name)
-
-
-class __NullOwner(object):
- """Represents an owner for a resource not owned by anyone."""
-
- name = 'noone'
- dependencies_directory = settings.dependencies_directory
-
- def __getattr__(self, name):
- return None
-
- def __str__(self):
- return 'no-one'
-
- __repr__ = __str__
-
-
-NO_ONE = __NullOwner()
diff --git a/wlauto/core/result.py b/wlauto/core/result.py
deleted file mode 100644
index c3035ec2..00000000
--- a/wlauto/core/result.py
+++ /dev/null
@@ -1,319 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=no-member
-
-"""
-This module defines the classes used to handle result
-processing inside Workload Automation. There will be a
-:class:`wlauto.core.workload.WorkloadResult` object generated for
-every workload iteration executed. This object will have a list of
-:class:`wlauto.core.workload.WorkloadMetric` objects. This list will be
-populated by the workload itself and may also be updated by instrumentation
-(e.g. to add power measurements). Once the result object has been fully
-populated, it will be passed into the ``process_iteration_result`` method of
-:class:`ResultProcessor`. Once the entire run has completed, a list containing
-result objects from all iterations will be passed into ``process_results``
-method of :class`ResultProcessor`.
-
-Which result processors will be active is defined by the ``result_processors``
-list in the ``~/.workload_automation/config.py``. Only the result_processors
-who's names appear in this list will be used.
-
-A :class:`ResultsManager` keeps track of active results processors.
-
-"""
-import logging
-import traceback
-from copy import copy
-from contextlib import contextmanager
-from datetime import datetime
-
-from wlauto.core.plugin import Plugin
-from wlauto.core.configuration.configuration import ITERATION_STATUS
-from wlauto.exceptions import WAError
-from wlauto.utils.types import numeric
-from wlauto.utils.misc import enum_metaclass, merge_dicts_simple
-
-
-class ResultManager(object):
- """
- Keeps track of result processors and passes on the results onto the individual processors.
-
- """
-
- def __init__(self):
- self.logger = logging.getLogger('ResultsManager')
- self.processors = []
- self._bad = []
-
- def install(self, processor):
- self.logger.debug('Installing results processor %s', processor.name)
- self.processors.append(processor)
-
- def uninstall(self, processor):
- if processor in self.processors:
- self.logger.debug('Uninstalling results processor %s', processor.name)
- self.processors.remove(processor)
- else:
- self.logger.warning('Attempting to uninstall results processor %s, which is not installed.',
- processor.name)
-
- def initialize(self, context):
- # Errors aren't handled at this stage, because this gets executed
- # before workload execution starts and we just want to propagte them
- # and terminate (so that error can be corrected and WA restarted).
- for processor in self.processors:
- processor.initialize(context)
-
- def add_result(self, result, context):
- with self._manage_processors(context):
- for processor in self.processors:
- with self._handle_errors(processor):
- processor.process_iteration_result(result, context)
- for processor in self.processors:
- with self._handle_errors(processor):
- processor.export_iteration_result(result, context)
-
- def process_run_result(self, result, context):
- with self._manage_processors(context):
- for processor in self.processors:
- with self._handle_errors(processor):
- processor.process_run_result(result, context)
- for processor in self.processors:
- with self._handle_errors(processor):
- processor.export_run_result(result, context)
-
- def finalize(self, context):
- with self._manage_processors(context):
- for processor in self.processors:
- with self._handle_errors(processor):
- processor.finalize(context)
-
- def validate(self):
- for processor in self.processors:
- processor.validate()
-
- @contextmanager
- def _manage_processors(self, context, finalize_bad=True):
- yield
- for processor in self._bad:
- if finalize_bad:
- processor.finalize(context)
- self.uninstall(processor)
- self._bad = []
-
- @contextmanager
- def _handle_errors(self, processor):
- try:
- yield
- except KeyboardInterrupt, e:
- raise e
- except WAError, we:
- self.logger.error('"{}" result processor has encountered an error'.format(processor.name))
- self.logger.error('{}("{}")'.format(we.__class__.__name__, we.message))
- self._bad.append(processor)
- except Exception, e: # pylint: disable=W0703
- self.logger.error('"{}" result processor has encountered an error'.format(processor.name))
- self.logger.error('{}("{}")'.format(e.__class__.__name__, e))
- self.logger.error(traceback.format_exc())
- self._bad.append(processor)
-
-
-class ResultProcessor(Plugin):
- """
- Base class for result processors. Defines an interface that should be implemented
- by the subclasses. A result processor can be used to do any kind of post-processing
- of the results, from writing them out to a file, to uploading them to a database,
- performing calculations, generating plots, etc.
-
- """
- kind = "result_processor"
- def initialize(self, context):
- pass
-
- def process_iteration_result(self, result, context):
- pass
-
- def export_iteration_result(self, result, context):
- pass
-
- def process_run_result(self, result, context):
- pass
-
- def export_run_result(self, result, context):
- pass
-
- def finalize(self, context):
- pass
-
-
-class RunResult(object):
- """
- Contains overall results for a run.
-
- """
-
- __metaclass__ = enum_metaclass('values', return_name=True)
-
- values = [
- 'OK',
- 'OKISH',
- 'PARTIAL',
- 'FAILED',
- 'UNKNOWN',
- ]
-
- @property
- def status(self):
- if not self.iteration_results or all([s.status == IterationResult.FAILED for s in self.iteration_results]):
- return self.FAILED
- elif any([s.status == IterationResult.FAILED for s in self.iteration_results]):
- return self.PARTIAL
- elif any([s.status == IterationResult.ABORTED for s in self.iteration_results]):
- return self.PARTIAL
- elif (any([s.status == IterationResult.PARTIAL for s in self.iteration_results]) or
- self.non_iteration_errors):
- return self.OKISH
- elif all([s.status == IterationResult.OK for s in self.iteration_results]):
- return self.OK
- else:
- return self.UNKNOWN # should never happen
-
- def __init__(self, run_info, output_directory=None):
- self.info = run_info
- self.iteration_results = []
- self.artifacts = []
- self.events = []
- self.non_iteration_errors = False
- self.output_directory = output_directory
-
-
-class RunEvent(object):
- """
- An event that occured during a run.
-
- """
- def __init__(self, message):
- self.timestamp = datetime.utcnow()
- self.message = message
-
- def to_dict(self):
- return copy(self.__dict__)
-
- def __str__(self):
- return '{} {}'.format(self.timestamp, self.message)
-
- __repr__ = __str__
-
-
-class IterationResult(object):
- """
- Contains the result of running a single iteration of a workload. It is the
- responsibility of a workload to instantiate a IterationResult, populate it,
- and return it form its get_result() method.
-
- Status explanations:
-
- :NOT_STARTED: This iteration has not yet started.
- :RUNNING: This iteration is currently running and no errors have been detected.
- :OK: This iteration has completed and no errors have been detected
- :PARTIAL: One or more instruments have failed (the iteration may still be running).
- :FAILED: The workload itself has failed.
- :ABORTED: The user interupted the workload
- :SKIPPED: The iteration was skipped due to a previous failure
-
- """
-
- __metaclass__ = enum_metaclass('values', return_name=True)
-
- values = ITERATION_STATUS
-
- def __init__(self, spec):
- self.spec = spec
- self.id = spec.id
- self.workload = spec.workload
- self.classifiers = copy(spec.classifiers)
- self.iteration = None
- self.status = self.NOT_STARTED
- self.output_directory = None
- self.events = []
- self.metrics = []
- self.artifacts = []
-
- def add_metric(self, name, value, units=None, lower_is_better=False, classifiers=None):
- self.metrics.append(Metric(name, value, units, lower_is_better,
- merge_dicts_simple(self.classifiers, classifiers)))
-
- def has_metric(self, name):
- for metric in self.metrics:
- if metric.name == name:
- return True
- return False
-
- def add_event(self, message):
- self.events.append(RunEvent(message))
-
- def to_dict(self):
- d = copy(self.__dict__)
- d['events'] = [e.to_dict() for e in self.events]
- return d
-
- def __iter__(self):
- return iter(self.metrics)
-
- def __getitem__(self, name):
- for metric in self.metrics:
- if metric.name == name:
- return metric
- raise KeyError('Metric {} not found.'.format(name))
-
-
-class Metric(object):
- """
- This is a single metric collected from executing a workload.
-
- :param name: the name of the metric. Uniquely identifies the metric
- within the results.
- :param value: The numerical value of the metric for this execution of
- a workload. This can be either an int or a float.
- :param units: Units for the collected value. Can be None if the value
- has no units (e.g. it's a count or a standardised score).
- :param lower_is_better: Boolean flag indicating where lower values are
- better than higher ones. Defaults to False.
- :param classifiers: A set of key-value pairs to further classify this metric
- beyond current iteration (e.g. this can be used to identify
- sub-tests).
-
- """
-
- def __init__(self, name, value, units=None, lower_is_better=False, classifiers=None):
- self.name = name
- self.value = numeric(value)
- self.units = units
- self.lower_is_better = lower_is_better
- self.classifiers = classifiers or {}
-
- def to_dict(self):
- return self.__dict__
-
- def __str__(self):
- result = '{}: {}'.format(self.name, self.value)
- if self.units:
- result += ' ' + self.units
- result += ' ({})'.format('-' if self.lower_is_better else '+')
- return '<{}>'.format(result)
-
- __repr__ = __str__
diff --git a/wlauto/core/signal.py b/wlauto/core/signal.py
deleted file mode 100644
index f374f96b..00000000
--- a/wlauto/core/signal.py
+++ /dev/null
@@ -1,272 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-This module wraps louie signalling mechanism. It relies on modified version of loiue
-that has prioritization added to handler invocation.
-
-"""
-import logging
-from contextlib import contextmanager
-
-from louie import dispatcher
-
-from wlauto.utils.types import prioritylist
-
-
-logger = logging.getLogger('dispatcher')
-
-
-class Signal(object):
- """
- This class implements the signals to be used for notifiying callbacks
- registered to respond to different states and stages of the execution of workload
- automation.
-
- """
-
- def __init__(self, name, description='no description', invert_priority=False):
- """
- Instantiates a Signal.
-
- :param name: name is the identifier of the Signal object. Signal instances with
- the same name refer to the same execution stage/stage.
- :param invert_priority: boolean parameter that determines whether multiple
- callbacks for the same signal should be ordered with
- ascending or descending priorities. Typically this flag
- should be set to True if the Signal is triggered AFTER an
- a state/stage has been reached. That way callbacks with high
- priorities will be called right after the event has occured.
- """
- self.name = name
- self.description = description
- self.invert_priority = invert_priority
-
- def __str__(self):
- return self.name
-
- __repr__ = __str__
-
- def __hash__(self):
- return id(self.name)
-
-
-# These are paired events -- if the before_event is sent, the after_ signal is
-# guaranteed to also be sent. In particular, the after_ signals will be sent
-# even if there is an error, so you cannot assume in the handler that the
-# device has booted successfully. In most cases, you should instead use the
-# non-paired signals below.
-BEFORE_FLASHING = Signal('before-flashing-signal', invert_priority=True)
-SUCCESSFUL_FLASHING = Signal('successful-flashing-signal')
-AFTER_FLASHING = Signal('after-flashing-signal')
-
-BEFORE_BOOT = Signal('before-boot-signal', invert_priority=True)
-SUCCESSFUL_BOOT = Signal('successful-boot-signal')
-AFTER_BOOT = Signal('after-boot-signal')
-
-BEFORE_INITIAL_BOOT = Signal('before-initial-boot-signal', invert_priority=True)
-SUCCESSFUL_INITIAL_BOOT = Signal('successful-initial-boot-signal')
-AFTER_INITIAL_BOOT = Signal('after-initial-boot-signal')
-
-BEFORE_FIRST_ITERATION_BOOT = Signal('before-first-iteration-boot-signal', invert_priority=True)
-SUCCESSFUL_FIRST_ITERATION_BOOT = Signal('successful-first-iteration-boot-signal')
-AFTER_FIRST_ITERATION_BOOT = Signal('after-first-iteration-boot-signal')
-
-BEFORE_WORKLOAD_SETUP = Signal('before-workload-setup-signal', invert_priority=True)
-SUCCESSFUL_WORKLOAD_SETUP = Signal('successful-workload-setup-signal')
-AFTER_WORKLOAD_SETUP = Signal('after-workload-setup-signal')
-
-BEFORE_WORKLOAD_EXECUTION = Signal('before-workload-execution-signal', invert_priority=True)
-SUCCESSFUL_WORKLOAD_EXECUTION = Signal('successful-workload-execution-signal')
-AFTER_WORKLOAD_EXECUTION = Signal('after-workload-execution-signal')
-
-BEFORE_WORKLOAD_RESULT_UPDATE = Signal('before-iteration-result-update-signal', invert_priority=True)
-SUCCESSFUL_WORKLOAD_RESULT_UPDATE = Signal('successful-iteration-result-update-signal')
-AFTER_WORKLOAD_RESULT_UPDATE = Signal('after-iteration-result-update-signal')
-
-BEFORE_WORKLOAD_TEARDOWN = Signal('before-workload-teardown-signal', invert_priority=True)
-SUCCESSFUL_WORKLOAD_TEARDOWN = Signal('successful-workload-teardown-signal')
-AFTER_WORKLOAD_TEARDOWN = Signal('after-workload-teardown-signal')
-
-BEFORE_OVERALL_RESULTS_PROCESSING = Signal('before-overall-results-process-signal', invert_priority=True)
-SUCCESSFUL_OVERALL_RESULTS_PROCESSING = Signal('successful-overall-results-process-signal')
-AFTER_OVERALL_RESULTS_PROCESSING = Signal('after-overall-results-process-signal')
-
-# These are the not-paired signals; they are emitted independently. E.g. the
-# fact that RUN_START was emitted does not mean run end will be.
-RUN_START = Signal('start-signal', invert_priority=True)
-RUN_END = Signal('end-signal')
-WORKLOAD_SPEC_START = Signal('workload-spec-start-signal', invert_priority=True)
-WORKLOAD_SPEC_END = Signal('workload-spec-end-signal')
-ITERATION_START = Signal('iteration-start-signal', invert_priority=True)
-ITERATION_END = Signal('iteration-end-signal')
-
-RUN_INIT = Signal('run-init-signal')
-SPEC_INIT = Signal('spec-init-signal')
-ITERATION_INIT = Signal('iteration-init-signal')
-
-RUN_FIN = Signal('run-fin-signal')
-
-# These signals are used by the LoggerFilter to tell about logging events
-ERROR_LOGGED = Signal('error_logged')
-WARNING_LOGGED = Signal('warning_logged')
-
-
-class CallbackPriority(object):
-
- EXTREMELY_HIGH = 30
- VERY_HIGH = 20
- HIGH = 10
- NORMAL = 0
- LOW = -10
- VERY_LOW = -20
- EXTREMELY_LOW = -30
-
- def __init__(self):
- raise ValueError('Cannot instantiate')
-
-
-class _prioritylist_wrapper(prioritylist):
- """
- This adds a NOP append() method so that when louie invokes it to add the
- handler to receivers, nothing will happen; the handler is actually added inside
- the connect() below according to priority, before louie's connect() gets invoked.
-
- """
-
- def append(self, *args, **kwargs):
- pass
-
-
-def connect(handler, signal, sender=dispatcher.Any, priority=0):
- """
- Connects a callback to a signal, so that the callback will be automatically invoked
- when that signal is sent.
-
- Parameters:
-
- :handler: This can be any callable that that takes the right arguments for
- the signal. For most signals this means a single argument that
- will be an ``ExecutionContext`` instance. But please see documentation
- for individual signals in the :ref:`signals reference <instrumentation_method_map>`.
- :signal: The signal to which the handler will be subscribed. Please see
- :ref:`signals reference <instrumentation_method_map>` for the list of standard WA
- signals.
-
- .. note:: There is nothing that prevents instrumentation from sending their
- own signals that are not part of the standard set. However the signal
- must always be an :class:`wlauto.core.signal.Signal` instance.
-
- :sender: The handler will be invoked only for the signals emitted by this sender. By
- default, this is set to :class:`louie.dispatcher.Any`, so the handler will
- be invoked for signals from any sender.
- :priority: An integer (positive or negative) the specifies the priority of the handler.
- Handlers with higher priority will be called before handlers with lower
- priority. The call order of handlers with the same priority is not specified.
- Defaults to 0.
-
- .. note:: Priorities for some signals are inverted (so highest priority
- handlers get executed last). Please see :ref:`signals reference <instrumentation_method_map>`
- for details.
-
- """
- if getattr(signal, 'invert_priority', False):
- priority = -priority
- senderkey = id(sender)
- if senderkey in dispatcher.connections:
- signals = dispatcher.connections[senderkey]
- else:
- dispatcher.connections[senderkey] = signals = {}
- if signal in signals:
- receivers = signals[signal]
- else:
- receivers = signals[signal] = _prioritylist_wrapper()
- receivers.add(handler, priority)
- dispatcher.connect(handler, signal, sender)
-
-
-def disconnect(handler, signal, sender=dispatcher.Any):
- """
- Disconnect a previously connected handler form the specified signal, optionally, only
- for the specified sender.
-
- Parameters:
-
- :handler: The callback to be disconnected.
- :signal: The signal the handler is to be disconnected form. It will
- be an :class:`wlauto.core.signal.Signal` instance.
- :sender: If specified, the handler will only be disconnected from the signal
- sent by this sender.
-
- """
- dispatcher.disconnect(handler, signal, sender)
-
-
-def send(signal, sender=dispatcher.Anonymous, *args, **kwargs):
- """
- Sends a signal, causing connected handlers to be invoked.
-
- Paramters:
-
- :signal: Signal to be sent. This must be an instance of :class:`wlauto.core.signal.Signal`
- or its subclasses.
- :sender: The sender of the signal (typically, this would be ``self``). Some handlers may only
- be subscribed to signals from a particular sender.
-
- The rest of the parameters will be passed on as aruments to the handler.
-
- """
- return dispatcher.send(signal, sender, *args, **kwargs)
-
-
-# This will normally be set to log_error() by init_logging(); see wa.framework/log.py.
-# Done this way to prevent a circular import dependency.
-log_error_func = logger.error
-
-
-def safe_send(signal, sender=dispatcher.Anonymous,
- propagate=[KeyboardInterrupt], *args, **kwargs):
- """
- Same as ``send``, except this will catch and log all exceptions raised
- by handlers, except those specified in ``propagate`` argument (defaults
- to just ``[KeyboardInterrupt]``).
- """
- try:
- send(singnal, sender, *args, **kwargs)
- except Exception as e:
- if any(isinstance(e, p) for p in propagate):
- raise e
- log_error_func(e)
-
-
-@contextmanager
-def wrap(signal_name, sender=dispatcher.Anonymous, safe=False, *args, **kwargs):
- """Wraps the suite in before/after signals, ensuring
- that after signal is always sent."""
- signal_name = signal_name.upper().replace('-', '_')
- send_func = safe_send if safe else send
- try:
- before_signal = globals()['BEFORE_' + signal_name]
- success_signal = globals()['SUCCESSFUL_' + signal_name]
- after_signal = globals()['AFTER_' + signal_name]
- except KeyError:
- raise ValueError('Invalid wrapped signal name: {}'.format(signal_name))
- try:
- send_func(before_signal, sender, *args, **kwargs)
- yield
- send_func(success_signal, sender, *args, **kwargs)
- finally:
- send_func(after_signal, sender, *args, **kwargs)
diff --git a/wlauto/core/version.py b/wlauto/core/version.py
deleted file mode 100644
index 5aeadc7b..00000000
--- a/wlauto/core/version.py
+++ /dev/null
@@ -1,26 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from collections import namedtuple
-
-VersionTuple = namedtuple('Version', ['major', 'minor', 'revision'])
-
-version = VersionTuple(2, 4, 0)
-
-
-def get_wa_version():
- version_string = '{}.{}.{}'.format(version.major, version.minor, version.revision)
- return version_string
diff --git a/wlauto/core/workload.py b/wlauto/core/workload.py
deleted file mode 100644
index 3c403fb1..00000000
--- a/wlauto/core/workload.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-A workload is the unit of execution. It represents a set of activities are are performed
-and measured together, as well as the necessary setup and teardown procedures. A single
-execution of a workload produces one :class:`wlauto.core.result.WorkloadResult` that is populated with zero or more
-:class:`wlauto.core.result.WorkloadMetric`\ s and/or
-:class:`wlauto.core.result.Artifact`\s by the workload and active instrumentation.
-
-"""
-from wlauto.core.plugin import Plugin
-from wlauto.exceptions import WorkloadError
-
-
-class Workload(Plugin):
- """
- This is the base class for the workloads executed by the framework.
- Each of the methods throwing NotImplementedError *must* be implemented
- by the derived classes.
-
- """
- kind = "workload"
- supported_devices = []
- supported_platforms = []
- summary_metrics = []
-
- def __init__(self, device, **kwargs):
- """
- Creates a new Workload.
-
- :param device: the Device on which the workload will be executed.
- """
- super(Workload, self).__init__(**kwargs)
- if self.supported_devices and device.name not in self.supported_devices:
- raise WorkloadError('Workload {} does not support device {}'.format(self.name, device.name))
-
- if self.supported_platforms and device.os not in self.supported_platforms:
- raise WorkloadError('Workload {} does not support platform {}'.format(self.name, device.os))
- self.device = device
-
- def init_resources(self, context):
- """
- This method may be used to perform early resource discovery and initialization. This is invoked
- during the initial loading stage and before the device is ready, so cannot be used for any
- device-dependent initialization. This method is invoked before the workload instance is
- validated.
-
- """
- pass
-
- def initialize(self, context):
- """
- This method should be used to perform once-per-run initialization of a workload instance, i.e.,
- unlike ``setup()`` it will not be invoked on each iteration.
-
- """
- pass
-
- def setup(self, context):
- """
- Perform the setup necessary to run the workload, such as copying the necessary files
- to the device, configuring the environments, etc.
-
- This is also the place to perform any on-device checks prior to attempting to execute
- the workload.
-
- """
- pass
-
- def run(self, context):
- """Execute the workload. This is the method that performs the actual "work" of the"""
- pass
-
- def update_result(self, context):
- """
- Update the result within the specified execution context with the metrics
- form this workload iteration.
-
- """
- pass
-
- def teardown(self, context):
- """ Perform any final clean up for the Workload. """
- pass
-
- def finalize(self, context):
- pass
-
- def __str__(self):
- return '<Workload {}>'.format(self.name)
diff --git a/wlauto/exceptions.py b/wlauto/exceptions.py
deleted file mode 100644
index bd4a0bb6..00000000
--- a/wlauto/exceptions.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto.utils.misc import get_traceback
-
-from devlib.exception import DevlibError, HostError, TargetError, TimeoutError
-
-
-class WAError(Exception):
- """Base class for all Workload Automation exceptions."""
- pass
-
-
-class NotFoundError(WAError):
- """Raised when the specified item is not found."""
- pass
-
-
-class ValidationError(WAError):
- """Raised on failure to validate an plugin."""
- pass
-
-
-class DeviceError(WAError):
- """General Device error."""
- pass
-
-
-class DeviceNotRespondingError(WAError):
- """The device is not responding."""
-
- def __init__(self, device):
- super(DeviceNotRespondingError, self).__init__('Device {} is not responding.'.format(device))
-
-
-class WorkloadError(WAError):
- """General Workload error."""
- pass
-
-
-class HostError(WAError):
- """Problem with the host on which WA is running."""
- pass
-
-
-class ModuleError(WAError):
- """
- Problem with a module.
-
- .. note:: Modules for specific plugin types should raise execeptions
- appropriate to that plugin. E.g. a ``Device`` module should raise
- ``DeviceError``. This is intended for situation where a module is
- unsure (and/or doesn't care) what its owner is.
-
- """
- pass
-
-
-class InstrumentError(WAError):
- """General Instrument error."""
- pass
-
-
-class ResultProcessorError(WAError):
- """General ResultProcessor error."""
- pass
-
-
-class ResourceError(WAError):
- """General Resolver error."""
- pass
-
-
-class CommandError(WAError):
- """Raised by commands when they have encountered an error condition
- during execution."""
- pass
-
-
-class ToolError(WAError):
- """Raised by tools when they have encountered an error condition
- during execution."""
- pass
-
-
-class LoaderError(WAError):
- """Raised when there is an error loading an plugin or
- an external resource. Apart form the usual message, the __init__
- takes an exc_info parameter which should be the result of
- sys.exc_info() for the original exception (if any) that
- caused the error."""
-
- def __init__(self, message, exc_info=None):
- super(LoaderError, self).__init__(message)
- self.exc_info = exc_info
-
- def __str__(self):
- if self.exc_info:
- orig = self.exc_info[1]
- orig_name = type(orig).__name__
- if isinstance(orig, WAError):
- reason = 'because of:\n{}: {}'.format(orig_name, orig)
- else:
- reason = 'because of:\n{}\n{}: {}'.format(get_traceback(self.exc_info), orig_name, orig)
- return '\n'.join([self.message, reason])
- else:
- return self.message
-
-
-class ConfigError(WAError):
- """Raised when configuration provided is invalid. This error suggests that
- the user should modify their config and try again."""
- pass
-
-
-class WorkerThreadError(WAError):
- """
- This should get raised in the main thread if a non-WAError-derived exception occurs on
- a worker/background thread. If a WAError-derived exception is raised in the worker, then
- it that exception should be re-raised on the main thread directly -- the main point of this is
- to preserve the backtrace in the output, and backtrace doesn't get output for WAErrors.
-
- """
-
- def __init__(self, thread, exc_info):
- self.thread = thread
- self.exc_info = exc_info
- orig = self.exc_info[1]
- orig_name = type(orig).__name__
- message = 'Exception of type {} occured on thread {}:\n'.format(orig_name, thread)
- message += '{}\n{}: {}'.format(get_traceback(self.exc_info), orig_name, orig)
- super(WorkerThreadError, self).__init__(message)
-
-
-class SerializerSyntaxError(Exception):
- """
- Error loading a serialized structure from/to a file handle.
- """
-
- def __init__(self, message, line=None, column=None):
- super(SerializerSyntaxError, self).__init__(message)
- self.line = line
- self.column = column
-
- def __str__(self):
- linestring = ' on line {}'.format(self.line) if self.line else ''
- colstring = ' in column {}'.format(self.column) if self.column else ''
- message = 'Syntax Error{}: {}'
- return message.format(''.join([linestring, colstring]), self.message)
diff --git a/wlauto/external/README b/wlauto/external/README
deleted file mode 100644
index 16d22a8e..00000000
--- a/wlauto/external/README
+++ /dev/null
@@ -1,74 +0,0 @@
-This directory contains external libraries and standalone utilities which have
-been written/modified to work with Workload Automation (and thus need to be
-included with WA rather than obtained from orignal sources).
-
-
-bbench_server
-=============
-
-This is a small sever that is used to detect when ``bbench`` workload has completed.
-``bbench`` navigates though a bunch of web pages in a browser using javascript.
-It will cause the browser to sent a GET request to the port the bbench_server is
-listening on, indicating the end of workload.
-
-
-daq_server
-==========
-
-Contains Daq server files that will run on a Windows machine. Please refer to
-daq instrument documentation.
-
-
-louie (third party)
-=====
-
-Python package that is itself a fork (and now, a replacement for) pydispatcher.
-This library provides a signal dispatching mechanism. This has been modified for
-WA to add prioritization to callbacks.
-
-
-pmu_logger
-==========
-
-Source for the kernel driver that enable the logging of CCI counters to ftrace
-on periodic basis. This driver is required by the ``cci_pmu_logger`` instrument.
-
-
-readenergy
-==========
-
-Outputs Juno internal energy/power/voltage/current measurments by reading APB
-regesiters from memory. This is used by ``juno_energy`` instrument.
-
-
-revent
-======
-
-This is a tool that is used to both record and playback key press and screen tap
-events. It is used to record UI manipulation for some workloads (such as games)
-where it is not possible to use the Android UI Automator.
-
-The tools is also included in binary form in wlauto/common/. In order to build
-the tool from source, you will need to have Android NDK in your PATH.
-
-
-stacktracer.py (third party)
-==============
-
-A module based on an ActiveState recipe that allows tracing thread stacks during
-execution of a Python program. This is used through the ``--debug`` flag in WA
-to ease debuging multi-threaded parts of the code.
-
-
-terminalsize.py (third party)
-===============
-
-Implements a platform-agnostic way of determining terminal window size. Taken
-from a public Github gist.
-
-
-uiauto
-======
-
-Contains the utilities library for UI automation.
-
diff --git a/wlauto/external/bbench_server/build.sh b/wlauto/external/bbench_server/build.sh
deleted file mode 100755
index 0c36467a..00000000
--- a/wlauto/external/bbench_server/build.sh
+++ /dev/null
@@ -1,31 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-BUILD_COMMAND=ndk-build
-
-if [[ $(which $BUILD_COMMAND) ]] ; then
- $BUILD_COMMAND
- if [[ $? ]]; then
- echo Coping to ../../workloads/bbench/
- cp libs/armeabi/bbench_server ../../workloads/bbench/bin/armeabi/bbench_server
- fi
-else
- echo Please make sure you have Android NDK in your PATH.
- exit 1
-fi
-
diff --git a/wlauto/external/bbench_server/jni/Android.mk b/wlauto/external/bbench_server/jni/Android.mk
deleted file mode 100644
index d6d40a08..00000000
--- a/wlauto/external/bbench_server/jni/Android.mk
+++ /dev/null
@@ -1,9 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_SRC_FILES:= bbench_server.cpp
-LOCAL_MODULE := bbench_server
-LOCAL_MODULE_TAGS := optional
-LOCAL_STATIC_LIBRARIES := libc
-LOCAL_SHARED_LIBRARIES :=
-include $(BUILD_EXECUTABLE)
diff --git a/wlauto/external/bbench_server/jni/bbench_server.cpp b/wlauto/external/bbench_server/jni/bbench_server.cpp
deleted file mode 100755
index 9b1e87d4..00000000
--- a/wlauto/external/bbench_server/jni/bbench_server.cpp
+++ /dev/null
@@ -1,151 +0,0 @@
-/* Copyright 2012-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-/**************************************************************************/
-/* Simple HTTP server program that will return on accepting connection */
-/**************************************************************************/
-
-/* Tested on Android ICS browser and FireFox browser */
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <unistd.h>
-#include <errno.h>
-#include <string.h>
-#include <sys/types.h>
-#include <sys/socket.h>
-#include <netinet/in.h>
-#include <netdb.h>
-#include <arpa/inet.h>
-#include <sys/wait.h>
-
-#define SERVERPORT "3030"
-
-void ExitOnError(int condition, const char *msg)
-{
- if(condition) { printf("Server: %s\n", msg); exit(1);}
-}
-
-void *GetInetAddr(struct sockaddr *sa)
-{
- if (sa->sa_family == AF_INET)
- {
- return &(((struct sockaddr_in*)sa)->sin_addr);
- }
- else
- {
- return &(((struct sockaddr_in6*)sa)->sin6_addr);
- }
-}
-
-int main(int argc, char *argv[])
-{
-
- socklen_t addr_size;
- struct addrinfo hints, *res;
- int server_fd, client_fd;
- int retval;
- int timeout_in_seconds;
-
- // Get the timeout value in seconds
- if(argc < 2)
- {
- printf("Usage %s <timeout in seconds>\n", argv[0]);
- exit(1);
- }
- else
- {
- timeout_in_seconds = atoi(argv[1]);
- printf("Server: Waiting for connection on port %s with timeout of %d seconds\n", SERVERPORT, timeout_in_seconds);
-
- }
-
- /**************************************************************************/
- /* Listen to a socket */
- /**************************************************************************/
- memset(&hints, 0, sizeof hints);
- hints.ai_family = AF_UNSPEC; // use IPv4 or IPv6, whichever
- hints.ai_socktype = SOCK_STREAM;
- hints.ai_flags = AI_PASSIVE; // fill in my IP for me
-
- getaddrinfo(NULL, SERVERPORT, &hints, &res);
-
-
- server_fd = socket(res->ai_family, res->ai_socktype, res->ai_protocol);
- ExitOnError(server_fd < 0, "Socket creation failed");
-
- retval = bind(server_fd, res->ai_addr, res->ai_addrlen);
- ExitOnError(retval < 0, "Bind failed");
-
- retval = listen(server_fd, 10);
- ExitOnError(retval < 0, "Listen failed");
-
- /**************************************************************************/
- /* Wait for connection to arrive or time out */
- /**************************************************************************/
- fd_set readfds;
- FD_ZERO(&readfds);
- FD_SET(server_fd, &readfds);
-
- // Timeout parameter
- timeval tv;
- tv.tv_sec = timeout_in_seconds;
- tv.tv_usec = 0;
-
- int ret = select(server_fd+1, &readfds, NULL, NULL, &tv);
- ExitOnError(ret <= 0, "No connection established, timed out");
- ExitOnError(FD_ISSET(server_fd, &readfds) == 0, "Error occured in select");
-
- /**************************************************************************/
- /* Accept connection and print the information */
- /**************************************************************************/
- {
- struct sockaddr_storage client_addr;
- char client_addr_string[INET6_ADDRSTRLEN];
- addr_size = sizeof client_addr;
- client_fd = accept(server_fd, (struct sockaddr *)&client_addr, &addr_size);
- ExitOnError(client_fd < 0, "Accept failed");
-
- inet_ntop(client_addr.ss_family,
- GetInetAddr((struct sockaddr *)&client_addr),
- client_addr_string,
- sizeof client_addr_string);
- printf("Server: Received connection from %s\n", client_addr_string);
- }
-
-
- /**************************************************************************/
- /* Send a acceptable HTTP response */
- /**************************************************************************/
- {
-
- char response[] = "HTTP/1.1 200 OK\r\n"
- "Content-Type: text/html\r\n"
- "Connection: close\r\n"
- "\r\n"
- "<html>"
- "<head>Local Server: Connection Accepted</head>"
- "<body></body>"
- "</html>";
- int bytes_sent;
- bytes_sent = send(client_fd, response, strlen(response), 0);
- ExitOnError(bytes_sent < 0, "Sending Response failed");
- }
-
-
- close(client_fd);
- close(server_fd);
- return 0;
-}
diff --git a/wlauto/external/daq_server/daqpower-1.0.5.tar.gz b/wlauto/external/daq_server/daqpower-1.0.5.tar.gz
deleted file mode 100644
index ee4cee29..00000000
--- a/wlauto/external/daq_server/daqpower-1.0.5.tar.gz
+++ /dev/null
Binary files differ
diff --git a/wlauto/external/daq_server/src/MANIFEST.in b/wlauto/external/daq_server/src/MANIFEST.in
deleted file mode 100644
index e69de29b..00000000
--- a/wlauto/external/daq_server/src/MANIFEST.in
+++ /dev/null
diff --git a/wlauto/external/daq_server/src/README b/wlauto/external/daq_server/src/README
deleted file mode 100644
index e69de29b..00000000
--- a/wlauto/external/daq_server/src/README
+++ /dev/null
diff --git a/wlauto/external/daq_server/src/build.sh b/wlauto/external/daq_server/src/build.sh
deleted file mode 100755
index ef3be06e..00000000
--- a/wlauto/external/daq_server/src/build.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-python setup.py sdist
-rm -rf build
-rm -f MANIFEST
-if [[ -d dist ]]; then
- mv dist/*.tar.gz ..
- rm -rf dist
-fi
-find . -iname \*.pyc -delete
diff --git a/wlauto/external/daq_server/src/daqpower/__init__.py b/wlauto/external/daq_server/src/daqpower/__init__.py
deleted file mode 100644
index 86926b77..00000000
--- a/wlauto/external/daq_server/src/daqpower/__init__.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-__version__ = '1.0.5'
diff --git a/wlauto/external/daq_server/src/daqpower/client.py b/wlauto/external/daq_server/src/daqpower/client.py
deleted file mode 100644
index 80276811..00000000
--- a/wlauto/external/daq_server/src/daqpower/client.py
+++ /dev/null
@@ -1,380 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E1101,E1103,wrong-import-position
-import os
-import sys
-
-from twisted.internet import reactor
-from twisted.internet.protocol import Protocol, ClientFactory, ReconnectingClientFactory
-from twisted.internet.error import ConnectionLost, ConnectionDone
-from twisted.protocols.basic import LineReceiver
-
-if __name__ == '__main__': # for debugging
- sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
-from daqpower import log
-from daqpower.common import DaqServerRequest, DaqServerResponse, Status
-from daqpower.config import get_config_parser
-
-
-__all__ = ['execute_command', 'run_send_command', 'Status']
-
-
-class Command(object):
-
- def __init__(self, name, **params):
- self.name = name
- self.params = params
-
-
-class CommandResult(object):
-
- def __init__(self):
- self.status = None
- self.message = None
- self.data = None
-
- def __str__(self):
- return '{} {}'.format(self.status, self.message)
-
-
-class CommandExecutorProtocol(Protocol):
-
- def __init__(self, command, timeout=10, retries=1):
- self.command = command
- self.sent_request = None
- self.waiting_for_response = False
- self.keep_going = None
- self.ports_to_pull = None
- self.factory = None
- self.timeoutCallback = None
- self.timeout = timeout
- self.retries = retries
- self.retry_count = 0
-
- def connectionMade(self):
- if self.command.name == 'get_data':
- self.sendRequest('list_port_files')
- else:
- self.sendRequest(self.command.name, **self.command.params)
-
- def connectionLost(self, reason=ConnectionDone):
- if isinstance(reason, ConnectionLost):
- self.errorOut('connection lost: {}'.format(reason))
- elif self.waiting_for_response:
- self.errorOut('Server closed connection without sending a response.')
- else:
- log.debug('connection terminated.')
-
- def sendRequest(self, command, **params):
- self.sent_request = DaqServerRequest(command, params)
- request_string = self.sent_request.serialize()
- log.debug('sending request: {}'.format(request_string))
- self.transport.write(''.join([request_string, '\r\n']))
- self.timeoutCallback = reactor.callLater(self.timeout, self.requestTimedOut)
- self.waiting_for_response = True
-
- def dataReceived(self, data):
- self.keep_going = False
- if self.waiting_for_response:
- self.waiting_for_response = False
- self.timeoutCallback.cancel()
- try:
- response = DaqServerResponse.deserialize(data)
- except Exception, e: # pylint: disable=W0703
- self.errorOut('Invalid response: {} ({})'.format(data, e))
- else:
- if response.status != Status.ERROR:
- self.processResponse(response) # may set self.keep_going
- if not self.keep_going:
- self.commandCompleted(response.status, response.message, response.data)
- else:
- self.errorOut(response.message)
- else:
- self.errorOut('unexpected data received: {}\n'.format(data))
-
- def processResponse(self, response):
- if self.sent_request.command in ['list_ports', 'list_port_files']:
- self.processPortsResponse(response)
- elif self.sent_request.command == 'list_devices':
- self.processDevicesResponse(response)
- elif self.sent_request.command == 'pull':
- self.processPullResponse(response)
-
- def processPortsResponse(self, response):
- if 'ports' not in response.data:
- self.errorOut('Response did not containt ports data: {} ({}).'.format(response, response.data))
- ports = response.data['ports']
- response.data = ports
- if self.command.name == 'get_data':
- if ports:
- self.ports_to_pull = ports
- self.sendPullRequest(self.ports_to_pull.pop())
- else:
- response.status = Status.OKISH
- response.message = 'No ports were returned.'
-
- def processDevicesResponse(self, response):
- if response.status == Status.OK:
- if 'devices' not in response.data:
- self.errorOut('Response did not containt devices data: {} ({}).'.format(response, response.data))
- devices = response.data['devices']
- response.data = devices
-
- def sendPullRequest(self, port_id):
- self.sendRequest('pull', port_id=port_id)
- self.keep_going = True
-
- def processPullResponse(self, response):
- if 'port_number' not in response.data:
- self.errorOut('Response does not contain port number: {} ({}).'.format(response, response.data))
- port_number = response.data.pop('port_number')
- filename = self.sent_request.params['port_id'] + '.csv'
- self.factory.initiateFileTransfer(filename, port_number)
- if self.ports_to_pull:
- self.sendPullRequest(self.ports_to_pull.pop())
-
- def commandCompleted(self, status, message=None, data=None):
- self.factory.result.status = status
- self.factory.result.message = message
- self.factory.result.data = data
- self.transport.loseConnection()
-
- def requestTimedOut(self):
- self.retry_count += 1
- if self.retry_count > self.retries:
- self.errorOut("Request timed out; server failed to respond.")
- else:
- log.debug('Retrying...')
- self.connectionMade()
-
- def errorOut(self, message):
- self.factory.errorOut(message)
-
-
-class CommandExecutorFactory(ClientFactory):
-
- protocol = CommandExecutorProtocol
- wait_delay = 1
-
- def __init__(self, config, command, timeout=10, retries=1):
- self.config = config
- self.command = command
- self.timeout = timeout
- self.retries = retries
- self.result = CommandResult()
- self.done = False
- self.transfers_in_progress = {}
- if command.name == 'get_data':
- if 'output_directory' not in command.params:
- self.errorOut('output_directory not specifed for get_data command.')
- self.output_directory = command.params['output_directory']
- if not os.path.isdir(self.output_directory):
- log.debug('Creating output directory {}'.format(self.output_directory))
- os.makedirs(self.output_directory)
-
- def buildProtocol(self, addr):
- protocol = CommandExecutorProtocol(self.command, self.timeout, self.retries)
- protocol.factory = self
- return protocol
-
- def initiateFileTransfer(self, filename, port):
- log.debug('Downloading {} from port {}'.format(filename, port))
- filepath = os.path.join(self.output_directory, filename)
- session = FileReceiverFactory(filepath, self)
- connector = reactor.connectTCP(self.config.host, port, session)
- self.transfers_in_progress[session] = connector
-
- def transferComplete(self, session):
- connector = self.transfers_in_progress[session]
- log.debug('Transfer on port {} complete.'.format(connector.port))
- del self.transfers_in_progress[session]
-
- def clientConnectionLost(self, connector, reason):
- if self.transfers_in_progress:
- log.debug('Waiting for the transfer(s) to complete.')
- self.waitForTransfersToCompleteAndExit()
-
- def clientConnectionFailed(self, connector, reason):
- self.result.status = Status.ERROR
- self.result.message = 'Could not connect to server.'
- self.waitForTransfersToCompleteAndExit()
-
- def waitForTransfersToCompleteAndExit(self):
- if self.transfers_in_progress:
- reactor.callLater(self.wait_delay, self.waitForTransfersToCompleteAndExit)
- else:
- log.debug('Stopping the reactor.')
- reactor.stop()
-
- def errorOut(self, message):
- self.result.status = Status.ERROR
- self.result.message = message
- reactor.crash()
-
- def __str__(self):
- return '<CommandExecutorProtocol {}>'.format(self.command.name)
-
- __repr__ = __str__
-
-
-class FileReceiver(LineReceiver): # pylint: disable=W0223
-
- def __init__(self, path):
- self.path = path
- self.fh = None
- self.factory = None
-
- def connectionMade(self):
- if os.path.isfile(self.path):
- log.warning('overriding existing file.')
- os.remove(self.path)
- self.fh = open(self.path, 'w')
-
- def connectionLost(self, reason=ConnectionDone):
- if self.fh:
- self.fh.close()
-
- def lineReceived(self, line):
- line = line.rstrip('\r\n') + '\n'
- self.fh.write(line)
-
-
-class FileReceiverFactory(ReconnectingClientFactory):
-
- def __init__(self, path, owner):
- self.path = path
- self.owner = owner
-
- def buildProtocol(self, addr):
- protocol = FileReceiver(self.path)
- protocol.factory = self
- self.resetDelay()
- return protocol
-
- def clientConnectionLost(self, conector, reason):
- if isinstance(reason, ConnectionLost):
- log.error('Connection lost: {}'.format(reason))
- ReconnectingClientFactory.clientConnectionLost(self, conector, reason)
- else:
- self.owner.transferComplete(self)
-
- def clientConnectionFailed(self, conector, reason):
- if isinstance(reason, ConnectionLost):
- log.error('Connection failed: {}'.format(reason))
- ReconnectingClientFactory.clientConnectionFailed(self, conector, reason)
-
- def __str__(self):
- return '<FileReceiver {}>'.format(self.path)
-
- __repr__ = __str__
-
-
-def execute_command(server_config, command, **kwargs):
- before_fds = _get_open_fds() # see the comment in the finally clause below
- if isinstance(command, basestring):
- command = Command(command, **kwargs)
- timeout = 300 if command.name in ['stop', 'pull'] else 10
- factory = CommandExecutorFactory(server_config, command, timeout)
-
- # reactors aren't designed to be re-startable. In order to be
- # able to call execute_command multiple times, we need to froce
- # re-installation of the reactor; hence this hackery.
- # TODO: look into implementing restartable reactors. According to the
- # Twisted FAQ, there is no good reason why there isn't one:
- # http://twistedmatrix.com/trac/wiki/FrequentlyAskedQuestions#WhycanttheTwistedsreactorberestarted
- from twisted.internet import default
- del sys.modules['twisted.internet.reactor']
- default.install()
- global reactor # pylint: disable=W0603
- reactor = sys.modules['twisted.internet.reactor']
-
- try:
- reactor.connectTCP(server_config.host, server_config.port, factory)
- reactor.run()
- return factory.result
- finally:
- # re-startable reactor hack part 2.
- # twisted hijacks SIGINT and doesn't bother to un-hijack it when the reactor
- # stops. So we have to do it for it *rolls eye*.
- import signal
- signal.signal(signal.SIGINT, signal.default_int_handler)
- # OK, the reactor is also leaking file descriptors. Tracking down all
- # of them is non trivial, so instead we're just comparing the before
- # and after lists of open FDs for the current process, and closing all
- # new ones, as execute_command should never leave anything open after
- # it exits (even when downloading data files from the server).
- # TODO: This is way too hacky even compared to the rest of this function.
- # Additionally, the current implementation ties this to UNIX,
- # so in the long run, we need to do this properly and get the FDs
- # from the reactor.
- after_fds = _get_open_fds()
- for fd in after_fds - before_fds:
- try:
- os.close(int(fd[1:]))
- except OSError:
- pass
- # Below is the alternative code that gets FDs from the reactor, however
- # at the moment it doesn't seem to get everything, which is why code
- # above is used instead.
- #for fd in readtor._selectables:
- # os.close(fd)
- #reactor._poller.close()
-
-
-def _get_open_fds():
- if os.name == 'posix':
- import subprocess
- pid = os.getpid()
- procs = subprocess.check_output(["lsof", '-w', '-Ff', "-p", str(pid)])
- return set(procs.split())
- else:
- # TODO: Implement the Windows equivalent.
- return []
-
-
-def run_send_command():
- """Main entry point when running as a script -- should not be invoked form another module."""
- parser = get_config_parser()
- parser.add_argument('command')
- parser.add_argument('-o', '--output-directory', metavar='DIR', default='.',
- help='Directory used to output data files (defaults to the current directory).')
- parser.add_argument('--verbose', help='Produce verobose output.', action='store_true', default=False)
- args = parser.parse_args()
- if not args.device_config.labels:
- args.device_config.labels = ['PORT_{}'.format(i) for i in xrange(len(args.device_config.resistor_values))]
-
- if args.verbose:
- log.start_logging('DEBUG')
- else:
- log.start_logging('INFO', fmt='%(levelname)-8s %(message)s')
-
- if args.command == 'configure':
- args.device_config.validate()
- command = Command(args.command, config=args.device_config)
- elif args.command == 'get_data':
- command = Command(args.command, output_directory=args.output_directory)
- else:
- command = Command(args.command)
-
- result = execute_command(args.server_config, command)
- print result
- if result.data:
- print result.data
-
-
-if __name__ == '__main__':
- run_send_command()
diff --git a/wlauto/external/daq_server/src/daqpower/common.py b/wlauto/external/daq_server/src/daqpower/common.py
deleted file mode 100644
index 105daf23..00000000
--- a/wlauto/external/daq_server/src/daqpower/common.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E1101
-import json
-
-
-class Serializer(json.JSONEncoder):
-
- def default(self, o): # pylint: disable=E0202
- if isinstance(o, Serializable):
- return o.serialize()
- if isinstance(o, EnumEntry):
- return o.name
- return json.JSONEncoder.default(self, o)
-
-
-class Serializable(object):
-
- @classmethod
- def deserialize(cls, text):
- return cls(**json.loads(text))
-
- def serialize(self, d=None):
- if d is None:
- d = self.__dict__
- return json.dumps(d, cls=Serializer)
-
-
-class DaqServerRequest(Serializable):
-
- def __init__(self, command, params=None): # pylint: disable=W0231
- self.command = command
- self.params = params or {}
-
-
-class DaqServerResponse(Serializable):
-
- def __init__(self, status, message=None, data=None): # pylint: disable=W0231
- self.status = status
- self.message = message.strip().replace('\r\n', ' ') if message else ''
- self.data = data or {}
-
- def __str__(self):
- return '{} {}'.format(self.status, self.message or '')
-
-
-class EnumEntry(object):
-
- def __init__(self, name):
- self.name = name
-
- def __str__(self):
- return self.name
-
- def __cmp__(self, other):
- return cmp(self.name, str(other))
-
-
-class Enum(object):
- """
- Assuming MyEnum = Enum('A', 'B'),
-
- MyEnum.A and MyEnum.B are valid values.
-
- a = MyEnum.A
- (a == MyEnum.A) == True
- (a in MyEnum) == True
-
- MyEnum('A') == MyEnum.A
-
- str(MyEnum.A) == 'A'
-
- """
-
- def __init__(self, *args):
- for a in args:
- setattr(self, a, EnumEntry(a))
-
- def __call__(self, value):
- if value not in self.__dict__:
- raise ValueError('Not enum value: {}'.format(value))
- return self.__dict__[value]
-
- def __iter__(self):
- for e in self.__dict__:
- yield self.__dict__[e]
-
-
-Status = Enum('OK', 'OKISH', 'ERROR')
diff --git a/wlauto/external/daq_server/src/daqpower/config.py b/wlauto/external/daq_server/src/daqpower/config.py
deleted file mode 100644
index d5b7c885..00000000
--- a/wlauto/external/daq_server/src/daqpower/config.py
+++ /dev/null
@@ -1,153 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import argparse
-
-from daqpower.common import Serializable
-
-
-class ConfigurationError(Exception):
- """Raised when configuration passed into DaqServer is invaid."""
- pass
-
-
-class DeviceConfiguration(Serializable):
- """Encapulates configuration for the DAQ, typically, passed from
- the client."""
-
- valid_settings = ['device_id', 'v_range', 'dv_range', 'sampling_rate', 'resistor_values', 'labels']
-
- default_device_id = 'Dev1'
- default_v_range = 2.5
- default_dv_range = 0.2
- default_sampling_rate = 10000
- # Channel map used in DAQ 6363 and similar.
- default_channel_map = (0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23)
-
- @property
- def number_of_ports(self):
- return len(self.resistor_values)
-
- def __init__(self, **kwargs): # pylint: disable=W0231
- try:
- self.device_id = kwargs.pop('device_id') or self.default_device_id
- self.v_range = float(kwargs.pop('v_range') or self.default_v_range)
- self.dv_range = float(kwargs.pop('dv_range') or self.default_dv_range)
- self.sampling_rate = int(kwargs.pop('sampling_rate') or self.default_sampling_rate)
- self.resistor_values = kwargs.pop('resistor_values') or []
- self.channel_map = kwargs.pop('channel_map') or self.default_channel_map
- self.labels = (kwargs.pop('labels') or
- ['PORT_{}.csv'.format(i) for i in xrange(len(self.resistor_values))])
- except KeyError, e:
- raise ConfigurationError('Missing config: {}'.format(e.message))
- if kwargs:
- raise ConfigurationError('Unexpected config: {}'.format(kwargs))
-
- def validate(self):
- if not self.number_of_ports:
- raise ConfigurationError('No resistor values were specified.')
- if len(self.resistor_values) != len(self.labels):
- message = 'The number of resistors ({}) does not match the number of labels ({})'
- raise ConfigurationError(message.format(len(self.resistor_values), len(self.labels)))
-
- def __str__(self):
- return self.serialize()
-
- __repr__ = __str__
-
-
-class ServerConfiguration(object):
- """Client-side server configuration."""
-
- valid_settings = ['host', 'port']
-
- default_host = '127.0.0.1'
- default_port = 45677
-
- def __init__(self, **kwargs):
- self.host = kwargs.pop('host', None) or self.default_host
- self.port = kwargs.pop('port', None) or self.default_port
- if kwargs:
- raise ConfigurationError('Unexpected config: {}'.format(kwargs))
-
- def validate(self):
- if not self.host:
- raise ConfigurationError('Server host not specified.')
- if not self.port:
- raise ConfigurationError('Server port not specified.')
- elif not isinstance(self.port, int):
- raise ConfigurationError('Server port must be an integer.')
-
-
-class UpdateDeviceConfig(argparse.Action):
-
- def __call__(self, parser, namespace, values, option_string=None):
- setting = option_string.strip('-').replace('-', '_')
- if setting not in DeviceConfiguration.valid_settings:
- raise ConfigurationError('Unkown option: {}'.format(option_string))
- setattr(namespace._device_config, setting, values) # pylint: disable=protected-access
-
-
-class UpdateServerConfig(argparse.Action):
-
- def __call__(self, parser, namespace, values, option_string=None):
- setting = option_string.strip('-').replace('-', '_')
- if setting not in namespace.server_config.valid_settings:
- raise ConfigurationError('Unkown option: {}'.format(option_string))
- setattr(namespace.server_config, setting, values)
-
-
-class ConfigNamespace(object):
-
- class _N(object):
- def __init__(self):
- self.device_id = None
- self.v_range = None
- self.dv_range = None
- self.sampling_rate = None
- self.resistor_values = None
- self.labels = None
- self.channel_map = None
-
- @property
- def device_config(self):
- return DeviceConfiguration(**self._device_config.__dict__)
-
- def __init__(self):
- self._device_config = self._N()
- self.server_config = ServerConfiguration()
-
-
-class ConfigArgumentParser(argparse.ArgumentParser):
-
- def parse_args(self, *args, **kwargs):
- kwargs['namespace'] = ConfigNamespace()
- return super(ConfigArgumentParser, self).parse_args(*args, **kwargs)
-
-
-def get_config_parser(server=True, device=True):
- parser = ConfigArgumentParser()
- if device:
- parser.add_argument('--device-id', action=UpdateDeviceConfig)
- parser.add_argument('--v-range', action=UpdateDeviceConfig, type=float)
- parser.add_argument('--dv-range', action=UpdateDeviceConfig, type=float)
- parser.add_argument('--sampling-rate', action=UpdateDeviceConfig, type=int)
- parser.add_argument('--resistor-values', action=UpdateDeviceConfig, type=float, nargs='*')
- parser.add_argument('--labels', action=UpdateDeviceConfig, nargs='*')
- if server:
- parser.add_argument('--host', action=UpdateServerConfig)
- parser.add_argument('--port', action=UpdateServerConfig, type=int)
- return parser
diff --git a/wlauto/external/daq_server/src/daqpower/daq.py b/wlauto/external/daq_server/src/daqpower/daq.py
deleted file mode 100644
index 7adae3dd..00000000
--- a/wlauto/external/daq_server/src/daqpower/daq.py
+++ /dev/null
@@ -1,347 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-"""
-Creates a new DAQ device class. This class assumes that there is a
-DAQ connected and mapped as Dev1. It assumes a specific syndesmology on the DAQ (it is not
-meant to be a generic DAQ interface). The following diagram shows the wiring for one DaqDevice
-port::
-
-Port 0
-========
-| A0+ <--- Vr -------------------------|
-| |
-| A0- <--- GND -------------------// |
-| |
-| A1+ <--- V+ ------------|-------V+ |
-| r | |
-| A1- <--- Vr --/\/\/\----| |
-| | |
-| | |
-| |--------------------------|
-========
-
-:number_of_ports: The number of ports connected on the DAQ. Each port requires 2 DAQ Channels
- one for the source voltage and one for the Voltage drop over the
- resistor r (V+ - Vr) allows us to detect the current.
-:resistor_value: The resistance of r. Typically a few milliOhm
-:downsample: The number of samples combined to create one Power point. If set to one
- each sample corresponds to one reported power point.
-:sampling_rate: The rate at which DAQ takes a sample from each channel.
-
-"""
-# pylint: disable=F0401,E1101,W0621,no-name-in-module,wrong-import-position,wrong-import-order
-import os
-import sys
-import csv
-import time
-import threading
-from Queue import Queue, Empty
-
-import numpy
-
-from PyDAQmx import Task, DAQError
-try:
- from PyDAQmx.DAQmxFunctions import DAQmxGetSysDevNames
- CAN_ENUMERATE_DEVICES = True
-except ImportError: # earlier driver version
- DAQmxGetSysDevNames = None
- CAN_ENUMERATE_DEVICES = False
-
-from PyDAQmx.DAQmxTypes import int32, byref, create_string_buffer
-from PyDAQmx.DAQmxConstants import (DAQmx_Val_Diff, DAQmx_Val_Volts, DAQmx_Val_GroupByScanNumber, DAQmx_Val_Auto,
- DAQmx_Val_Rising, DAQmx_Val_ContSamps)
-
-try:
- from PyDAQmx.DAQmxConstants import DAQmx_Val_Acquired_Into_Buffer
- callbacks_supported = True
-except ImportError: # earlier driver version
- DAQmx_Val_Acquired_Into_Buffer = None
- callbacks_supported = False
-
-
-from daqpower import log
-
-
-def list_available_devices():
- """Returns the list of DAQ devices visible to the driver."""
- if DAQmxGetSysDevNames:
- bufsize = 2048 # Should be plenty for all but the most pathalogical of situations.
- buf = create_string_buffer('\000' * bufsize)
- DAQmxGetSysDevNames(buf, bufsize)
- return buf.value.split(',')
- else:
- return []
-
-
-class ReadSamplesBaseTask(Task):
-
- def __init__(self, config, consumer):
- Task.__init__(self)
- self.config = config
- self.consumer = consumer
- self.sample_buffer_size = (self.config.sampling_rate + 1) * self.config.number_of_ports * 2
- self.samples_read = int32()
- self.remainder = []
- # create voltage channels
- for i in xrange(0, 2 * self.config.number_of_ports, 2):
- self.CreateAIVoltageChan('{}/ai{}'.format(config.device_id, config.channel_map[i]),
- '', DAQmx_Val_Diff,
- -config.v_range, config.v_range,
- DAQmx_Val_Volts, None)
- self.CreateAIVoltageChan('{}/ai{}'.format(config.device_id, config.channel_map[i + 1]),
- '', DAQmx_Val_Diff,
- -config.dv_range, config.dv_range,
- DAQmx_Val_Volts, None)
- # configure sampling rate
- self.CfgSampClkTiming('',
- self.config.sampling_rate,
- DAQmx_Val_Rising,
- DAQmx_Val_ContSamps,
- self.config.sampling_rate)
-
-
-class ReadSamplesCallbackTask(ReadSamplesBaseTask):
- """
- More recent verisons of the driver (on Windows) support callbacks
-
- """
-
- def __init__(self, config, consumer):
- ReadSamplesBaseTask.__init__(self, config, consumer)
- # register callbacks
- self.AutoRegisterEveryNSamplesEvent(DAQmx_Val_Acquired_Into_Buffer, self.config.sampling_rate // 2, 0)
- self.AutoRegisterDoneEvent(0)
-
- def EveryNCallback(self):
- # Note to future self: do NOT try to "optimize" this but re-using the same array and just
- # zeroing it out each time. The writes happen asynchronously and if your zero it out too soon,
- # you'll see a whole bunch of 0.0's in the output. If you wanna go down that route, you'll need
- # cycler through several arrays and have the code that's actually doing the writing zero them out
- # mark them as available to be used by this call. But, honestly, numpy array allocation does not
- # appear to be a bottleneck at the moment, so the current solution is "good enough".
- samples_buffer = numpy.zeros((self.sample_buffer_size,), dtype=numpy.float64)
- self.ReadAnalogF64(DAQmx_Val_Auto, 0.0, DAQmx_Val_GroupByScanNumber, samples_buffer,
- self.sample_buffer_size, byref(self.samples_read), None)
- self.consumer.write((samples_buffer, self.samples_read.value))
-
- def DoneCallback(self, status): # pylint: disable=W0613,R0201
- return 0 # The function should return an integer
-
-
-class ReadSamplesThreadedTask(ReadSamplesBaseTask):
- """
- Earlier verisons of the driver (on CentOS) do not support callbacks. So need
- to create a thread to periodically poll the buffer
-
- """
-
- def __init__(self, config, consumer):
- ReadSamplesBaseTask.__init__(self, config, consumer)
- self.poller = DaqPoller(self)
-
- def StartTask(self):
- ReadSamplesBaseTask.StartTask(self)
- self.poller.start()
-
- def StopTask(self):
- self.poller.stop()
- ReadSamplesBaseTask.StopTask(self)
-
-
-class DaqPoller(threading.Thread):
-
- def __init__(self, task, wait_period=1):
- super(DaqPoller, self).__init__()
- self.task = task
- self.wait_period = wait_period
- self._stop_signal = threading.Event()
- self.samples_buffer = numpy.zeros((self.task.sample_buffer_size,), dtype=numpy.float64)
-
- def run(self):
- while not self._stop_signal.is_set():
- # Note to future self: see the comment inside EventNCallback() above
- samples_buffer = numpy.zeros((self.task.sample_buffer_size,), dtype=numpy.float64)
- try:
- self.task.ReadAnalogF64(DAQmx_Val_Auto, self.wait_period, DAQmx_Val_GroupByScanNumber, samples_buffer,
- self.task.sample_buffer_size, byref(self.task.samples_read), None)
- except DAQError:
- pass
- self.task.consumer.write((samples_buffer, self.task.samples_read.value))
-
- def stop(self):
- self._stop_signal.set()
- self.join()
-
-
-class AsyncWriter(threading.Thread):
-
- def __init__(self, wait_period=1):
- super(AsyncWriter, self).__init__()
- self.daemon = True
- self.wait_period = wait_period
- self.running = threading.Event()
- self._stop_signal = threading.Event()
- self._queue = Queue()
-
- def write(self, stuff):
- if self._stop_signal.is_set():
- raise IOError('Attempting to writer to {} after it has been closed.'.format(self.__class__.__name__))
- self._queue.put(stuff)
-
- def do_write(self, stuff):
- raise NotImplementedError()
-
- def run(self):
- self.running.set()
- while True:
- if self._stop_signal.is_set() and self._queue.empty():
- break
- try:
- self.do_write(self._queue.get(block=True, timeout=self.wait_period))
- except Empty:
- pass # carry on
- self.running.clear()
-
- def stop(self):
- self._stop_signal.set()
-
- def wait(self):
- while self.running.is_set():
- time.sleep(self.wait_period)
-
-
-class PortWriter(object):
-
- def __init__(self, path):
- self.path = path
- self.fh = open(path, 'w', 0)
- self.writer = csv.writer(self.fh)
- self.writer.writerow(['power', 'voltage'])
-
- def write(self, row):
- self.writer.writerow(row)
-
- def close(self):
- self.fh.close()
-
- def __del__(self):
- self.close()
-
-
-class SamplePorcessorError(Exception):
- pass
-
-
-class SampleProcessor(AsyncWriter):
-
- def __init__(self, resistor_values, output_directory, labels):
- super(SampleProcessor, self).__init__()
- self.resistor_values = resistor_values
- self.output_directory = output_directory
- self.labels = labels
- self.number_of_ports = len(resistor_values)
- if len(self.labels) != self.number_of_ports:
- message = 'Number of labels ({}) does not match number of ports ({}).'
- raise SamplePorcessorError(message.format(len(self.labels), self.number_of_ports))
- self.port_writers = []
-
- def do_write(self, sample_tuple):
- samples, number_of_samples = sample_tuple
- for i in xrange(0, number_of_samples * self.number_of_ports * 2, self.number_of_ports * 2):
- for j in xrange(self.number_of_ports):
- V = float(samples[i + 2 * j])
- DV = float(samples[i + 2 * j + 1])
- P = V * (DV / self.resistor_values[j])
- self.port_writers[j].write([P, V])
-
- def start(self):
- for label in self.labels:
- port_file = self.get_port_file_path(label)
- writer = PortWriter(port_file)
- self.port_writers.append(writer)
- super(SampleProcessor, self).start()
-
- def stop(self):
- super(SampleProcessor, self).stop()
- self.wait()
- for writer in self.port_writers:
- writer.close()
-
- def get_port_file_path(self, port_id):
- if port_id in self.labels:
- return os.path.join(self.output_directory, port_id + '.csv')
- else:
- raise SamplePorcessorError('Invalid port ID: {}'.format(port_id))
-
- def __del__(self):
- self.stop()
-
-
-class DaqRunner(object):
-
- @property
- def number_of_ports(self):
- return self.config.number_of_ports
-
- def __init__(self, config, output_directory):
- self.config = config
- self.processor = SampleProcessor(config.resistor_values, output_directory, config.labels)
- if callbacks_supported:
- self.task = ReadSamplesCallbackTask(config, self.processor)
- else:
- self.task = ReadSamplesThreadedTask(config, self.processor) # pylint: disable=redefined-variable-type
- self.is_running = False
-
- def start(self):
- log.debug('Starting sample processor.')
- self.processor.start()
- log.debug('Starting DAQ Task.')
- self.task.StartTask()
- self.is_running = True
- log.debug('Runner started.')
-
- def stop(self):
- self.is_running = False
- log.debug('Stopping DAQ Task.')
- self.task.StopTask()
- log.debug('Stopping sample processor.')
- self.processor.stop()
- log.debug('Runner stopped.')
-
- def get_port_file_path(self, port_id):
- return self.processor.get_port_file_path(port_id)
-
-
-if __name__ == '__main__':
- from collections import namedtuple
- DeviceConfig = namedtuple('DeviceConfig', ['device_id', 'channel_map', 'resistor_values',
- 'v_range', 'dv_range', 'sampling_rate',
- 'number_of_ports', 'labels'])
- channel_map = (0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23)
- resistor_values = [0.005]
- labels = ['PORT_0']
- dev_config = DeviceConfig('Dev1', channel_map, resistor_values, 2.5, 0.2, 10000, len(resistor_values), labels)
- if len(sys.argv) != 3:
- print 'Usage: {} OUTDIR DURATION'.format(os.path.basename(__file__))
- sys.exit(1)
- output_directory = sys.argv[1]
- duration = float(sys.argv[2])
-
- print "Avialable devices:", list_available_devices()
- runner = DaqRunner(dev_config, output_directory)
- runner.start()
- time.sleep(duration)
- runner.stop()
diff --git a/wlauto/external/daq_server/src/daqpower/log.py b/wlauto/external/daq_server/src/daqpower/log.py
deleted file mode 100644
index eeb63afb..00000000
--- a/wlauto/external/daq_server/src/daqpower/log.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import logging
-
-from twisted.python import log
-
-__all__ = ['debug', 'info', 'warning', 'error', 'critical', 'start_logging']
-
-debug = lambda x: log.msg(x, logLevel=logging.DEBUG)
-info = lambda x: log.msg(x, logLevel=logging.INFO)
-warning = lambda x: log.msg(x, logLevel=logging.WARNING)
-error = lambda x: log.msg(x, logLevel=logging.ERROR)
-critical = lambda x: log.msg(x, logLevel=logging.CRITICAL)
-
-
-class CustomLoggingObserver(log.PythonLoggingObserver):
-
- def __init__(self, loggerName="twisted"):
- super(CustomLoggingObserver, self).__init__(loggerName)
- if hasattr(self, '_newObserver'): # new vesions of Twisted
- self.logger = self._newObserver.logger # pylint: disable=no-member
-
- def emit(self, eventDict):
- if 'logLevel' in eventDict:
- level = eventDict['logLevel']
- elif eventDict['isError']:
- level = logging.ERROR
- else:
- # All of that just just to override this one line from
- # default INFO level...
- level = logging.DEBUG
- text = log.textFromEventDict(eventDict)
- if text is None:
- return
- self.logger.log(level, text)
-
-
-logObserver = CustomLoggingObserver()
-logObserver.start()
-
-
-def start_logging(level, fmt='%(asctime)s %(levelname)-8s: %(message)s'):
- logging.basicConfig(level=getattr(logging, level), format=fmt)
-
diff --git a/wlauto/external/daq_server/src/daqpower/server.py b/wlauto/external/daq_server/src/daqpower/server.py
deleted file mode 100644
index 5ec9e030..00000000
--- a/wlauto/external/daq_server/src/daqpower/server.py
+++ /dev/null
@@ -1,526 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E1101,W0613,wrong-import-position
-from __future__ import division
-import os
-import sys
-import argparse
-import shutil
-import socket
-import time
-from datetime import datetime, timedelta
-
-from zope.interface import implements
-from twisted.protocols.basic import LineReceiver
-from twisted.internet.protocol import Factory, Protocol
-from twisted.internet import reactor, interfaces
-from twisted.internet.error import ConnectionLost, ConnectionDone
-
-if __name__ == "__main__": # for debugging
- sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
-from daqpower import log
-from daqpower.config import DeviceConfiguration
-from daqpower.common import DaqServerRequest, DaqServerResponse, Status
-
-try:
- from daqpower.daq import DaqRunner, list_available_devices, CAN_ENUMERATE_DEVICES
- __import_error = None
-except ImportError as e:
- # May be using debug mode.
- __import_error = e
- DaqRunner = None
- list_available_devices = lambda: ['Dev1']
-
-
-class ProtocolError(Exception):
- pass
-
-
-class DummyDaqRunner(object):
- """Dummy stub used when running in debug mode."""
-
- num_rows = 200
-
- @property
- def number_of_ports(self):
- return self.config.number_of_ports
-
- def __init__(self, config, output_directory):
- log.info('Creating runner with {} {}'.format(config, output_directory))
- self.config = config
- self.output_directory = output_directory
- self.is_running = False
-
- def start(self):
- import csv, random # pylint: disable=multiple-imports
- log.info('runner started')
- for i in xrange(self.config.number_of_ports):
- rows = [['power', 'voltage']] + [[random.gauss(1.0, 1.0), random.gauss(1.0, 0.1)]
- for _ in xrange(self.num_rows)]
- with open(self.get_port_file_path(self.config.labels[i]), 'wb') as wfh:
- writer = csv.writer(wfh)
- writer.writerows(rows)
-
- self.is_running = True
-
- def stop(self):
- self.is_running = False
- log.info('runner stopped')
-
- def get_port_file_path(self, port_id):
- if port_id in self.config.labels:
- return os.path.join(self.output_directory, '{}.csv'.format(port_id))
- else:
- raise Exception('Invalid port id: {}'.format(port_id))
-
-
-class DaqServer(object):
-
- def __init__(self, base_output_directory):
- self.base_output_directory = os.path.abspath(base_output_directory)
- if os.path.isdir(self.base_output_directory):
- log.info('Using output directory: {}'.format(self.base_output_directory))
- else:
- log.info('Creating new output directory: {}'.format(self.base_output_directory))
- os.makedirs(self.base_output_directory)
- self.runner = None
- self.output_directory = None
- self.labels = None
-
- def configure(self, config_string):
- message = None
- if self.runner:
- message = 'Configuring a new session before previous session has been terminated.'
- log.warning(message)
- if self.runner.is_running:
- self.runner.stop()
- config = DeviceConfiguration.deserialize(config_string)
- config.validate()
- self.output_directory = self._create_output_directory()
- self.labels = config.labels
- log.info('Writing port files to {}'.format(self.output_directory))
- self.runner = DaqRunner(config, self.output_directory)
- return message
-
- def start(self):
- if self.runner:
- if not self.runner.is_running:
- self.runner.start()
- else:
- message = 'Calling start() before stop() has been called. Data up to this point will be lost.'
- log.warning(message)
- self.runner.stop()
- self.runner.start()
- return message
- else:
- raise ProtocolError('Start called before a session has been configured.')
-
- def stop(self):
- if self.runner:
- if self.runner.is_running:
- self.runner.stop()
- else:
- message = 'Attempting to stop() before start() was invoked.'
- log.warning(message)
- self.runner.stop()
- return message
- else:
- raise ProtocolError('Stop called before a session has been configured.')
-
- def list_devices(self): # pylint: disable=no-self-use
- return list_available_devices()
-
- def list_ports(self):
- return self.labels
-
- def list_port_files(self):
- if not self.runner:
- raise ProtocolError('Attempting to list port files before session has been configured.')
- ports_with_files = []
- for port_id in self.labels:
- path = self.get_port_file_path(port_id)
- if os.path.isfile(path):
- ports_with_files.append(port_id)
- return ports_with_files
-
- def get_port_file_path(self, port_id):
- if not self.runner:
- raise ProtocolError('Attepting to get port file path before session has been configured.')
- return self.runner.get_port_file_path(port_id)
-
- def terminate(self):
- message = None
- if self.runner:
- if self.runner.is_running:
- message = 'Terminating session before runner has been stopped.'
- log.warning(message)
- self.runner.stop()
- self.runner = None
- if self.output_directory and os.path.isdir(self.output_directory):
- shutil.rmtree(self.output_directory)
- self.output_directory = None
- log.info('Session terminated.')
- else: # Runner has not been created.
- message = 'Attempting to close session before it has been configured.'
- log.warning(message)
- return message
-
- def _create_output_directory(self):
- basename = datetime.now().strftime('%Y-%m-%d_%H%M%S%f')
- dirname = os.path.join(self.base_output_directory, basename)
- os.makedirs(dirname)
- return dirname
-
- def __del__(self):
- if self.runner:
- self.runner.stop()
-
- def __str__(self):
- return '({})'.format(self.base_output_directory)
-
- __repr__ = __str__
-
-
-class DaqControlProtocol(LineReceiver): # pylint: disable=W0223
-
- def __init__(self, daq_server):
- self.daq_server = daq_server
- self.factory = None
-
- def lineReceived(self, line):
- line = line.strip()
- log.info('Received: {}'.format(line))
- try:
- request = DaqServerRequest.deserialize(line)
- except Exception, e: # pylint: disable=W0703
- # PyDAQmx exceptions use "mess" rather than the standard "message"
- # to pass errors...
- message = getattr(e, 'mess', e.message)
- self.sendError('Received bad request ({}: {})'.format(e.__class__.__name__, message))
- else:
- self.processRequest(request)
-
- def processRequest(self, request):
- try:
- if request.command == 'configure':
- self.configure(request)
- elif request.command == 'start':
- self.start(request)
- elif request.command == 'stop':
- self.stop(request)
- elif request.command == 'list_devices':
- self.list_devices(request)
- elif request.command == 'list_ports':
- self.list_ports(request)
- elif request.command == 'list_port_files':
- self.list_port_files(request)
- elif request.command == 'pull':
- self.pull_port_data(request)
- elif request.command == 'close':
- self.terminate(request)
- else:
- self.sendError('Received unknown command: {}'.format(request.command))
- except Exception, e: # pylint: disable=W0703
- message = getattr(e, 'mess', e.message)
- self.sendError('{}: {}'.format(e.__class__.__name__, message))
-
- def configure(self, request):
- if 'config' in request.params:
- result = self.daq_server.configure(request.params['config'])
- if not result:
- self.sendResponse(Status.OK)
- else:
- self.sendResponse(Status.OKISH, message=result)
- else:
- self.sendError('Invalid config; config string not provided.')
-
- def start(self, request):
- result = self.daq_server.start()
- if not result:
- self.sendResponse(Status.OK)
- else:
- self.sendResponse(Status.OKISH, message=result)
-
- def stop(self, request):
- result = self.daq_server.stop()
- if not result:
- self.sendResponse(Status.OK)
- else:
- self.sendResponse(Status.OKISH, message=result)
-
- def pull_port_data(self, request):
- if 'port_id' in request.params:
- port_id = request.params['port_id']
- port_file = self.daq_server.get_port_file_path(port_id)
- if os.path.isfile(port_file):
- port = self._initiate_file_transfer(port_file)
- self.sendResponse(Status.OK, data={'port_number': port})
- else:
- self.sendError('File for port {} does not exist.'.format(port_id))
- else:
- self.sendError('Invalid pull request; port id not provided.')
-
- def list_devices(self, request):
- if CAN_ENUMERATE_DEVICES:
- devices = self.daq_server.list_devices()
- self.sendResponse(Status.OK, data={'devices': devices})
- else:
- message = "Server does not support DAQ device enumration"
- self.sendResponse(Status.OKISH, message=message)
-
- def list_ports(self, request):
- port_labels = self.daq_server.list_ports()
- self.sendResponse(Status.OK, data={'ports': port_labels})
-
- def list_port_files(self, request):
- port_labels = self.daq_server.list_port_files()
- self.sendResponse(Status.OK, data={'ports': port_labels})
-
- def terminate(self, request):
- status = Status.OK
- message = ''
- if self.factory.transfer_sessions:
- message = 'Terminating with file tranfer sessions in progress. '
- log.warning(message)
- for session in self.factory.transfer_sessions:
- self.factory.transferComplete(session)
- message += self.daq_server.terminate() or ''
- if message:
- status = Status.OKISH
- self.sendResponse(status, message)
-
- def sendError(self, message):
- log.error(message)
- self.sendResponse(Status.ERROR, message)
-
- def sendResponse(self, status, message=None, data=None):
- response = DaqServerResponse(status, message=message, data=data)
- self.sendLine(response.serialize())
-
- def sendLine(self, line):
- log.info('Responding: {}'.format(line))
- LineReceiver.sendLine(self, line.replace('\r\n', ''))
-
- def _initiate_file_transfer(self, filepath):
- sender_factory = FileSenderFactory(filepath, self.factory)
- connector = reactor.listenTCP(0, sender_factory)
- self.factory.transferInitiated(sender_factory, connector)
- return connector.getHost().port
-
-
-class DaqFactory(Factory):
-
- protocol = DaqControlProtocol
- check_alive_period = 5 * 60
- max_transfer_lifetime = 30 * 60
-
- def __init__(self, server, cleanup_period=24 * 60 * 60, cleanup_after_days=5):
- self.server = server
- self.cleanup_period = cleanup_period
- self.cleanup_threshold = timedelta(cleanup_after_days)
- self.transfer_sessions = {}
-
- def buildProtocol(self, addr):
- proto = DaqControlProtocol(self.server)
- proto.factory = self
- reactor.callLater(self.check_alive_period, self.pulse)
- reactor.callLater(self.cleanup_period, self.perform_cleanup)
- return proto
-
- def clientConnectionLost(self, connector, reason):
- log.msg('client connection lost: {}.'.format(reason))
- if not isinstance(reason, ConnectionLost):
- log.msg('ERROR: Client terminated connection mid-transfer.')
- for session in self.transfer_sessions:
- self.transferComplete(session)
-
- def transferInitiated(self, session, connector):
- self.transfer_sessions[session] = (time.time(), connector)
-
- def transferComplete(self, session, reason='OK'):
- if reason != 'OK':
- log.error(reason)
- self.transfer_sessions[session][1].stopListening()
- del self.transfer_sessions[session]
-
- def pulse(self):
- """Close down any file tranfer sessions that have been open for too long."""
- current_time = time.time()
- for session in self.transfer_sessions:
- start_time, conn = self.transfer_sessions[session]
- if (current_time - start_time) > self.max_transfer_lifetime:
- message = '{} session on port {} timed out'
- self.transferComplete(session, message.format(session, conn.getHost().port))
- if self.transfer_sessions:
- reactor.callLater(self.check_alive_period, self.pulse)
-
- def perform_cleanup(self):
- """
- Cleanup and old uncollected data files to recover disk space.
-
- """
- log.msg('Performing cleanup of the output directory...')
- base_directory = self.server.base_output_directory
- current_time = datetime.now()
- for entry in os.listdir(base_directory):
- entry_path = os.path.join(base_directory, entry)
- entry_ctime = datetime.fromtimestamp(os.path.getctime(entry_path))
- existence_time = current_time - entry_ctime
- if existence_time > self.cleanup_threshold:
- log.debug('Removing {} (existed for {})'.format(entry, existence_time))
- shutil.rmtree(entry_path)
- else:
- log.debug('Keeping {} (existed for {})'.format(entry, existence_time))
- log.msg('Cleanup complete.')
-
- def __str__(self):
- return '<DAQ {}>'.format(self.server)
-
- __repr__ = __str__
-
-
-class FileReader(object):
-
- implements(interfaces.IPushProducer)
-
- def __init__(self, filepath):
- self.fh = open(filepath)
- self.proto = None
- self.done = False
- self._paused = True
-
- def setProtocol(self, proto):
- self.proto = proto
-
- def resumeProducing(self):
- if not self.proto:
- raise ProtocolError('resumeProducing called with no protocol set.')
- self._paused = False
- try:
- while not self._paused:
- line = self.fh.next().rstrip('\n') + '\r\n'
- self.proto.transport.write(line)
- except StopIteration:
- log.debug('Sent everything.')
- self.stopProducing()
-
- def pauseProducing(self):
- self._paused = True
-
- def stopProducing(self):
- self.done = True
- self.fh.close()
- self.proto.transport.unregisterProducer()
- self.proto.transport.loseConnection()
-
-
-class FileSenderProtocol(Protocol):
-
- def __init__(self, reader):
- self.reader = reader
- self.factory = None
-
- def connectionMade(self):
- self.transport.registerProducer(self.reader, True)
- self.reader.resumeProducing()
-
- def connectionLost(self, reason=ConnectionDone):
- if self.reader.done:
- self.factory.transferComplete()
- else:
- self.reader.pauseProducing()
- self.transport.unregisterProducer()
-
-
-class FileSenderFactory(Factory):
-
- @property
- def done(self):
- if self.reader:
- return self.reader.done
- else:
- return None
-
- def __init__(self, path, owner):
- self.path = os.path.abspath(path)
- self.reader = None
- self.owner = owner
-
- def buildProtocol(self, addr):
- if not self.reader:
- self.reader = FileReader(self.path)
- proto = FileSenderProtocol(self.reader)
- proto.factory = self
- self.reader.setProtocol(proto)
- return proto
-
- def transferComplete(self):
- self.owner.transferComplete(self)
-
- def __hash__(self):
- return hash(self.path)
-
- def __str__(self):
- return '<FileSender {}>'.format(self.path)
-
- __repr__ = __str__
-
-
-def run_server():
- parser = argparse.ArgumentParser()
- parser.add_argument('-d', '--directory', help='Working directory', metavar='DIR', default='.')
- parser.add_argument('-p', '--port', help='port the server will listen on.',
- metavar='PORT', default=45677, type=int)
- parser.add_argument('-c', '--cleanup-after', type=int, default=5, metavar='DAYS',
- help="""
- Sever will perodically clean up data files that are older than the number of
- days specfied by this parameter.
- """)
- parser.add_argument('--cleanup-period', type=int, default=1, metavar='DAYS',
- help='Specifies how ofte the server will attempt to clean up old files.')
- parser.add_argument('--debug', help='Run in debug mode (no DAQ connected).',
- action='store_true', default=False)
- parser.add_argument('--verbose', help='Produce verobose output.', action='store_true', default=False)
- args = parser.parse_args()
-
- if args.debug:
- global DaqRunner # pylint: disable=W0603
- DaqRunner = DummyDaqRunner
- else:
- if not DaqRunner:
- raise __import_error # pylint: disable=raising-bad-type
- if args.verbose or args.debug:
- log.start_logging('DEBUG')
- else:
- log.start_logging('INFO')
-
- # days to seconds
- cleanup_period = args.cleanup_period * 24 * 60 * 60
-
- server = DaqServer(args.directory)
- factory = DaqFactory(server, cleanup_period, args.cleanup_after)
- reactor.listenTCP(args.port, factory).getHost()
- try:
- hostname = socket.gethostbyname(socket.gethostname())
- except socket.gaierror:
- hostname = 'localhost'
- log.info('Listening on {}:{}'.format(hostname, args.port))
- reactor.run()
-
-
-if __name__ == "__main__":
- run_server()
diff --git a/wlauto/external/daq_server/src/scripts/run-daq-server b/wlauto/external/daq_server/src/scripts/run-daq-server
deleted file mode 100644
index b20d6caf..00000000
--- a/wlauto/external/daq_server/src/scripts/run-daq-server
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from daqpower.server import run_server
-run_server()
diff --git a/wlauto/external/daq_server/src/scripts/send-daq-command b/wlauto/external/daq_server/src/scripts/send-daq-command
deleted file mode 100644
index a4656a67..00000000
--- a/wlauto/external/daq_server/src/scripts/send-daq-command
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-from daqpower.client import run_send_command
-run_send_command()
diff --git a/wlauto/external/daq_server/src/setup.py b/wlauto/external/daq_server/src/setup.py
deleted file mode 100644
index 3c892aa8..00000000
--- a/wlauto/external/daq_server/src/setup.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import warnings
-from distutils.core import setup
-
-import daqpower
-
-
-warnings.filterwarnings('ignore', "Unknown distribution option: 'install_requires'")
-
-params = dict(
- name='daqpower',
- version=daqpower.__version__,
- packages=[
- 'daqpower',
- ],
- scripts=[
- 'scripts/run-daq-server',
- 'scripts/send-daq-command',
- ],
- url='N/A',
- maintainer='workload-automation',
- maintainer_email='workload-automation@arm.com',
- install_requires=[
- 'twisted',
- 'PyDAQmx',
- ],
- # https://pypi.python.org/pypi?%3Aaction=list_classifiers
- classifiers=[
- 'Development Status :: 3 - Alpha',
- 'Environment :: Console',
- 'License :: Other/Proprietary License',
- 'Operating System :: Unix',
- 'Programming Language :: Python :: 2.7',
- ],
-)
-
-setup(**params)
diff --git a/wlauto/external/pmu_logger/Makefile b/wlauto/external/pmu_logger/Makefile
deleted file mode 100755
index ca7b2674..00000000
--- a/wlauto/external/pmu_logger/Makefile
+++ /dev/null
@@ -1,7 +0,0 @@
-# To build the pmu_logger module use the following command line
-# make ARCH=arm CROSS_COMPILE=arm-linux-gnueabi- -C ../kernel/out SUBDIRS=$PWD modules
-# where
-# CROSS_COMPILE - prefix of the arm linux compiler
-# -C - location of the configured kernel source tree
-
-obj-m := pmu_logger.o \ No newline at end of file
diff --git a/wlauto/external/pmu_logger/README b/wlauto/external/pmu_logger/README
deleted file mode 100755
index 9f3952a2..00000000
--- a/wlauto/external/pmu_logger/README
+++ /dev/null
@@ -1,35 +0,0 @@
-The pmu_logger module provides the ability to periodically trace CCI PMU counters. The trace destinations can be ftrace buffer and/or kernel logs. This file gives a quick overview of the funcationality provided by the module and how to use it.
-
-The pmu_logger module creates a directory in the debugfs filesystem called cci_pmu_logger which can be used to enable/disable the counters and control the events that are counted.
-
-To configure the events being counted write the corresponding event id to the counter* files. The list of CCI PMU events can be found at http://arminfo.emea.arm.com/help/index.jsp?topic=/com.arm.doc.ddi0470d/CJHICFBF.html.
-
-The "period_jiffies" can be used to control the periodicity of tracing. It accepts values in kernel jiffies.
-
-To enable tracing, write a 1 to "control". To disable write another 1 to "control". The files "enable_console" and "enable_ftrace" control where the trace is written to. To check if the counters are currently running or not, you can read the control file.
-
-The current values of the counters can be read from the "values" file.
-
-Eg. To trace, A15 and A7 snoop hit rate every 10 jiffies the following command are required -
-
-
-trace-cmd reset
-
-echo 0x63 > counter0
-echo 0x6A > counter1
-echo 0x83 > counter2
-echo 0x8A > counter3
-
-echo 10 > period_jiffies
-
-trace-cmd start -b 20000 -e "sched:sched_wakeup"
-
-echo 1 > control
-
-# perform the activity for which you would like to collect the CCI PMU trace.
-
-trace-cmd stop && trace-cmd extract
-
-echo 1 > control
-
-trace-cmd report trace.dat | grep print # shows the trace of the CCI PMU counters along with the cycle counter values. \ No newline at end of file
diff --git a/wlauto/external/pmu_logger/pmu_logger.c b/wlauto/external/pmu_logger/pmu_logger.c
deleted file mode 100755
index 47497a10..00000000
--- a/wlauto/external/pmu_logger/pmu_logger.c
+++ /dev/null
@@ -1,294 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-/*
- * pmu_logger.c - Kernel module to log the CCI PMU counters
- */
-
-#include <linux/init.h>
-#include <linux/kernel.h>
-#include <linux/module.h>
-#include <linux/debugfs.h>
-#include <linux/timer.h>
-#include <asm/io.h>
-
-#define MODULE_NAME "cci_pmu_logger"
-
-// CCI_BASE needs to be modified to point to the mapped location of CCI in
-// memory on your device.
-#define CCI_BASE 0x2C090000 // TC2
-//#define CCI_BASE 0x10D20000
-#define CCI_SIZE 0x00010000
-
-#define PMCR 0x100
-
-#define PMCR_CEN (1 << 0)
-#define PMCR_RST (1 << 1)
-#define PMCR_CCR (1 << 2)
-#define PMCR_CCD (1 << 3)
-#define PMCR_EX (1 << 4)
-#define PMCR_DP (1 << 5)
-
-#define CC_BASE 0x9000
-#define PC0_BASE 0xA000
-#define PC1_BASE 0xB000
-#define PC2_BASE 0xC000
-#define PC3_BASE 0xD000
-
-#define PC_ESR 0x0
-#define CNT_VALUE 0x4
-#define CNT_CONTROL 0x8
-
-#define CNT_ENABLE (1 << 0)
-
-u32 counter0_event = 0x6A;
-u32 counter1_event = 0x63;
-u32 counter2_event = 0x8A;
-u32 counter3_event = 0x83;
-
-u32 enable_console = 0;
-u32 enable_ftrace = 1;
-
-void *cci_base = 0;
-
-static struct dentry *module_debugfs_root;
-static int enabled = false;
-
-u32 delay = 10; //jiffies. This translates to 1 sample every 100 ms
-struct timer_list timer;
-
-static void call_after_delay(void)
-{
- timer.expires = jiffies + delay;
- add_timer(&timer);
-}
-
-
-static void setup_and_call_after_delay(void (*fn)(unsigned long))
-{
- init_timer(&timer);
- timer.data = (unsigned long)&timer;
- timer.function = fn;
-
- call_after_delay();
-}
-
-static void print_counter_configuration(void)
-{
- if (enable_ftrace)
- trace_printk("Counter_0: %02x Counter_1: %02x Counter_2: %02x Counter_3: %02x\n", \
- counter0_event, counter1_event, counter2_event, counter3_event);
-
- if (enable_console)
- printk("Counter_0: %02x Counter_1: %02x Counter_2: %02x Counter_3: %02x\n", \
- counter0_event, counter1_event, counter2_event, counter3_event);
-}
-
-static void initialize_cci_pmu(void)
-{
- u32 val;
-
- // Select the events counted
- iowrite32(counter0_event, cci_base + PC0_BASE + PC_ESR);
- iowrite32(counter1_event, cci_base + PC1_BASE + PC_ESR);
- iowrite32(counter2_event, cci_base + PC2_BASE + PC_ESR);
- iowrite32(counter3_event, cci_base + PC3_BASE + PC_ESR);
-
- // Enable the individual PMU counters
- iowrite32(CNT_ENABLE, cci_base + PC0_BASE + CNT_CONTROL);
- iowrite32(CNT_ENABLE, cci_base + PC1_BASE + CNT_CONTROL);
- iowrite32(CNT_ENABLE, cci_base + PC2_BASE + CNT_CONTROL);
- iowrite32(CNT_ENABLE, cci_base + PC3_BASE + CNT_CONTROL);
- iowrite32(CNT_ENABLE, cci_base + CC_BASE + CNT_CONTROL);
-
- // Reset the counters and configure the Cycle Count Divider
- val = ioread32(cci_base + PMCR);
- iowrite32(val | PMCR_RST | PMCR_CCR | PMCR_CCD, cci_base + PMCR);
-}
-
-static void enable_cci_pmu_counters(void)
-{
- u32 val = ioread32(cci_base + PMCR);
- iowrite32(val | PMCR_CEN, cci_base + PMCR);
-}
-
-static void disable_cci_pmu_counters(void)
-{
- u32 val = ioread32(cci_base + PMCR);
- iowrite32(val & ~PMCR_CEN, cci_base + PMCR);
-}
-
-static void trace_values(unsigned long arg)
-{
- u32 cycles;
- u32 counter[4];
-
- cycles = ioread32(cci_base + CC_BASE + CNT_VALUE);
- counter[0] = ioread32(cci_base + PC0_BASE + CNT_VALUE);
- counter[1] = ioread32(cci_base + PC1_BASE + CNT_VALUE);
- counter[2] = ioread32(cci_base + PC2_BASE + CNT_VALUE);
- counter[3] = ioread32(cci_base + PC3_BASE + CNT_VALUE);
-
- if (enable_ftrace)
- trace_printk("Cycles: %08x Counter_0: %08x"
- " Counter_1: %08x Counter_2: %08x Counter_3: %08x\n", \
- cycles, counter[0], counter[1], counter[2], counter[3]);
-
- if (enable_console)
- printk("Cycles: %08x Counter_0: %08x"
- " Counter_1: %08x Counter_2: %08x Counter_3: %08x\n", \
- cycles, counter[0], counter[1], counter[2], counter[3]);
-
- if (enabled) {
- u32 val;
- // Reset the counters
- val = ioread32(cci_base + PMCR);
- iowrite32(val | PMCR_RST | PMCR_CCR, cci_base + PMCR);
-
- call_after_delay();
- }
-}
-
-static ssize_t read_control(struct file *file, char __user *buf, size_t count, loff_t *ppos)
-{
- char status[16];
- /* printk(KERN_DEBUG "%s\n", __func__); */
-
- if (enabled)
- snprintf(status, 16, "enabled\n");
- else
- snprintf(status, 16, "disabled\n");
-
- return simple_read_from_buffer(buf, count, ppos, status, strlen(status));
-}
-
-static ssize_t write_control(struct file *file, const char __user *buf, size_t count, loff_t *ppos)
-{
- if (enabled) {
- disable_cci_pmu_counters();
- enabled = false;
- } else {
- initialize_cci_pmu();
- enable_cci_pmu_counters();
- enabled = true;
-
- print_counter_configuration();
- setup_and_call_after_delay(trace_values);
- }
-
- return count;
-}
-
-static ssize_t read_values(struct file *file, char __user *buf, size_t count, loff_t *ppos)
-{
- char values[256];
- /* u32 val; */
-
- snprintf(values, 256, "Cycles: %08x Counter_0: %08x"
- " Counter_1: %08x Counter_2: %08x Counter_3: %08x\n", \
- ioread32(cci_base + CC_BASE + CNT_VALUE), \
- ioread32(cci_base + PC0_BASE + CNT_VALUE), \
- ioread32(cci_base + PC1_BASE + CNT_VALUE), \
- ioread32(cci_base + PC2_BASE + CNT_VALUE), \
- ioread32(cci_base + PC3_BASE + CNT_VALUE));
-
- return simple_read_from_buffer(buf, count, ppos, values, strlen(values));
-}
-
-static const struct file_operations control_fops = {
- .owner = THIS_MODULE,
- .read = read_control,
- .write = write_control,
-};
-
-static const struct file_operations value_fops = {
- .owner = THIS_MODULE,
- .read = read_values,
-};
-
-static int __init pmu_logger_init(void)
-{
- struct dentry *retval;
-
- module_debugfs_root = debugfs_create_dir(MODULE_NAME, NULL);
- if (!module_debugfs_root || IS_ERR(module_debugfs_root)) {
- printk(KERN_ERR "error creating debugfs dir.\n");
- goto out;
- }
-
- retval = debugfs_create_file("control", S_IRUGO | S_IWUGO, module_debugfs_root, NULL, &control_fops);
- if (!retval)
- goto out;
-
- retval = debugfs_create_file("values", S_IRUGO, module_debugfs_root, NULL, &value_fops);
- if (!retval)
- goto out;
-
- retval = debugfs_create_bool("enable_console", S_IRUGO | S_IWUGO, module_debugfs_root, &enable_console);
- if (!retval)
- goto out;
-
- retval = debugfs_create_bool("enable_ftrace", S_IRUGO | S_IWUGO, module_debugfs_root, &enable_ftrace);
- if (!retval)
- goto out;
-
- retval = debugfs_create_u32("period_jiffies", S_IRUGO | S_IWUGO, module_debugfs_root, &delay);
- if (!retval)
- goto out;
-
- retval = debugfs_create_x32("counter0", S_IRUGO | S_IWUGO, module_debugfs_root, &counter0_event);
- if (!retval)
- goto out;
- retval = debugfs_create_x32("counter1", S_IRUGO | S_IWUGO, module_debugfs_root, &counter1_event);
- if (!retval)
- goto out;
- retval = debugfs_create_x32("counter2", S_IRUGO | S_IWUGO, module_debugfs_root, &counter2_event);
- if (!retval)
- goto out;
- retval = debugfs_create_x32("counter3", S_IRUGO | S_IWUGO, module_debugfs_root, &counter3_event);
- if (!retval)
- goto out;
-
- cci_base = ioremap(CCI_BASE, CCI_SIZE);
- if (!cci_base)
- goto out;
-
- printk(KERN_INFO "CCI PMU Logger loaded.\n");
- return 0;
-
-out:
- debugfs_remove_recursive(module_debugfs_root);
- return 1;
-}
-
-static void __exit pmu_logger_exit(void)
-{
- if (module_debugfs_root) {
- debugfs_remove_recursive(module_debugfs_root);
- module_debugfs_root = NULL;
- }
- if (cci_base)
- iounmap(cci_base);
-
- printk(KERN_INFO "CCI PMU Logger removed.\n");
-}
-
-module_init(pmu_logger_init);
-module_exit(pmu_logger_exit);
-
-MODULE_LICENSE("GPL");
-MODULE_AUTHOR("Punit Agrawal");
-MODULE_DESCRIPTION("logger for CCI PMU counters");
diff --git a/wlauto/external/pmu_logger/pmu_logger.ko b/wlauto/external/pmu_logger/pmu_logger.ko
deleted file mode 100644
index 84164383..00000000
--- a/wlauto/external/pmu_logger/pmu_logger.ko
+++ /dev/null
Binary files differ
diff --git a/wlauto/external/readenergy/Makefile b/wlauto/external/readenergy/Makefile
deleted file mode 100644
index 76a25594..00000000
--- a/wlauto/external/readenergy/Makefile
+++ /dev/null
@@ -1,11 +0,0 @@
-# To build:
-#
-# CROSS_COMPILE=aarch64-linux-gnu- make
-#
-CROSS_COMPILE?=aarch64-linux-gnu-
-CC=$(CROSS_COMPILE)gcc
-CFLAGS='-Wl,-static -Wl,-lc'
-
-readenergy: readenergy.c
- $(CC) $(CFLAGS) readenergy.c -o readenergy
- cp readenergy ../../instrumentation/juno_energy/readenergy
diff --git a/wlauto/external/readenergy/readenergy b/wlauto/external/readenergy/readenergy
deleted file mode 100755
index c26991c2..00000000
--- a/wlauto/external/readenergy/readenergy
+++ /dev/null
Binary files differ
diff --git a/wlauto/external/readenergy/readenergy.c b/wlauto/external/readenergy/readenergy.c
deleted file mode 100644
index cc945f7f..00000000
--- a/wlauto/external/readenergy/readenergy.c
+++ /dev/null
@@ -1,345 +0,0 @@
-/* Copyright 2014-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-/*
- * readenergy.c
- *
- * Reads APB energy registers in Juno and outputs the measurements (converted to appropriate units).
- *
-*/
-#include <errno.h>
-#include <fcntl.h>
-#include <stdint.h>
-#include <stdio.h>
-#include <stdlib.h>
-#include <string.h>
-#include <signal.h>
-#include <sys/mman.h>
-#include <sys/stat.h>
-#include <sys/types.h>
-#include <time.h>
-#include <unistd.h>
-
-// The following values obtained from Juno TRM 2014/03/04 section 4.5
-
-// Location of APB registers in memory
-#define APB_BASE_MEMORY 0x1C010000
-// APB energy counters start at offset 0xD0 from the base APB address.
-#define BASE_INDEX 0xD0 / 4
-// the one-past last APB counter
-#define APB_SIZE 0x120
-
-// Masks specifying the bits that contain the actual counter values
-#define CMASK 0xFFF
-#define VMASK 0xFFF
-#define PMASK 0xFFFFFF
-
-// Sclaing factor (divisor) or getting measured values from counters
-#define SYS_ADC_CH0_PM1_SYS_SCALE 761
-#define SYS_ADC_CH1_PM2_A57_SCALE 381
-#define SYS_ADC_CH2_PM3_A53_SCALE 761
-#define SYS_ADC_CH3_PM4_GPU_SCALE 381
-#define SYS_ADC_CH4_VSYS_SCALE 1622
-#define SYS_ADC_CH5_VA57_SCALE 1622
-#define SYS_ADC_CH6_VA53_SCALE 1622
-#define SYS_ADC_CH7_VGPU_SCALE 1622
-#define SYS_POW_CH04_SYS_SCALE (SYS_ADC_CH0_PM1_SYS_SCALE * SYS_ADC_CH4_VSYS_SCALE)
-#define SYS_POW_CH15_A57_SCALE (SYS_ADC_CH1_PM2_A57_SCALE * SYS_ADC_CH5_VA57_SCALE)
-#define SYS_POW_CH26_A53_SCALE (SYS_ADC_CH2_PM3_A53_SCALE * SYS_ADC_CH6_VA53_SCALE)
-#define SYS_POW_CH37_GPU_SCALE (SYS_ADC_CH3_PM4_GPU_SCALE * SYS_ADC_CH7_VGPU_SCALE)
-#define SYS_ENM_CH0_SYS_SCALE 12348030000
-#define SYS_ENM_CH1_A57_SCALE 6174020000
-#define SYS_ENM_CH0_A53_SCALE 12348030000
-#define SYS_ENM_CH0_GPU_SCALE 6174020000
-
-// Original values prior to re-callibrations.
-/*#define SYS_ADC_CH0_PM1_SYS_SCALE 819.2*/
-/*#define SYS_ADC_CH1_PM2_A57_SCALE 409.6*/
-/*#define SYS_ADC_CH2_PM3_A53_SCALE 819.2*/
-/*#define SYS_ADC_CH3_PM4_GPU_SCALE 409.6*/
-/*#define SYS_ADC_CH4_VSYS_SCALE 1638.4*/
-/*#define SYS_ADC_CH5_VA57_SCALE 1638.4*/
-/*#define SYS_ADC_CH6_VA53_SCALE 1638.4*/
-/*#define SYS_ADC_CH7_VGPU_SCALE 1638.4*/
-/*#define SYS_POW_CH04_SYS_SCALE (SYS_ADC_CH0_PM1_SYS_SCALE * SYS_ADC_CH4_VSYS_SCALE)*/
-/*#define SYS_POW_CH15_A57_SCALE (SYS_ADC_CH1_PM2_A57_SCALE * SYS_ADC_CH5_VA57_SCALE)*/
-/*#define SYS_POW_CH26_A53_SCALE (SYS_ADC_CH2_PM3_A53_SCALE * SYS_ADC_CH6_VA53_SCALE)*/
-/*#define SYS_POW_CH37_GPU_SCALE (SYS_ADC_CH3_PM4_GPU_SCALE * SYS_ADC_CH7_VGPU_SCALE)*/
-/*#define SYS_ENM_CH0_SYS_SCALE 13421772800.0*/
-/*#define SYS_ENM_CH1_A57_SCALE 6710886400.0*/
-/*#define SYS_ENM_CH0_A53_SCALE 13421772800.0*/
-/*#define SYS_ENM_CH0_GPU_SCALE 6710886400.0*/
-
-// Ignore individual errors but if see too many, abort.
-#define ERROR_THRESHOLD 10
-
-// Default counter poll period (in milliseconds).
-#define DEFAULT_PERIOD 100
-
-// A single reading from the energy meter. The values are the proper readings converted
-// to appropriate units (e.g. Watts for power); they are *not* raw counter values.
-struct reading
-{
- double sys_adc_ch0_pm1_sys;
- double sys_adc_ch1_pm2_a57;
- double sys_adc_ch2_pm3_a53;
- double sys_adc_ch3_pm4_gpu;
- double sys_adc_ch4_vsys;
- double sys_adc_ch5_va57;
- double sys_adc_ch6_va53;
- double sys_adc_ch7_vgpu;
- double sys_pow_ch04_sys;
- double sys_pow_ch15_a57;
- double sys_pow_ch26_a53;
- double sys_pow_ch37_gpu;
- double sys_enm_ch0_sys;
- double sys_enm_ch1_a57;
- double sys_enm_ch0_a53;
- double sys_enm_ch0_gpu;
-};
-
-inline uint64_t join_64bit_register(uint32_t *buffer, int index)
-{
- uint64_t result = 0;
- result |= buffer[index];
- result |= (uint64_t)(buffer[index+1]) << 32;
- return result;
-}
-
-int nsleep(const struct timespec *req, struct timespec *rem)
-{
- struct timespec temp_rem;
- if (nanosleep(req, rem) == -1)
- {
- if (errno == EINTR)
- {
- nsleep(rem, &temp_rem);
- }
- else
- {
- return errno;
- }
- }
- else
- {
- return 0;
- }
-}
-
-void print_help()
-{
- fprintf(stderr, "Usage: readenergy [-t PERIOD] -o OUTFILE\n\n"
- "Read Juno energy counters every PERIOD milliseconds, writing them\n"
- "to OUTFILE in CSV format until SIGTERM is received.\n\n"
- "Parameters:\n"
- " PERIOD is the counter poll period in milliseconds.\n"
- " (Defaults to 100 milliseconds.)\n"
- " OUTFILE is the output file path\n");
-}
-
-// debugging only...
-inline void dprint(char *msg)
-{
- fprintf(stderr, "%s\n", msg);
- sync();
-}
-
-// -------------------------------------- config ----------------------------------------------------
-
-struct config
-{
- struct timespec period;
- char *output_file;
-};
-
-void config_init_period_from_millis(struct config *this, long millis)
-{
- this->period.tv_sec = (time_t)(millis / 1000);
- this->period.tv_nsec = (millis % 1000) * 1000000;
-}
-
-void config_init(struct config *this, int argc, char *argv[])
-{
- this->output_file = NULL;
- config_init_period_from_millis(this, DEFAULT_PERIOD);
-
- int opt;
- while ((opt = getopt(argc, argv, "ht:o:")) != -1)
- {
- switch(opt)
- {
- case 't':
- config_init_period_from_millis(this, atol(optarg));
- break;
- case 'o':
- this->output_file = optarg;
- break;
- case 'h':
- print_help();
- exit(EXIT_SUCCESS);
- break;
- default:
- fprintf(stderr, "ERROR: Unexpected option %s\n\n", opt);
- print_help();
- exit(EXIT_FAILURE);
- }
- }
-
- if (this->output_file == NULL)
- {
- fprintf(stderr, "ERROR: Mandatory -o option not specified.\n\n");
- print_help();
- exit(EXIT_FAILURE);
- }
-}
-
-// -------------------------------------- /config ---------------------------------------------------
-
-// -------------------------------------- emeter ----------------------------------------------------
-
-struct emeter
-{
- int fd;
- FILE *out;
- void *mmap_base;
-};
-
-void emeter_init(struct emeter *this, char *outfile)
-{
- this->out = fopen(outfile, "w");
- if (this->out == NULL)
- {
- fprintf(stderr, "ERROR: Could not open output file %s; got %s\n", outfile, strerror(errno));
- exit(EXIT_FAILURE);
- }
-
- this->fd = open("/dev/mem", O_RDONLY);
- if(this->fd < 0)
- {
- fprintf(stderr, "ERROR: Can't open /dev/mem; got %s\n", strerror(errno));
- fclose(this->out);
- exit(EXIT_FAILURE);
- }
-
- this->mmap_base = mmap(NULL, APB_SIZE, PROT_READ, MAP_SHARED, this->fd, APB_BASE_MEMORY);
- if (this->mmap_base == MAP_FAILED)
- {
- fprintf(stderr, "ERROR: mmap failed; got %s\n", strerror(errno));
- close(this->fd);
- fclose(this->out);
- exit(EXIT_FAILURE);
- }
-
- fprintf(this->out, "sys_curr,a57_curr,a53_curr,gpu_curr,"
- "sys_volt,a57_volt,a53_volt,gpu_volt,"
- "sys_pow,a57_pow,a53_pow,gpu_pow,"
- "sys_cenr,a57_cenr,a53_cenr,gpu_cenr\n");
-}
-
-void emeter_read_measurements(struct emeter *this, struct reading *reading)
-{
- uint32_t *buffer = (uint32_t *)this->mmap_base;
- reading->sys_adc_ch0_pm1_sys = (double)(CMASK & buffer[BASE_INDEX+0]) / SYS_ADC_CH0_PM1_SYS_SCALE;
- reading->sys_adc_ch1_pm2_a57 = (double)(CMASK & buffer[BASE_INDEX+1]) / SYS_ADC_CH1_PM2_A57_SCALE;
- reading->sys_adc_ch2_pm3_a53 = (double)(CMASK & buffer[BASE_INDEX+2]) / SYS_ADC_CH2_PM3_A53_SCALE;
- reading->sys_adc_ch3_pm4_gpu = (double)(CMASK & buffer[BASE_INDEX+3]) / SYS_ADC_CH3_PM4_GPU_SCALE;
- reading->sys_adc_ch4_vsys = (double)(VMASK & buffer[BASE_INDEX+4]) / SYS_ADC_CH4_VSYS_SCALE;
- reading->sys_adc_ch5_va57 = (double)(VMASK & buffer[BASE_INDEX+5]) / SYS_ADC_CH5_VA57_SCALE;
- reading->sys_adc_ch6_va53 = (double)(VMASK & buffer[BASE_INDEX+6]) / SYS_ADC_CH6_VA53_SCALE;
- reading->sys_adc_ch7_vgpu = (double)(VMASK & buffer[BASE_INDEX+7]) / SYS_ADC_CH7_VGPU_SCALE;
- reading->sys_pow_ch04_sys = (double)(PMASK & buffer[BASE_INDEX+8]) / SYS_POW_CH04_SYS_SCALE;
- reading->sys_pow_ch15_a57 = (double)(PMASK & buffer[BASE_INDEX+9]) / SYS_POW_CH15_A57_SCALE;
- reading->sys_pow_ch26_a53 = (double)(PMASK & buffer[BASE_INDEX+10]) / SYS_POW_CH26_A53_SCALE;
- reading->sys_pow_ch37_gpu = (double)(PMASK & buffer[BASE_INDEX+11]) / SYS_POW_CH37_GPU_SCALE;
- reading->sys_enm_ch0_sys = (double)join_64bit_register(buffer, BASE_INDEX+12) / SYS_ENM_CH0_SYS_SCALE;
- reading->sys_enm_ch1_a57 = (double)join_64bit_register(buffer, BASE_INDEX+14) / SYS_ENM_CH1_A57_SCALE;
- reading->sys_enm_ch0_a53 = (double)join_64bit_register(buffer, BASE_INDEX+16) / SYS_ENM_CH0_A53_SCALE;
- reading->sys_enm_ch0_gpu = (double)join_64bit_register(buffer, BASE_INDEX+18) / SYS_ENM_CH0_GPU_SCALE;
-}
-
-void emeter_take_reading(struct emeter *this)
-{
- static struct reading reading;
- int error_count = 0;
- emeter_read_measurements(this, &reading);
- int ret = fprintf(this->out, "%f,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f,%f\n",
- reading.sys_adc_ch0_pm1_sys,
- reading.sys_adc_ch1_pm2_a57,
- reading.sys_adc_ch2_pm3_a53,
- reading.sys_adc_ch3_pm4_gpu,
- reading.sys_adc_ch4_vsys,
- reading.sys_adc_ch5_va57,
- reading.sys_adc_ch6_va53,
- reading.sys_adc_ch7_vgpu,
- reading.sys_pow_ch04_sys,
- reading.sys_pow_ch15_a57,
- reading.sys_pow_ch26_a53,
- reading.sys_pow_ch37_gpu,
- reading.sys_enm_ch0_sys,
- reading.sys_enm_ch1_a57,
- reading.sys_enm_ch0_a53,
- reading.sys_enm_ch0_gpu);
- if (ret < 0)
- {
- fprintf(stderr, "ERROR: while writing a meter reading: %s\n", strerror(errno));
- if (++error_count > ERROR_THRESHOLD)
- exit(EXIT_FAILURE);
- }
-}
-
-void emeter_finalize(struct emeter *this)
-{
- if (munmap(this->mmap_base, APB_SIZE) == -1)
- {
- // Report the error but don't bother doing anything else, as we're not gonna do
- // anything with emeter after this point anyway.
- fprintf(stderr, "ERROR: munmap failed; got %s\n", strerror(errno));
- }
- close(this->fd);
- fclose(this->out);
-}
-
-// -------------------------------------- /emeter ----------------------------------------------------
-
-int done = 0;
-
-void term_handler(int signum)
-{
- done = 1;
-}
-
-int main(int argc, char *argv[])
-{
- struct sigaction action;
- memset(&action, 0, sizeof(struct sigaction));
- action.sa_handler = term_handler;
- sigaction(SIGTERM, &action, NULL);
-
- struct config config;
- struct emeter emeter;
- config_init(&config, argc, argv);
- emeter_init(&emeter, config.output_file);
-
- struct timespec remaining;
- while (!done)
- {
- emeter_take_reading(&emeter);
- nsleep(&config.period, &remaining);
- }
-
- emeter_finalize(&emeter);
- return EXIT_SUCCESS;
-}
diff --git a/wlauto/external/revent/Makefile b/wlauto/external/revent/Makefile
deleted file mode 100644
index dbbfea75..00000000
--- a/wlauto/external/revent/Makefile
+++ /dev/null
@@ -1,12 +0,0 @@
-# CROSS_COMPILE=aarch64-linux-gnu- make
-#
-CC=gcc
-CFLAGS=-static -lc
-
-revent: revent.c
- $(CROSS_COMPILE)$(CC) $(CFLAGS) revent.c -o revent
-
-clean:
- rm -rf revent
-
-.PHONY: clean
diff --git a/wlauto/external/revent/revent.c b/wlauto/external/revent/revent.c
deleted file mode 100644
index 667bb51c..00000000
--- a/wlauto/external/revent/revent.c
+++ /dev/null
@@ -1,636 +0,0 @@
-/* Copyright 2012-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-#include <stdio.h>
-#include <stdint.h>
-#include <stdlib.h>
-#include <string.h>
-#include <unistd.h>
-#include <fcntl.h>
-#include <errno.h>
-#include <limits.h>
-#include <linux/input.h>
-#include <sys/stat.h>
-#include <signal.h>
-#include <ctype.h>
-
-#ifdef ANDROID
-#include <android/log.h>
-#endif
-
-
-#define die(args...) do { \
- fprintf(stderr, "ERROR: "); \
- fprintf(stderr, args); \
- exit(EXIT_FAILURE); \
-} while(0)
-
-#define dprintf(args...) if (verbose) printf(args)
-
-
-#define INPDEV_MAX_DEVICES 16
-#define INPDEV_MAX_PATH 30
-
-
-#ifndef ANDROID
-int strlcpy(char *dest, char *source, size_t size)
-{
- strncpy(dest, source, size-1);
- dest[size-1] = '\0';
- return size;
-}
-#endif
-
-typedef enum {
- FALSE=0,
- TRUE
-} bool_t;
-
-typedef enum {
- RECORD=0,
- REPLAY,
- DUMP,
- INFO,
- INVALID
-} revent_mode_t;
-
-typedef struct {
- revent_mode_t mode;
- int32_t record_time;
- int32_t device_number;
- char *file;
-} revent_args_t;
-
-typedef struct {
- int32_t id_pathc; /* Count of total paths so far. */
- char id_pathv[INPDEV_MAX_DEVICES][INPDEV_MAX_PATH]; /* List of paths matching pattern. */
-} inpdev_t;
-
-typedef struct {
- int32_t dev_idx;
- int32_t _padding;
- struct input_event event;
-} replay_event_t;
-
-typedef struct {
- int32_t num_fds;
- int32_t num_events;
- int *fds;
- replay_event_t *events;
-} replay_buffer_t;
-
-
-bool_t verbose = FALSE;
-bool_t wait_for_stdin = TRUE;
-
-bool_t is_numeric(char *string)
-{
- int len = strlen(string);
-
- int i = 0;
- while(i < len)
- {
- if(!isdigit(string[i]))
- return FALSE;
- i++;
- }
-
- return TRUE;
-}
-
-off_t get_file_size(const char *filename) {
- struct stat st;
-
- if (stat(filename, &st) == 0)
- return st.st_size;
-
- die("Cannot determine size of %s: %s\n", filename, strerror(errno));
-}
-
-int inpdev_init(inpdev_t **inpdev, int devid)
-{
- int32_t i;
- int fd;
- int32_t num_devices;
-
- *inpdev = malloc(sizeof(inpdev_t));
- (*inpdev)->id_pathc = 0;
-
- if (devid == -1) {
- // device id was not specified so we want to record from all available input devices.
- for(i = 0; i < INPDEV_MAX_DEVICES; ++i)
- {
- sprintf((*inpdev)->id_pathv[(*inpdev)->id_pathc], "/dev/input/event%d", i);
- fd = open((*inpdev)->id_pathv[(*inpdev)->id_pathc], O_RDONLY);
- if(fd > 0)
- {
- close(fd);
- dprintf("opened %s\n", (*inpdev)->id_pathv[(*inpdev)->id_pathc]);
- (*inpdev)->id_pathc++;
- }
- else
- {
- dprintf("could not open %s\n", (*inpdev)->id_pathv[(*inpdev)->id_pathc]);
- }
- }
- }
- else {
- // device id was specified so record just that device.
- sprintf((*inpdev)->id_pathv[0], "/dev/input/event%d", devid);
- fd = open((*inpdev)->id_pathv[0], O_RDONLY);
- if(fd > 0)
- {
- close(fd);
- dprintf("opened %s\n", (*inpdev)->id_pathv[0]);
- (*inpdev)->id_pathc++;
- }
- else
- {
- die("could not open %s\n", (*inpdev)->id_pathv[0]);
- }
- }
-
- return 0;
-}
-
-int inpdev_close(inpdev_t *inpdev)
-{
- free(inpdev);
- return 0;
-}
-
-void printDevProperties(const char* aDev)
-{
- int fd = -1;
- char name[256]= "Unknown";
- if ((fd = open(aDev, O_RDONLY)) < 0)
- die("could not open %s\n", aDev);
-
- if(ioctl(fd, EVIOCGNAME(sizeof(name)), name) < 0)
- die("evdev ioctl failed on %s\n", aDev);
-
- printf("The device on %s says its name is %s\n",
- aDev, name);
- close(fd);
-}
-
-void dump(const char *logfile)
-{
- int fdin = open(logfile, O_RDONLY);
- if (fdin < 0) die("Could not open eventlog %s\n", logfile);
-
- int nfds;
- size_t rb = read(fdin, &nfds, sizeof(nfds));
- if (rb != sizeof(nfds)) die("problems reading eventlog\n");
- int *fds = malloc(sizeof(int)*nfds);
- if (!fds) die("out of memory\n");
-
- int32_t len;
- int32_t i;
- char buf[INPDEV_MAX_PATH];
-
- inpdev_t *inpdev = malloc(sizeof(inpdev_t));
- inpdev->id_pathc = 0;
- for (i=0; i<nfds; i++) {
- memset(buf, 0, sizeof(buf));
- rb = read(fdin, &len, sizeof(len));
- if (rb != sizeof(len)) die("problems reading eventlog\n");
- rb = read(fdin, &buf[0], len);
- if (rb != len) die("problems reading eventlog\n");
- strlcpy(inpdev->id_pathv[inpdev->id_pathc], buf, INPDEV_MAX_PATH);
- inpdev->id_pathv[inpdev->id_pathc][INPDEV_MAX_PATH-1] = '\0';
- inpdev->id_pathc++;
- }
-
- struct input_event ev;
- int count = 0;
- while(1) {
- int32_t idx;
- rb = read(fdin, &idx, sizeof(idx));
- if (rb != sizeof(idx)) break;
- rb = read(fdin, &ev, sizeof(ev));
- if (rb < (int)sizeof(ev)) break;
-
- printf("%10u.%-6u %30s type %2d code %3d value %4d\n",
- (unsigned int)ev.time.tv_sec, (unsigned int)ev.time.tv_usec,
- inpdev->id_pathv[idx], ev.type, ev.code, ev.value);
- count++;
- }
-
- printf("\nTotal: %d events\n", count);
- close(fdin);
- free(inpdev);
-}
-
-int replay_buffer_init(replay_buffer_t **buffer, const char *logfile)
-{
- *buffer = malloc(sizeof(replay_buffer_t));
- replay_buffer_t *buff = *buffer;
- off_t fsize = get_file_size(logfile);
- buff->events = (replay_event_t *)malloc((size_t)fsize);
- if (!buff->events)
- die("out of memory\n");
-
- int fdin = open(logfile, O_RDONLY);
- if (fdin < 0)
- die("Could not open eventlog %s\n", logfile);
-
- size_t rb = read(fdin, &(buff->num_fds), sizeof(buff->num_fds));
- if (rb!=sizeof(buff->num_fds))
- die("problems reading eventlog\n");
-
- buff->fds = malloc(sizeof(int) * buff->num_fds);
- if (!buff->fds)
- die("out of memory\n");
-
- int32_t len, i;
- char path_buff[256]; // should be more than enough
- for (i = 0; i < buff->num_fds; i++) {
- memset(path_buff, 0, sizeof(path_buff));
- rb = read(fdin, &len, sizeof(len));
- if (rb!=sizeof(len))
- die("problems reading eventlog\n");
- rb = read(fdin, &path_buff[0], len);
- if (rb != len)
- die("problems reading eventlog\n");
-
- buff->fds[i] = open(path_buff, O_WRONLY | O_NDELAY);
- if (buff->fds[i] < 0)
- die("could not open device file %s\n", path_buff);
- }
-
- struct timeval start_time;
- replay_event_t rep_ev;
- i = 0;
- while(1) {
- rb = read(fdin, &rep_ev, sizeof(rep_ev));
- if (rb < (int)sizeof(rep_ev))
- break;
-
- if (i == 0) {
- start_time = rep_ev.event.time;
- }
- timersub(&(rep_ev.event.time), &start_time, &(rep_ev.event.time));
- memcpy(&(buff->events[i]), &rep_ev, sizeof(rep_ev));
- i++;
- }
- buff->num_events = i - 1;
- close(fdin);
- return 0;
-}
-
-int replay_buffer_close(replay_buffer_t *buff)
-{
- free(buff->fds);
- free(buff->events);
- free(buff);
- return 0;
-}
-
-int replay_buffer_play(replay_buffer_t *buff)
-{
- int32_t i = 0, rb;
- struct timeval start_time, now, desired_time, last_event_delta, delta;
- memset(&last_event_delta, 0, sizeof(struct timeval));
- gettimeofday(&start_time, NULL);
-
- while (i < buff->num_events) {
- gettimeofday(&now, NULL);
- timeradd(&start_time, &last_event_delta, &desired_time);
-
- if (timercmp(&desired_time, &now, >)) {
- timersub(&desired_time, &now, &delta);
- useconds_t d = (useconds_t)delta.tv_sec * 1000000 + delta.tv_usec;
- dprintf("now %u.%u desiredtime %u.%u sleeping %u uS\n",
- (unsigned int)now.tv_sec, (unsigned int)now.tv_usec,
- (unsigned int)desired_time.tv_sec, (unsigned int)desired_time.tv_usec, d);
- usleep(d);
- }
-
- int32_t idx = (buff->events[i]).dev_idx;
- struct input_event ev = (buff->events[i]).event;
- while((i < buff->num_events) && !timercmp(&ev.time, &last_event_delta, !=)) {
- rb = write(buff->fds[idx], &ev, sizeof(ev));
- if (rb!=sizeof(ev))
- die("problems writing\n");
- dprintf("replayed event: type %d code %d value %d\n", ev.type, ev.code, ev.value);
-
- i++;
- idx = (buff->events[i]).dev_idx;
- ev = (buff->events[i]).event;
- }
- last_event_delta = ev.time;
- }
-}
-
-void replay(const char *logfile)
-{
- replay_buffer_t *replay_buffer;
- replay_buffer_init(&replay_buffer, logfile);
-#ifdef ANDROID
- __android_log_write(ANDROID_LOG_INFO, "REVENT", "Replay starting");
-#endif
- replay_buffer_play(replay_buffer);
-#ifdef ANDROID
- __android_log_write(ANDROID_LOG_INFO, "REVENT", "Replay complete");
-#endif
- replay_buffer_close(replay_buffer);
-}
-
-void usage()
-{
- printf("usage:\n revent [-h] [-v] COMMAND [OPTIONS] \n"
- "\n"
- " Options:\n"
- " -h print this help message and quit.\n"
- " -v enable verbose output.\n"
- "\n"
- " Commands:\n"
- " record [-t SECONDS] [-d DEVICE] FILE\n"
- " Record input event. stops after return on STDIN (or, optionally, \n"
- " a fixed delay)\n"
- "\n"
- " FILE file into which events will be recorded.\n"
- " -t SECONDS time, in seconds, for which to record events.\n"
- " if not specifed, recording will continue until\n"
- " return key is pressed.\n"
- " -d DEVICE the number of the input device form which\n"
- " events will be recoreded. If not specified, \n"
- " all available inputs will be used.\n"
- "\n"
- " replay FILE\n"
- " replays previously recorded events from the specified file.\n"
- "\n"
- " FILE file into which events will be recorded.\n"
- "\n"
- " dump FILE\n"
- " dumps the contents of the specified event log to STDOUT in\n"
- " human-readable form.\n"
- "\n"
- " FILE event log which will be dumped.\n"
- "\n"
- " info\n"
- " shows info about each event char device\n"
- "\n"
- );
-}
-
-void revent_args_init(revent_args_t **rargs, int argc, char** argv)
-{
- *rargs = malloc(sizeof(revent_args_t));
- revent_args_t *revent_args = *rargs;
- revent_args->mode = INVALID;
- revent_args->record_time = INT_MAX;
- revent_args->device_number = -1;
- revent_args->file = NULL;
-
- int opt;
- while ((opt = getopt(argc, argv, "ht:d:vs")) != -1)
- {
- switch (opt) {
- case 'h':
- usage();
- exit(0);
- break;
- case 't':
- if (is_numeric(optarg)) {
- revent_args->record_time = atoi(optarg);
- dprintf("timeout: %d\n", revent_args->record_time);
- } else {
- die("-t parameter must be numeric; got %s.\n", optarg);
- }
- break;
- case 'd':
- if (is_numeric(optarg)) {
- revent_args->device_number = atoi(optarg);
- dprintf("device: %d\n", revent_args->device_number);
- } else {
- die("-d parameter must be numeric; got %s.\n", optarg);
- }
- break;
- case 'v':
- verbose = TRUE;
- break;
- case 's':
- wait_for_stdin = FALSE;
- break;
-
- default:
- die("Unexpected option: %c", opt);
- }
- }
-
- int next_arg = optind;
- if (next_arg == argc) {
- usage();
- die("Must specify a command.\n");
- }
- if (!strcmp(argv[next_arg], "record"))
- revent_args->mode = RECORD;
- else if (!strcmp(argv[next_arg], "replay"))
- revent_args->mode = REPLAY;
- else if (!strcmp(argv[next_arg], "dump"))
- revent_args->mode = DUMP;
- else if (!strcmp(argv[next_arg], "info"))
- revent_args->mode = INFO;
- else {
- usage();
- die("Unknown command -- %s\n", argv[next_arg]);
- }
- next_arg++;
-
- if (next_arg != argc) {
- revent_args->file = argv[next_arg];
- dprintf("file: %s\n", revent_args->file);
- next_arg++;
- if (next_arg != argc) {
- die("Trailling arguments (use -h for help).\n");
- }
- }
-
- if ((revent_args->mode != RECORD) && (revent_args->record_time != INT_MAX)) {
- die("-t parameter is only valid for \"record\" command.\n");
- }
- if ((revent_args->mode != RECORD) && (revent_args->device_number != -1)) {
- die("-d parameter is only valid for \"record\" command.\n");
- }
- if ((revent_args->mode == INFO) && (revent_args->file != NULL)) {
- die("File path cannot be specified for \"info\" command.\n");
- }
- if (((revent_args->mode == RECORD) || (revent_args->mode == REPLAY)) && (revent_args->file == NULL)) {
- die("Must specify a file for recording/replaying (use -h for help).\n");
- }
-}
-
-int revent_args_close(revent_args_t *rargs)
-{
- free(rargs);
- return 0;
-}
-
-int* fds = NULL;
-FILE* fdout = NULL;
-revent_args_t *rargs = NULL;
-inpdev_t *inpdev = NULL;
-int count;
-
-void term_handler(int signum)
-{
- int32_t i;
- for (i=0; i < inpdev->id_pathc; i++)
- {
- close(fds[i]);
- }
-
- fclose(fdout);
- free(fds);
- dprintf("Recorded %d events\n", count);
-
- inpdev_close(inpdev);
- revent_args_close(rargs);
- exit(0);
-}
-
-void record(inpdev_t *inpdev, int delay, const char *logfile)
-{
- fd_set readfds;
- struct input_event ev;
- int32_t i;
- int32_t _padding = 0xdeadbeef;
- int32_t maxfd = 0;
- int32_t keydev=0;
-
- //signal handler
- struct sigaction action;
- memset(&action, 0, sizeof(struct sigaction));
- action.sa_handler = term_handler;
- sigaction(SIGTERM, &action, NULL);
-
- fds = malloc(sizeof(int)*inpdev->id_pathc);
- if (!fds) die("out of memory\n");
-
- fdout = fopen(logfile, "wb");
- if (!fdout) die("Could not open eventlog %s\n", logfile);
-
- fwrite(&inpdev->id_pathc, sizeof(inpdev->id_pathc), 1, fdout);
- for (i=0; i<inpdev->id_pathc; i++) {
- int32_t len = strlen(inpdev->id_pathv[i]);
- fwrite(&len, sizeof(len), 1, fdout);
- fwrite(inpdev->id_pathv[i], len, 1, fdout);
- }
-
- for (i=0; i < inpdev->id_pathc; i++)
- {
- fds[i] = open(inpdev->id_pathv[i], O_RDONLY);
- if (fds[i]>maxfd) maxfd = fds[i];
- dprintf("opened %s with %d\n", inpdev->id_pathv[i], fds[i]);
- if (fds[i]<0) die("could not open \%s\n", inpdev->id_pathv[i]);
- }
-
- count = 0;
- struct timeval tout;
- while(1)
- {
- FD_ZERO(&readfds);
- if (wait_for_stdin)
- {
- FD_SET(STDIN_FILENO, &readfds);
- }
- for (i=0; i < inpdev->id_pathc; i++)
- FD_SET(fds[i], &readfds);
- /* wait for input */
- tout.tv_sec = delay;
- tout.tv_usec = 0;
- int32_t r = select(maxfd+1, &readfds, NULL, NULL, &tout);
- /* dprintf("got %d (err %d)\n", r, errno); */
- if (!r) break;
- if (wait_for_stdin && FD_ISSET(STDIN_FILENO, &readfds)) {
- // in this case the key down for the return key will be recorded
- // so we need to up the key up
- memset(&ev, 0, sizeof(ev));
- ev.type = EV_KEY;
- ev.code = KEY_ENTER;
- ev.value = 0;
- gettimeofday(&ev.time, NULL);
- fwrite(&keydev, sizeof(keydev), 1, fdout);
- fwrite(&_padding, sizeof(_padding), 1, fdout);
- fwrite(&ev, sizeof(ev), 1, fdout);
- memset(&ev, 0, sizeof(ev)); // SYN
- gettimeofday(&ev.time, NULL);
- fwrite(&keydev, sizeof(keydev), 1, fdout);
- fwrite(&_padding, sizeof(_padding), 1, fdout);
- fwrite(&ev, sizeof(ev), 1, fdout);
- dprintf("added fake return exiting...\n");
- break;
- }
-
- for (i=0; i < inpdev->id_pathc; i++)
- {
- if (FD_ISSET(fds[i], &readfds))
- {
- dprintf("Got event from %s\n", inpdev->id_pathv[i]);
- memset(&ev, 0, sizeof(ev));
- size_t rb = read(fds[i], (void*) &ev, sizeof(ev));
- dprintf("%d event: type %d code %d value %d\n",
- (unsigned int)rb, ev.type, ev.code, ev.value);
- if (ev.type == EV_KEY && ev.code == KEY_ENTER && ev.value == 1)
- keydev = i;
- fwrite(&i, sizeof(i), 1, fdout);
- fwrite(&_padding, sizeof(_padding), 1, fdout);
- fwrite(&ev, sizeof(ev), 1, fdout);
- count++;
- }
- }
- }
-
- for (i=0; i < inpdev->id_pathc; i++)
- {
- close(fds[i]);
- }
-
- fclose(fdout);
- free(fds);
- dprintf("Recorded %d events\n", count);
-}
-
-int main(int argc, char** argv)
-{
- int i;
- char *logfile = NULL;
-
- revent_args_init(&rargs, argc, argv);
-
- inpdev_init(&inpdev, rargs->device_number);
-
- switch(rargs->mode) {
- case RECORD:
- record(inpdev, rargs->record_time, rargs->file);
- break;
- case REPLAY:
- replay(rargs->file);
- break;
- case DUMP:
- dump(rargs->file);
- break;
- case INFO:
- for (i = 0; i < inpdev->id_pathc; i++) {
- printDevProperties(inpdev->id_pathv[i]);
- }
- };
-
- inpdev_close(inpdev);
- revent_args_close(rargs);
- return 0;
-}
diff --git a/wlauto/external/uiauto/build.sh b/wlauto/external/uiauto/build.sh
deleted file mode 100755
index 96b8b7f2..00000000
--- a/wlauto/external/uiauto/build.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-ant build
-
-cp bin/classes/com/arm/wlauto/uiauto/BaseUiAutomation.class ../../common
diff --git a/wlauto/external/uiauto/build.xml b/wlauto/external/uiauto/build.xml
deleted file mode 100644
index 478a86cc..00000000
--- a/wlauto/external/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/external/uiauto/project.properties b/wlauto/external/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/external/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/external/uiauto/src/com/arm/wlauto/uiauto/BaseUiAutomation.java b/wlauto/external/uiauto/src/com/arm/wlauto/uiauto/BaseUiAutomation.java
deleted file mode 100644
index 4d26100b..00000000
--- a/wlauto/external/uiauto/src/com/arm/wlauto/uiauto/BaseUiAutomation.java
+++ /dev/null
@@ -1,113 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto;
-
-import java.io.File;
-import java.io.BufferedReader;
-import java.io.InputStreamReader;
-import java.util.concurrent.TimeoutException;
-
-import android.app.Activity;
-import android.os.Bundle;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-public class BaseUiAutomation extends UiAutomatorTestCase {
-
-
- public void sleep(int second) {
- super.sleep(second * 1000);
- }
-
- public boolean takeScreenshot(String name) {
- Bundle params = getParams();
- String png_dir = params.getString("workdir");
-
- try {
- return getUiDevice().takeScreenshot(new File(png_dir, name + ".png"));
- } catch(NoSuchMethodError e) {
- return true;
- }
- }
-
- public void waitText(String text) throws UiObjectNotFoundException {
- waitText(text, 600);
- }
-
- public void waitText(String text, int second) throws UiObjectNotFoundException {
- UiSelector selector = new UiSelector();
- UiObject text_obj = new UiObject(selector.text(text)
- .className("android.widget.TextView"));
- waitObject(text_obj, second);
- }
-
- public void waitObject(UiObject obj) throws UiObjectNotFoundException {
- waitObject(obj, 600);
- }
-
- public void waitObject(UiObject obj, int second) throws UiObjectNotFoundException {
- if (! obj.waitForExists(second * 1000)){
- throw new UiObjectNotFoundException("UiObject is not found: "
- + obj.getSelector().toString());
- }
- }
-
- public boolean waitUntilNoObject(UiObject obj, int second) {
- return obj.waitUntilGone(second * 1000);
- }
-
- public void clearLogcat() throws Exception {
- Runtime.getRuntime().exec("logcat -c");
- }
-
- public void waitForLogcatText(String searchText, long timeout) throws Exception {
- long startTime = System.currentTimeMillis();
- Process process = Runtime.getRuntime().exec("logcat");
- BufferedReader reader = new BufferedReader(new InputStreamReader(process.getInputStream()));
- String line;
-
- long currentTime = System.currentTimeMillis();
- boolean found = false;
- while ((currentTime - startTime) < timeout){
- sleep(2); // poll every two seconds
-
- while((line=reader.readLine())!=null) {
- if (line.contains(searchText)) {
- found = true;
- break;
- }
- }
-
- if (found) {
- break;
- }
- currentTime = System.currentTimeMillis();
- }
-
- process.destroy();
-
- if ((currentTime - startTime) >= timeout) {
- throw new TimeoutException("Timed out waiting for Logcat text \"%s\"".format(searchText));
- }
- }
-}
-
diff --git a/wlauto/instrumentation/__init__.py b/wlauto/instrumentation/__init__.py
deleted file mode 100644
index 72db181e..00000000
--- a/wlauto/instrumentation/__init__.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from wlauto.core import instrumentation
-
-
-def instrument_is_installed(instrument):
- """Returns ``True`` if the specified instrument is installed, and ``False``
- other wise. The insturment maybe specified either as a name or a subclass (or
- instance of subclass) of :class:`wlauto.core.Instrument`."""
- return instrumentation.is_installed(instrument)
-
-
-def instrument_is_enabled(instrument):
- """Returns ``True`` if the specified instrument is installed and is currently
- enabled, and ``False`` other wise. The insturment maybe specified either
- as a name or a subclass (or instance of subclass) of
- :class:`wlauto.core.Instrument`."""
- return instrumentation.is_enabled(instrument)
-
-
-def clear_instrumentation():
- instrumentation.installed = []
diff --git a/wlauto/instrumentation/coreutil/__init__.py b/wlauto/instrumentation/coreutil/__init__.py
deleted file mode 100644
index e63f8c3e..00000000
--- a/wlauto/instrumentation/coreutil/__init__.py
+++ /dev/null
@@ -1,278 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import os
-import sys
-import re
-import time
-import shutil
-import logging
-import threading
-import subprocess
-import tempfile
-import csv
-
-from wlauto import Instrument, Parameter
-from wlauto.core.execution import ExecutionContext
-from wlauto.exceptions import InstrumentError, WorkerThreadError
-from wlauto.core import signal
-
-
-class CoreUtilization(Instrument):
-
- name = 'coreutil'
- description = """
- Measures CPU core activity during workload execution in terms of the percentage of time a number
- of cores were utilized above the specfied threshold.
-
- This workload generates ``coreutil.csv`` report in the workload's output directory. The report is
- formatted as follows::
-
- <threshold,1core,2core,3core,4core
- 18.098132,38.650248000000005,10.736180000000001,3.6809760000000002,28.834312000000001
-
- Interpretation of the result:
-
- - 38.65% of total time only single core is running above or equal to threshold value
- - 10.736% of total time two cores are running simultaneously above or equal to threshold value
- - 3.6809% of total time three cores are running simultaneously above or equal to threshold value
- - 28.8314% of total time four cores are running simultaneously above or equal to threshold value
- - 18.098% of time all core are running below threshold value.
-
- ..note : This instrument doesn't work on ARM big.LITTLE IKS implementation
-
- """
-
- parameters = [
- Parameter('threshold', kind=int, default=50,
- constraint=lambda x: 0 < x <= 100,
- description='Cores with percentage utilization above this value will be considered '
- 'as "utilized". This value may need to be adjusted based on the background '
- 'activity and the intensity of the workload being instrumented (e.g. it may '
- 'need to be lowered for low-intensity workloads such as video playback).'
- )
- ]
-
- def __init__(self, device, **kwargs):
- super(CoreUtilization, self).__init__(device, **kwargs)
- self.collector = None
- self.output_dir = None
- self.cores = None
- self.output_artifact_registered = False
-
- def setup(self, context):
- ''' Calls ProcCollect class '''
- self.output_dir = context.output_directory
- self.collector = ProcCollect(self.device, self.logger, self.output_dir)
- self.cores = self.device.number_of_cores
-
- def start(self, context): # pylint: disable=W0613
- ''' Starts collecting data once the workload starts '''
- self.logger.debug('Starting to collect /proc/stat data')
- self.collector.start()
-
- def stop(self, context): # pylint: disable=W0613
- ''' Stops collecting data once the workload stops '''
- self.logger.debug('Stopping /proc/stat data collection')
- self.collector.stop()
-
- def update_result(self, context):
- ''' updates result into coreutil.csv '''
- self.collector.join() # wait for "proc.txt" to generate.
- context.add_artifact('proctxt', 'proc.txt', 'raw')
- calc = Calculator(self.cores, self.threshold, context) # pylint: disable=E1101
- calc.calculate()
- if not self.output_artifact_registered:
- context.add_run_artifact('cpuutil', 'coreutil.csv', 'data')
- self.output_artifact_registered = True
-
-
-class ProcCollect(threading.Thread):
- ''' Dumps data into proc.txt '''
-
- def __init__(self, device, logger, out_dir):
- super(ProcCollect, self).__init__()
- self.device = device
- self.logger = logger
- self.dire = out_dir
- self.stop_signal = threading.Event()
- self.command = 'cat /proc/stat'
- self.exc = None
-
- def run(self):
- try:
- self.stop_signal.clear()
- _, temp_file = tempfile.mkstemp()
- self.logger.debug('temp file : {}'.format(temp_file))
- with open(temp_file, 'wb') as tempfp:
- while not self.stop_signal.is_set():
- tempfp.write(self.device.execute(self.command))
- tempfp.write('\n')
- time.sleep(0.5)
- raw_file = os.path.join(self.dire, 'proc.txt')
- shutil.copy(temp_file, raw_file)
- os.unlink(temp_file)
- except Exception, error: # pylint: disable=W0703
- self.logger.warning('Exception on collector thread : {}({})'.format(error.__class__.__name__, error))
- self.exc = WorkerThreadError(self.name, sys.exc_info())
-
- def stop(self):
- '''Executed once the workload stops'''
- self.stop_signal.set()
- if self.exc is not None:
- raise self.exc # pylint: disable=E0702
-
-
-class Calculator(object):
- """
- Read /proc/stat and dump data into ``proc.txt`` which is parsed to generate ``coreutil.csv``
- Sample output from 'proc.txt' ::
-
- ----------------------------------------------------------------------
- cpu 9853753 51448 3248855 12403398 4241 111 14996 0 0 0
- cpu0 1585220 7756 1103883 4977224 552 97 10505 0 0 0
- cpu1 2141168 7243 564347 972273 504 4 1442 0 0 0
- cpu2 1940681 7994 651946 1005534 657 3 1424 0 0 0
- cpu3 1918013 8833 667782 1012249 643 3 1326 0 0 0
- cpu4 165429 5363 50289 1118910 474 0 148 0 0 0
- cpu5 1661299 4910 126654 1104018 480 0 53 0 0 0
- cpu6 333642 4657 48296 1102531 482 2 55 0 0 0
- cpu7 108299 4691 35656 1110658 448 0 41 0 0 0
- ----------------------------------------------------------------------
- Description:
-
- 1st column : cpu_id( cpu0, cpu1, cpu2,......)
- Next all column represents the amount of time, measured in units of USER_HZ
- 2nd column : Time spent in user mode
- 3rd column : Time spent in user mode with low priority
- 4th column : Time spent in system mode
- 5th column : Time spent in idle task
- 6th column : Time waiting for i/o to compelete
- 7th column : Time servicing interrupts
- 8th column : Time servicing softirqs
- 9th column : Stolen time is the time spent in other operating systems
- 10th column : Time spent running a virtual CPU
- 11th column : Time spent running a niced guest
-
- ----------------------------------------------------------------------------
-
- Procedure to calculate instantaneous CPU utilization:
-
- 1) Subtract two consecutive samples for every column( except 1st )
- 2) Sum all the values except "Time spent in idle task"
- 3) CPU utilization(%) = ( value obtained in 2 )/sum of all the values)*100
-
- """
-
- idle_time_index = 3
-
- def __init__(self, cores, threshold, context):
- self.cores = cores
- self.threshold = threshold
- self.context = context
- self.cpu_util = None # Store CPU utilization for each core
- self.active = None # Store active time(total time - idle)
- self.total = None # Store the total amount of time (in USER_HZ)
- self.output = None
- self.cpuid_regex = re.compile(r'cpu(\d+)')
- self.outfile = os.path.join(context.run_output_directory, 'coreutil.csv')
- self.infile = os.path.join(context.output_directory, 'proc.txt')
-
- def calculate(self):
- self.calculate_total_active()
- self.calculate_core_utilization()
- self.generate_csv(self.context)
-
- def calculate_total_active(self):
- """ Read proc.txt file and calculate 'self.active' and 'self.total' """
- all_cores = set(xrange(self.cores))
- self.total = [[] for _ in all_cores]
- self.active = [[] for _ in all_cores]
- with open(self.infile, "r") as fh:
- # parsing logic:
- # - keep spinning through lines until see the cpu summary line
- # (taken to indicate start of new record).
- # - extract values for individual cores after the summary line,
- # keeping track of seen cores until no more lines match 'cpu\d+'
- # pattern.
- # - For every core not seen in this record, pad zeros.
- # - Loop
- try:
- while True:
- line = fh.next()
- if not line.startswith('cpu '):
- continue
-
- seen_cores = set([])
- line = fh.next()
- match = self.cpuid_regex.match(line)
- while match:
- cpu_id = int(match.group(1))
- seen_cores.add(cpu_id)
- times = map(int, line.split()[1:]) # first column is the cpu_id
- self.total[cpu_id].append(sum(times))
- self.active[cpu_id].append(sum(times) - times[self.idle_time_index])
- line = fh.next()
- match = self.cpuid_regex.match(line)
-
- for unseen_core in all_cores - seen_cores:
- self.total[unseen_core].append(0)
- self.active[unseen_core].append(0)
- except StopIteration: # EOF
- pass
-
- def calculate_core_utilization(self):
- """Calculates CPU utilization"""
- diff_active = [[] for _ in xrange(self.cores)]
- diff_total = [[] for _ in xrange(self.cores)]
- self.cpu_util = [[] for _ in xrange(self.cores)]
- for i in xrange(self.cores):
- for j in xrange(len(self.active[i]) - 1):
- temp = self.active[i][j + 1] - self.active[i][j]
- diff_active[i].append(temp)
- diff_total[i].append(self.total[i][j + 1] - self.total[i][j])
- if diff_total[i][j] == 0:
- self.cpu_util[i].append(0)
- else:
- temp = float(diff_active[i][j]) / diff_total[i][j]
- self.cpu_util[i].append(round((float(temp)) * 100, 2))
-
- def generate_csv(self, context):
- """ generates ``coreutil.csv``"""
- self.output = [0 for _ in xrange(self.cores + 1)]
- for i in range(len(self.cpu_util[0])):
- count = 0
- for j in xrange(len(self.cpu_util)):
- if self.cpu_util[j][i] > round(float(self.threshold), 2):
- count = count + 1
- self.output[count] += 1
- if self.cpu_util[0]:
- scale_factor = round((float(1) / len(self.cpu_util[0])) * 100, 6)
- else:
- scale_factor = 0
- for i in xrange(len(self.output)):
- self.output[i] = self.output[i] * scale_factor
- with open(self.outfile, 'a+') as tem:
- writer = csv.writer(tem)
- reader = csv.reader(tem)
- if sum(1 for row in reader) == 0:
- row = ['workload', 'iteration', '<threshold']
- for i in xrange(1, self.cores + 1):
- row.append('{}core'.format(i))
- writer.writerow(row)
- row = [context.result.workload.name, context.result.iteration]
- row.extend(self.output)
- writer.writerow(row)
diff --git a/wlauto/instrumentation/daq/__init__.py b/wlauto/instrumentation/daq/__init__.py
deleted file mode 100644
index 9949a63a..00000000
--- a/wlauto/instrumentation/daq/__init__.py
+++ /dev/null
@@ -1,416 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=W0613,E1101,access-member-before-definition,attribute-defined-outside-init
-from __future__ import division
-import os
-import sys
-import csv
-import shutil
-import tempfile
-from collections import OrderedDict, defaultdict
-from string import ascii_lowercase
-
-from multiprocessing import Process, Queue
-
-from wlauto import Instrument, Parameter
-from wlauto.core import signal
-from wlauto.exceptions import ConfigError, InstrumentError, DeviceError
-from wlauto.utils.misc import ensure_directory_exists as _d
-from wlauto.utils.types import list_of_ints, list_of_strs, boolean
-
-# pylint: disable=wrong-import-position,wrong-import-order
-daqpower_path = os.path.join(os.path.dirname(__file__), '..', '..', 'external', 'daq_server', 'src')
-sys.path.insert(0, daqpower_path)
-try:
- import daqpower.client as daq # pylint: disable=F0401
- from daqpower.config import DeviceConfiguration, ServerConfiguration, ConfigurationError # pylint: disable=F0401
-except ImportError, e:
- daq, DeviceConfiguration, ServerConfiguration, ConfigurationError = None, None, None, None
- import_error_mesg = e.message
-sys.path.pop(0)
-
-
-UNITS = {
- 'energy': 'Joules',
- 'power': 'Watts',
- 'voltage': 'Volts',
-}
-
-
-GPIO_ROOT = '/sys/class/gpio'
-TRACE_MARKER_PATH = '/sys/kernel/debug/tracing/trace_marker'
-
-
-def dict_or_bool(value):
- """
- Ensures that either a dictionary or a boolean is used as a parameter.
- """
- if isinstance(value, dict):
- return value
- return boolean(value)
-
-
-class Daq(Instrument):
-
- name = 'daq'
- description = """
- DAQ instrument obtains the power consumption of the target device's core
- measured by National Instruments Data Acquisition(DAQ) device.
-
- WA communicates with a DAQ device server running on a Windows machine
- (Please refer to :ref:`daq_setup`) over a network. You must specify the IP
- address and port the server is listening on in the config file as follows ::
-
- daq_server_host = '10.1.197.176'
- daq_server_port = 45677
-
- These values will be output by the server when you run it on Windows.
-
- You must also specify the values of resistors (in Ohms) across which the
- voltages are measured (Please refer to :ref:`daq_setup`). The values should be
- specified as a list with an entry for each resistor, e.g.::
-
- daq_resistor_values = [0.005, 0.005]
-
- In addition to this mandatory configuration, you can also optionally specify the
- following::
-
- :daq_labels: Labels to be used for ports. Defaults to ``'PORT_<pnum>'``, where
- 'pnum' is the number of the port.
- :daq_device_id: The ID under which the DAQ is registered with the driver.
- Defaults to ``'Dev1'``.
- :daq_v_range: Specifies the voltage range for the SOC voltage channel on the DAQ
- (please refer to :ref:`daq_setup` for details). Defaults to ``2.5``.
- :daq_dv_range: Specifies the voltage range for the resistor voltage channel on
- the DAQ (please refer to :ref:`daq_setup` for details).
- Defaults to ``0.2``.
- :daq_sampling_rate: DAQ sampling rate. DAQ will take this many samples each
- second. Please note that this maybe limitted by your DAQ model
- and then number of ports you're measuring (again, see
- :ref:`daq_setup`). Defaults to ``10000``.
- :daq_channel_map: Represents mapping from logical AI channel number to physical
- connector on the DAQ (varies between DAQ models). The default
- assumes DAQ 6363 and similar with AI channels on connectors
- 0-7 and 16-23.
-
- """
-
- parameters = [
- Parameter('server_host', kind=str, default='localhost',
- global_alias='daq_server_host',
- description='The host address of the machine that runs the daq Server which the '
- 'insturment communicates with.'),
- Parameter('server_port', kind=int, default=45677,
- global_alias='daq_server_port',
- description='The port number for daq Server in which daq insturment communicates '
- 'with.'),
- Parameter('device_id', kind=str, default='Dev1',
- global_alias='daq_device_id',
- description='The ID under which the DAQ is registered with the driver.'),
- Parameter('v_range', kind=float, default=2.5,
- global_alias='daq_v_range',
- description='Specifies the voltage range for the SOC voltage channel on the DAQ '
- '(please refer to :ref:`daq_setup` for details).'),
- Parameter('dv_range', kind=float, default=0.2,
- global_alias='daq_dv_range',
- description='Specifies the voltage range for the resistor voltage channel on '
- 'the DAQ (please refer to :ref:`daq_setup` for details).'),
- Parameter('sampling_rate', kind=int, default=10000,
- global_alias='daq_sampling_rate',
- description='DAQ sampling rate. DAQ will take this many samples each '
- 'second. Please note that this maybe limitted by your DAQ model '
- 'and then number of ports you\'re measuring (again, see '
- ':ref:`daq_setup`)'),
- Parameter('resistor_values', kind=list, mandatory=True,
- global_alias='daq_resistor_values',
- description='The values of resistors (in Ohms) across which the voltages are measured on '
- 'each port.'),
- Parameter('channel_map', kind=list_of_ints, default=(0, 1, 2, 3, 4, 5, 6, 7, 16, 17, 18, 19, 20, 21, 22, 23),
- global_alias='daq_channel_map',
- description='Represents mapping from logical AI channel number to physical '
- 'connector on the DAQ (varies between DAQ models). The default '
- 'assumes DAQ 6363 and similar with AI channels on connectors '
- '0-7 and 16-23.'),
- Parameter('labels', kind=list_of_strs,
- global_alias='daq_labels',
- description='List of port labels. If specified, the lenght of the list must match '
- 'the length of ``resistor_values``. Defaults to "PORT_<pnum>", where '
- '"pnum" is the number of the port.'),
- Parameter('negative_samples', default='keep', allowed_values=['keep', 'zero', 'drop', 'abs'],
- global_alias='daq_negative_samples',
- description="""
- Specifies how negative power samples should be handled. The following
- methods are possible:
-
- :keep: keep them as they are
- :zero: turn negative values to zero
- :drop: drop samples if they contain negative values. *warning:* this may result in
- port files containing different numbers of samples
- :abs: take the absoulte value of negave samples
-
- """),
- Parameter('gpio_sync', kind=int, constraint=lambda x: x > 0,
- description="""
- If specified, the instrument will simultaneously set the
- specified GPIO pin high and put a marker into ftrace. This is
- to facillitate syncing kernel trace events to DAQ power
- trace.
- """),
- Parameter('merge_channels', kind=dict_or_bool, default=False,
- description="""
- If set to ``True``, channels with consecutive letter suffixes will be summed.
- e.g. If you have channels A7a, A7b, A7c, A15a, A15b they will be summed to A7, A15
-
- You can also manually specify the name of channels to be merged and the name of the
- result like so:
-
- merge_channels:
- A15: [A15dvfs, A15ram]
- NonCPU: [GPU, RoS, Mem]
-
- In the above exaples the DAQ channels labeled A15a and A15b will be summed together
- with the results being saved as 'channel' ''a''. A7, GPU and RoS will be summed to 'c'
- """)
- ]
-
- def initialize(self, context):
- status, devices = self._execute_command('list_devices')
- if status == daq.Status.OK and not devices:
- raise InstrumentError('DAQ: server did not report any devices registered with the driver.')
- self._results = OrderedDict()
- self.gpio_path = None
- if self.gpio_sync:
- if not self.device.file_exists(GPIO_ROOT):
- raise InstrumentError('GPIO sysfs not enabled on the device.')
- try:
- export_path = self.device.path.join(GPIO_ROOT, 'export')
- self.device.write_value(export_path, self.gpio_sync, verify=False)
- pin_root = self.device.path.join(GPIO_ROOT, 'gpio{}'.format(self.gpio_sync))
- direction_path = self.device.path.join(pin_root, 'direction')
- self.device.write_value(direction_path, 'out')
- self.gpio_path = self.device.path.join(pin_root, 'value')
- self.device.write_value(self.gpio_path, 0, verify=False)
- signal.connect(self.insert_start_marker, signal.BEFORE_WORKLOAD_EXECUTION, priority=11)
- signal.connect(self.insert_stop_marker, signal.AFTER_WORKLOAD_EXECUTION, priority=11)
- except DeviceError as e:
- raise InstrumentError('Could not configure GPIO on device: {}'.format(e))
-
- def setup(self, context):
- self.logger.debug('Initialising session.')
- self._execute_command('configure', config=self.device_config)
-
- def slow_start(self, context):
- self.logger.debug('Starting collecting measurements.')
- self._execute_command('start')
-
- def slow_stop(self, context):
- self.logger.debug('Stopping collecting measurements.')
- self._execute_command('stop')
-
- def update_result(self, context): # pylint: disable=R0914
- self.logger.debug('Downloading data files.')
- output_directory = _d(os.path.join(context.output_directory, 'daq'))
- self._execute_command('get_data', output_directory=output_directory)
-
- if self.merge_channels:
- self._merge_channels(context)
-
- for entry in os.listdir(output_directory):
- context.add_iteration_artifact('DAQ_{}'.format(os.path.splitext(entry)[0]),
- path=os.path.join('daq', entry),
- kind='data',
- description='DAQ power measurments.')
- port = os.path.splitext(entry)[0]
- path = os.path.join(output_directory, entry)
- key = (context.spec.id, context.spec.label, context.current_iteration)
- if key not in self._results:
- self._results[key] = {}
-
- temp_file = os.path.join(tempfile.gettempdir(), entry)
- writer, wfh = None, None
-
- with open(path) as fh:
- if self.negative_samples != 'keep':
- wfh = open(temp_file, 'wb')
- writer = csv.writer(wfh)
-
- reader = csv.reader(fh)
- metrics = reader.next()
- if writer:
- writer.writerow(metrics)
- self._metrics |= set(metrics)
-
- rows = _get_rows(reader, writer, self.negative_samples)
- data = zip(*rows)
-
- if writer:
- wfh.close()
- shutil.move(temp_file, os.path.join(output_directory, entry))
-
- n = len(data[0])
- means = [s / n for s in map(sum, data)]
- for metric, value in zip(metrics, means):
- metric_name = '{}_{}'.format(port, metric)
- context.result.add_metric(metric_name, round(value, 3), UNITS[metric])
- self._results[key][metric_name] = round(value, 3)
- energy = sum(data[metrics.index('power')]) * (self.sampling_rate / 1000000)
- context.result.add_metric('{}_energy'.format(port), round(energy, 3), UNITS['energy'])
-
- def teardown(self, context):
- self.logger.debug('Terminating session.')
- self._execute_command('close')
-
- def finalize(self, context):
- if self.gpio_path:
- unexport_path = self.device.path.join(GPIO_ROOT, 'unexport')
- self.device.write_value(unexport_path, self.gpio_sync, verify=False)
-
- def validate(self): # pylint: disable=too-many-branches
- if not daq:
- raise ImportError(import_error_mesg)
- self._results = None
- self._metrics = set()
- if self.labels:
- if len(self.labels) != len(self.resistor_values):
- raise ConfigError('Number of DAQ port labels does not match the number of resistor values.')
- else:
- self.labels = ['PORT_{}'.format(i) for i, _ in enumerate(self.resistor_values)]
- self.server_config = ServerConfiguration(host=self.server_host,
- port=self.server_port)
- self.device_config = DeviceConfiguration(device_id=self.device_id,
- v_range=self.v_range,
- dv_range=self.dv_range,
- sampling_rate=self.sampling_rate,
- resistor_values=self.resistor_values,
- channel_map=self.channel_map,
- labels=self.labels)
- try:
- self.server_config.validate()
- self.device_config.validate()
- except ConfigurationError, ex:
- raise ConfigError('DAQ configuration: ' + ex.message) # Re-raise as a WA error
- self.grouped_suffixes = defaultdict(str)
- if isinstance(self.merge_channels, bool):
- if self.merge_channels:
- # Create a dict of potential prefixes and a list of their suffixes
- grouped_suffixes = {label[:-1]: label for label in sorted(self.labels) if len(label) > 1}
- # Only merge channels if more than one channel has the same prefix and the prefixes
- # are consecutive letters starting with 'a'.
- self.label_map = {}
- for channel, suffixes in grouped_suffixes.iteritems():
- if len(suffixes) > 1:
- if "".join([s[-1] for s in suffixes]) in ascii_lowercase[:len(suffixes)]:
- self.label_map[channel] = suffixes
-
- elif isinstance(self.merge_channels, dict):
- # Check if given channel names match labels
- for old_names in self.merge_channels.values():
- for name in old_names:
- if name not in self.labels:
- raise ConfigError("No channel with label {} specified".format(name))
- self.label_map = self.merge_channels # pylint: disable=redefined-variable-type
- self.merge_channels = True
- else: # Should never reach here
- raise AssertionError("``merge_channels`` is of invalid type")
-
- def before_overall_results_processing(self, context):
- if self._results:
- headers = ['id', 'workload', 'iteration']
- metrics = ['{}_{}'.format(p, m) for p in self.labels for m in sorted(self._metrics)]
- headers += metrics
- rows = [headers]
- for key, value in self._results.iteritems():
- rows.append(list(key) + [value[m] for m in metrics])
-
- outfile = os.path.join(context.output_directory, 'daq_power.csv')
- with open(outfile, 'wb') as fh:
- writer = csv.writer(fh)
- writer.writerows(rows)
-
- def insert_start_marker(self, context):
- if self.gpio_path:
- command = 'echo DAQ_START_MARKER > {}; echo 1 > {}'.format(TRACE_MARKER_PATH, self.gpio_path)
- self.device.execute(command, as_root=self.device.is_rooted)
-
- def insert_stop_marker(self, context):
- if self.gpio_path:
- command = 'echo DAQ_STOP_MARKER > {}; echo 0 > {}'.format(TRACE_MARKER_PATH, self.gpio_path)
- self.device.execute(command, as_root=self.device.is_rooted)
-
- def _execute_command(self, command, **kwargs):
- # pylint: disable=E1101
- q = Queue()
- p = Process(target=_send_daq_command, args=(q, self.server_config, command), kwargs=kwargs)
- p.start()
- result = q.get()
- p.join()
- if result.status == daq.Status.OK:
- pass # all good
- elif result.status == daq.Status.OKISH:
- self.logger.debug(result.message)
- elif result.status == daq.Status.ERROR:
- raise InstrumentError('DAQ: {}'.format(result.message))
- else:
- raise InstrumentError('DAQ: Unexpected result: {} - {}'.format(result.status, result.message))
- return (result.status, result.data)
-
- def _merge_channels(self, context): # pylint: disable=r0914
- output_directory = _d(os.path.join(context.output_directory, 'daq'))
- for name, labels in self.label_map.iteritems():
- summed = None
- for label in labels:
- path = os.path.join(output_directory, "{}.csv".format(label))
- with open(path) as fh:
- reader = csv.reader(fh)
- metrics = reader.next()
- rows = _get_rows(reader, None, self.negative_samples)
- if summed:
- summed = [[x + y for x, y in zip(a, b)] for a, b in zip(rows, summed)]
- else:
- summed = rows
- output_path = os.path.join(output_directory, "{}.csv".format(name))
- with open(output_path, 'wb') as wfh:
- writer = csv.writer(wfh)
- writer.writerow(metrics)
- for row in summed:
- writer.writerow(row)
-
-
-def _send_daq_command(q, *args, **kwargs):
- result = daq.execute_command(*args, **kwargs)
- q.put(result)
-
-
-def _get_rows(reader, writer, negative_samples):
- rows = []
- for row in reader:
- row = map(float, row)
- if negative_samples == 'keep':
- rows.append(row)
- elif negative_samples == 'zero':
- def nonneg(v):
- return v if v >= 0 else 0
- rows.append([nonneg(v) for v in row])
- elif negative_samples == 'drop':
- if all(v >= 0 for v in row):
- rows.append(row)
- elif negative_samples == 'abs':
- rows.append([abs(v) for v in row])
- else:
- raise AssertionError(negative_samples) # should never get here
- if writer:
- writer.writerow(row)
- return rows
diff --git a/wlauto/instrumentation/delay/__init__.py b/wlauto/instrumentation/delay/__init__.py
deleted file mode 100644
index f4d17a54..00000000
--- a/wlauto/instrumentation/delay/__init__.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-#pylint: disable=W0613,E1101,E0203,W0201
-import time
-
-from wlauto import Instrument, Parameter
-from wlauto.exceptions import ConfigError, InstrumentError
-from wlauto.utils.types import boolean
-
-
-class DelayInstrument(Instrument):
-
- name = 'delay'
- description = """
- This instrument introduces a delay before executing either an iteration
- or all iterations for a spec.
-
- The delay may be specified as either a fixed period or a temperature
- threshold that must be reached.
-
- Optionally, if an active cooling solution is employed to speed up temperature drop between
- runs, it may be controlled using this instrument.
-
- """
-
- parameters = [
- Parameter('temperature_file', default='/sys/devices/virtual/thermal/thermal_zone0/temp',
- global_alias='thermal_temp_file',
- description="""Full path to the sysfile on the device that contains the device's
- temperature."""),
- Parameter('temperature_timeout', kind=int, default=600,
- global_alias='thermal_timeout',
- description="""
- The timeout after which the instrument will stop waiting even if the specified threshold
- temperature is not reached. If this timeout is hit, then a warning will be logged stating
- the actual temperature at which the timeout has ended.
- """),
- Parameter('temperature_poll_period', kind=int, default=5,
- global_alias='thermal_sleep_time',
- description="""How long to sleep (in seconds) between polling current device temperature."""),
- Parameter('temperature_between_specs', kind=int, default=None,
- global_alias='thermal_threshold_between_specs',
- description="""
- Temperature (in device-specific units) the device must cool down to before
- the iteration spec will be run.
-
- .. note:: This cannot be specified at the same time as ``fixed_between_specs``
-
- """),
- Parameter('temperature_between_iterations', kind=int, default=None,
- global_alias='thermal_threshold_between_iterations',
- description="""
- Temperature (in device-specific units) the device must cool down to before
- the next spec will be run.
-
- .. note:: This cannot be specified at the same time as ``fixed_between_iterations``
-
- """),
- Parameter('temperature_before_start', kind=int, default=None,
- global_alias='thermal_threshold_before_start',
- description="""
- Temperature (in device-specific units) the device must cool down to just before
- the actual workload execution (after setup has been performed).
-
- .. note:: This cannot be specified at the same time as ``fixed_between_iterations``
-
- """),
- Parameter('fixed_between_specs', kind=int, default=None,
- global_alias='fixed_delay_between_specs',
- description="""
- How long to sleep (in seconds) after all iterations for a workload spec have
- executed.
-
- .. note:: This cannot be specified at the same time as ``temperature_between_specs``
-
- """),
- Parameter('fixed_between_iterations', kind=int, default=None,
- global_alias='fixed_delay_between_iterations',
- description="""
- How long to sleep (in seconds) after each iterations for a workload spec has
- executed.
-
- .. note:: This cannot be specified at the same time as ``temperature_between_iterations``
-
- """),
- Parameter('fixed_before_start', kind=int, default=None,
- global_alias='fixed_delay_before_start',
- description="""
-
- How long to sleep (in seconds) after setup for an iteration has been perfromed but
- before running the workload.
-
- .. note:: This cannot be specified at the same time as ``temperature_before_start``
-
- """),
- Parameter('active_cooling', kind=boolean, default=False,
- global_alias='thermal_active_cooling',
- description="""
- This instrument supports an active cooling solution while waiting for the device temperature
- to drop to the threshold. The solution involves an mbed controlling a fan. The mbed is signaled
- over a serial port. If this solution is present in the setup, this should be set to ``True``.
- """),
- ]
-
- def initialize(self, context):
- if self.temperature_between_iterations == 0:
- temp = self.device.get_sysfile_value(self.temperature_file, int)
- self.logger.debug('Setting temperature threshold between iterations to {}'.format(temp))
- self.temperature_between_iterations = temp
- if self.temperature_between_specs == 0:
- temp = self.device.get_sysfile_value(self.temperature_file, int)
- self.logger.debug('Setting temperature threshold between workload specs to {}'.format(temp))
- self.temperature_between_specs = temp
-
- def very_slow_on_iteration_start(self, context):
- if self.active_cooling:
- self.device.stop_active_cooling()
- if self.fixed_between_iterations:
- self.logger.debug('Waiting for a fixed period after iteration...')
- time.sleep(self.fixed_between_iterations)
- elif self.temperature_between_iterations:
- self.logger.debug('Waiting for temperature drop before iteration...')
- self.wait_for_temperature(self.temperature_between_iterations)
-
- def very_slow_on_spec_start(self, context):
- if self.active_cooling:
- self.device.stop_active_cooling()
- if self.fixed_between_specs:
- self.logger.debug('Waiting for a fixed period after spec execution...')
- time.sleep(self.fixed_between_specs)
- elif self.temperature_between_specs:
- self.logger.debug('Waiting for temperature drop before spec execution...')
- self.wait_for_temperature(self.temperature_between_specs)
-
- def very_slow_start(self, context):
- if self.active_cooling:
- self.device.stop_active_cooling()
- if self.fixed_before_start:
- self.logger.debug('Waiting for a fixed period after iteration...')
- time.sleep(self.fixed_before_start)
- elif self.temperature_before_start:
- self.logger.debug('Waiting for temperature drop before commencing execution...')
- self.wait_for_temperature(self.temperature_before_start)
-
- def wait_for_temperature(self, temperature):
- if self.active_cooling:
- self.device.start_active_cooling()
- self.do_wait_for_temperature(temperature)
- self.device.stop_active_cooling()
- else:
- self.do_wait_for_temperature(temperature)
-
- def do_wait_for_temperature(self, temperature):
- reading = self.device.get_sysfile_value(self.temperature_file, int)
- waiting_start_time = time.time()
- while reading > temperature:
- self.logger.debug('Device temperature: {}'.format(reading))
- if time.time() - waiting_start_time > self.temperature_timeout:
- self.logger.warning('Reached timeout; current temperature: {}'.format(reading))
- break
- time.sleep(self.temperature_poll_period)
- reading = self.device.get_sysfile_value(self.temperature_file, int)
-
- def validate(self):
- if (self.temperature_between_specs is not None and
- self.fixed_between_specs is not None):
- raise ConfigError('Both fixed delay and thermal threshold specified for specs.')
-
- if (self.temperature_between_iterations is not None and
- self.fixed_between_iterations is not None):
- raise ConfigError('Both fixed delay and thermal threshold specified for iterations.')
-
- if (self.temperature_before_start is not None and
- self.fixed_before_start is not None):
- raise ConfigError('Both fixed delay and thermal threshold specified before start.')
-
- if not any([self.temperature_between_specs, self.fixed_between_specs, self.temperature_before_start,
- self.temperature_between_iterations, self.fixed_between_iterations,
- self.fixed_before_start]):
- raise ConfigError('delay instrument is enabled, but no delay is specified.')
-
- if self.active_cooling and not self.device.has('active_cooling'):
- message = 'Your device does not support active cooling. Did you configure it with an approprite module?'
- raise InstrumentError(message)
-
diff --git a/wlauto/instrumentation/dmesg/__init__.py b/wlauto/instrumentation/dmesg/__init__.py
deleted file mode 100644
index 36e1af5b..00000000
--- a/wlauto/instrumentation/dmesg/__init__.py
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import os
-
-from wlauto import Instrument, Parameter
-from wlauto.utils.misc import ensure_file_directory_exists as _f
-
-
-class DmesgInstrument(Instrument):
- # pylint: disable=no-member,attribute-defined-outside-init
- """
- Collected dmesg output before and during the run.
-
- """
-
- name = 'dmesg'
-
- parameters = [
- Parameter('loglevel', kind=int, allowed_values=range(8),
- description='Set loglevel for console output.')
- ]
-
- loglevel_file = '/proc/sys/kernel/printk'
-
- def setup(self, context):
- if self.loglevel:
- self.old_loglevel = self.device.get_sysfile_value(self.loglevel_file)
- self.device.write_value(self.loglevel_file, self.loglevel, verify=False)
- self.before_file = _f(os.path.join(context.output_directory, 'dmesg', 'before'))
- self.after_file = _f(os.path.join(context.output_directory, 'dmesg', 'after'))
-
- def slow_start(self, context):
- with open(self.before_file, 'w') as wfh:
- wfh.write(self.device.execute('dmesg'))
- context.add_artifact('dmesg_before', self.before_file, kind='data')
- if self.device.is_rooted:
- self.device.execute('dmesg -c', as_root=True)
-
- def slow_stop(self, context):
- with open(self.after_file, 'w') as wfh:
- wfh.write(self.device.execute('dmesg'))
- context.add_artifact('dmesg_after', self.after_file, kind='data')
-
- def teardown(self, context): # pylint: disable=unused-argument
- if self.loglevel:
- self.device.write_value(self.loglevel_file, self.old_loglevel, verify=False)
-
-
diff --git a/wlauto/instrumentation/energy_model/__init__.py b/wlauto/instrumentation/energy_model/__init__.py
deleted file mode 100644
index 1c434fb7..00000000
--- a/wlauto/instrumentation/energy_model/__init__.py
+++ /dev/null
@@ -1,850 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-#pylint: disable=attribute-defined-outside-init,access-member-before-definition,redefined-outer-name
-from __future__ import division
-import os
-import math
-import time
-from tempfile import mktemp
-from base64 import b64encode
-from collections import Counter, namedtuple
-
-try:
- import jinja2
- import pandas as pd
- import matplotlib
- matplotlib.use('AGG')
- import matplotlib.pyplot as plt
- import numpy as np
- low_filter = np.vectorize(lambda x: x > 0 and x or 0) # pylint: disable=no-member
- import_error = None
-except ImportError as e:
- import_error = e
- jinja2 = None
- pd = None
- plt = None
- np = None
- low_filter = None
-
-from wlauto import Instrument, Parameter, File
-from wlauto.exceptions import ConfigError, InstrumentError, DeviceError
-from wlauto.instrumentation import instrument_is_installed
-from wlauto.utils.types import caseless_string, list_or_caseless_string, list_of_ints
-from wlauto.utils.misc import list_to_mask
-
-FREQ_TABLE_FILE = 'frequency_power_perf_data.csv'
-CPUS_TABLE_FILE = 'projected_cap_power.csv'
-MEASURED_CPUS_TABLE_FILE = 'measured_cap_power.csv'
-IDLE_TABLE_FILE = 'idle_power_perf_data.csv'
-REPORT_TEMPLATE_FILE = 'report.template'
-EM_TEMPLATE_FILE = 'em.template'
-
-IdlePowerState = namedtuple('IdlePowerState', ['power'])
-CapPowerState = namedtuple('CapPowerState', ['cap', 'power'])
-
-
-class EnergyModel(object):
-
- def __init__(self):
- self.big_cluster_idle_states = []
- self.little_cluster_idle_states = []
- self.big_cluster_cap_states = []
- self.little_cluster_cap_states = []
- self.big_core_idle_states = []
- self.little_core_idle_states = []
- self.big_core_cap_states = []
- self.little_core_cap_states = []
-
- def add_cap_entry(self, cluster, perf, clust_pow, core_pow):
- if cluster == 'big':
- self.big_cluster_cap_states.append(CapPowerState(perf, clust_pow))
- self.big_core_cap_states.append(CapPowerState(perf, core_pow))
- elif cluster == 'little':
- self.little_cluster_cap_states.append(CapPowerState(perf, clust_pow))
- self.little_core_cap_states.append(CapPowerState(perf, core_pow))
- else:
- raise ValueError('Unexpected cluster: {}'.format(cluster))
-
- def add_cluster_idle(self, cluster, values):
- for value in values:
- if cluster == 'big':
- self.big_cluster_idle_states.append(IdlePowerState(value))
- elif cluster == 'little':
- self.little_cluster_idle_states.append(IdlePowerState(value))
- else:
- raise ValueError('Unexpected cluster: {}'.format(cluster))
-
- def add_core_idle(self, cluster, values):
- for value in values:
- if cluster == 'big':
- self.big_core_idle_states.append(IdlePowerState(value))
- elif cluster == 'little':
- self.little_core_idle_states.append(IdlePowerState(value))
- else:
- raise ValueError('Unexpected cluster: {}'.format(cluster))
-
-
-class PowerPerformanceAnalysis(object):
-
- def __init__(self, data):
- self.summary = {}
- big_freqs = data[data.cluster == 'big'].frequency.unique()
- little_freqs = data[data.cluster == 'little'].frequency.unique()
- self.summary['frequency'] = max(set(big_freqs).intersection(set(little_freqs)))
-
- big_sc = data[(data.cluster == 'big') &
- (data.frequency == self.summary['frequency']) &
- (data.cpus == 1)]
- little_sc = data[(data.cluster == 'little') &
- (data.frequency == self.summary['frequency']) &
- (data.cpus == 1)]
- self.summary['performance_ratio'] = big_sc.performance.item() / little_sc.performance.item()
- self.summary['power_ratio'] = big_sc.power.item() / little_sc.power.item()
- self.summary['max_performance'] = data[data.cpus == 1].performance.max()
- self.summary['max_power'] = data[data.cpus == 1].power.max()
-
-
-def build_energy_model(freq_power_table, cpus_power, idle_power, first_cluster_idle_state):
- # pylint: disable=too-many-locals
- em = EnergyModel()
- idle_power_sc = idle_power[idle_power.cpus == 1]
- perf_data = get_normalized_single_core_data(freq_power_table)
-
- for cluster in ['little', 'big']:
- cluster_cpus_power = cpus_power[cluster].dropna()
- cluster_power = cluster_cpus_power['cluster'].apply(int)
- core_power = (cluster_cpus_power['1'] - cluster_power).apply(int)
- performance = (perf_data[perf_data.cluster == cluster].performance_norm * 1024 / 100).apply(int)
- for perf, clust_pow, core_pow in zip(performance, cluster_power, core_power):
- em.add_cap_entry(cluster, perf, clust_pow, core_pow)
-
- all_idle_power = idle_power_sc[idle_power_sc.cluster == cluster].power.values
- # CORE idle states
- # We want the delta of each state w.r.t. the power
- # consumption of the shallowest one at this level (core_ref)
- idle_core_power = low_filter(all_idle_power[:first_cluster_idle_state] -
- all_idle_power[first_cluster_idle_state - 1])
- # CLUSTER idle states
- # We want the absolute value of each idle state
- idle_cluster_power = low_filter(all_idle_power[first_cluster_idle_state - 1:])
- em.add_cluster_idle(cluster, idle_cluster_power)
- em.add_core_idle(cluster, idle_core_power)
-
- return em
-
-
-def generate_em_c_file(em, big_core, little_core, em_template_file, outfile):
- with open(em_template_file) as fh:
- em_template = jinja2.Template(fh.read())
- em_text = em_template.render(
- big_core=big_core,
- little_core=little_core,
- em=em,
- )
- with open(outfile, 'w') as wfh:
- wfh.write(em_text)
- return em_text
-
-
-def generate_report(freq_power_table, measured_cpus_table, cpus_table, idle_power_table, # pylint: disable=unused-argument
- report_template_file, device_name, em_text, outfile):
- # pylint: disable=too-many-locals
- cap_power_analysis = PowerPerformanceAnalysis(freq_power_table)
- single_core_norm = get_normalized_single_core_data(freq_power_table)
- cap_power_plot = get_cap_power_plot(single_core_norm)
- idle_power_plot = get_idle_power_plot(idle_power_table)
-
- fig, axes = plt.subplots(1, 2)
- fig.set_size_inches(16, 8)
- for i, cluster in enumerate(reversed(cpus_table.columns.levels[0])):
- projected = cpus_table[cluster].dropna(subset=['1'])
- plot_cpus_table(projected, axes[i], cluster)
- cpus_plot_data = get_figure_data(fig)
-
- with open(report_template_file) as fh:
- report_template = jinja2.Template(fh.read())
- html = report_template.render(
- device_name=device_name,
- freq_power_table=freq_power_table.set_index(['cluster', 'cpus', 'frequency']).to_html(),
- cap_power_analysis=cap_power_analysis,
- cap_power_plot=get_figure_data(cap_power_plot),
- idle_power_table=idle_power_table.set_index(['cluster', 'cpus', 'state']).to_html(),
- idle_power_plot=get_figure_data(idle_power_plot),
- cpus_table=cpus_table.to_html(),
- cpus_plot=cpus_plot_data,
- em_text=em_text,
- )
- with open(outfile, 'w') as wfh:
- wfh.write(html)
- return html
-
-
-def wa_result_to_power_perf_table(df, performance_metric, index):
- table = df.pivot_table(index=index + ['iteration'],
- columns='metric', values='value').reset_index()
- result_mean = table.groupby(index).mean()
- result_std = table.groupby(index).std()
- result_std.columns = [c + ' std' for c in result_std.columns]
- result_count = table.groupby(index).count()
- result_count.columns = [c + ' count' for c in result_count.columns]
- count_sqrt = result_count.apply(lambda x: x.apply(math.sqrt))
- count_sqrt.columns = result_std.columns # match column names for division
- result_error = 1.96 * result_std / count_sqrt # 1.96 == 95% confidence interval
- result_error.columns = [c + ' error' for c in result_mean.columns]
-
- result = pd.concat([result_mean, result_std, result_count, result_error], axis=1)
- del result['iteration']
- del result['iteration std']
- del result['iteration count']
- del result['iteration error']
-
- updated_columns = []
- for column in result.columns:
- if column == performance_metric:
- updated_columns.append('performance')
- elif column == performance_metric + ' std':
- updated_columns.append('performance_std')
- elif column == performance_metric + ' error':
- updated_columns.append('performance_error')
- else:
- updated_columns.append(column.replace(' ', '_'))
- result.columns = updated_columns
- result = result[sorted(result.columns)]
- result.reset_index(inplace=True)
-
- return result
-
-
-def get_figure_data(fig, fmt='png'):
- tmp = mktemp()
- fig.savefig(tmp, format=fmt, bbox_inches='tight')
- with open(tmp, 'rb') as fh:
- image_data = b64encode(fh.read())
- os.remove(tmp)
- return image_data
-
-
-def get_normalized_single_core_data(data):
- finite_power = np.isfinite(data.power) # pylint: disable=no-member
- finite_perf = np.isfinite(data.performance) # pylint: disable=no-member
- data_single_core = data[(data.cpus == 1) & finite_perf & finite_power].copy()
- data_single_core['performance_norm'] = (data_single_core.performance /
- data_single_core.performance.max() * 100).apply(int)
- data_single_core['power_norm'] = (data_single_core.power /
- data_single_core.power.max() * 100).apply(int)
- return data_single_core
-
-
-def get_cap_power_plot(data_single_core):
- big_single_core = data_single_core[(data_single_core.cluster == 'big') &
- (data_single_core.cpus == 1)]
- little_single_core = data_single_core[(data_single_core.cluster == 'little') &
- (data_single_core.cpus == 1)]
-
- fig, axes = plt.subplots(1, 1, figsize=(12, 8))
- axes.plot(big_single_core.performance_norm,
- big_single_core.power_norm,
- marker='o')
- axes.plot(little_single_core.performance_norm,
- little_single_core.power_norm,
- marker='o')
- axes.set_xlim(0, 105)
- axes.set_ylim(0, 105)
- axes.set_xlabel('Performance (Normalized)')
- axes.set_ylabel('Power (Normalized)')
- axes.grid()
- axes.legend(['big cluster', 'little cluster'], loc=0)
- return fig
-
-
-def get_idle_power_plot(df):
- fig, axes = plt.subplots(1, 2, figsize=(15, 7))
- for cluster, ax in zip(['little', 'big'], axes):
- data = df[df.cluster == cluster].pivot_table(index=['state'], columns='cpus', values='power')
- err = df[df.cluster == cluster].pivot_table(index=['state'], columns='cpus', values='power_error')
- data.plot(kind='bar', ax=ax, rot=30, yerr=err)
- ax.set_title('{} cluster'.format(cluster))
- ax.set_xlim(-1, len(data.columns) - 0.5)
- ax.set_ylabel('Power (mW)')
- return fig
-
-
-def fit_polynomial(s, n):
- # pylint: disable=no-member
- coeffs = np.polyfit(s.index, s.values, n)
- poly = np.poly1d(coeffs)
- return poly(s.index)
-
-
-def get_cpus_power_table(data, index, opps, leak_factors): # pylint: disable=too-many-locals
- # pylint: disable=no-member
- power_table = data[[index, 'cluster', 'cpus', 'power']].pivot_table(index=index,
- columns=['cluster', 'cpus'],
- values='power')
- bs_power_table = pd.DataFrame(index=power_table.index, columns=power_table.columns)
- for cluster in power_table.columns.levels[0]:
- power_table[cluster, 0] = (power_table[cluster, 1] -
- (power_table[cluster, 2] -
- power_table[cluster, 1]))
- bs_power_table.loc[power_table[cluster, 1].notnull(), (cluster, 1)] = fit_polynomial(power_table[cluster, 1].dropna(), 2)
- bs_power_table.loc[power_table[cluster, 2].notnull(), (cluster, 2)] = fit_polynomial(power_table[cluster, 2].dropna(), 2)
-
- if opps[cluster] is None:
- bs_power_table.loc[bs_power_table[cluster, 1].notnull(), (cluster, 0)] = \
- (2 * power_table[cluster, 1] - power_table[cluster, 2]).values
- else:
- voltages = opps[cluster].set_index('frequency').sort_index()
- leakage = leak_factors[cluster] * 2 * voltages['voltage']**3 / 0.9**3
- leakage_delta = leakage - leakage[leakage.index[0]]
- bs_power_table.loc[:, (cluster, 0)] = \
- (2 * bs_power_table[cluster, 1] + leakage_delta - bs_power_table[cluster, 2])
-
- # re-order columns and rename colum '0' to 'cluster'
- power_table = power_table[sorted(power_table.columns,
- cmp=lambda x, y: cmp(y[0], x[0]) or cmp(x[1], y[1]))]
- bs_power_table = bs_power_table[sorted(bs_power_table.columns,
- cmp=lambda x, y: cmp(y[0], x[0]) or cmp(x[1], y[1]))]
- old_levels = power_table.columns.levels
- power_table.columns.set_levels([old_levels[0], list(map(str, old_levels[1])[:-1]) + ['cluster']],
- inplace=True)
- bs_power_table.columns.set_levels([old_levels[0], list(map(str, old_levels[1])[:-1]) + ['cluster']],
- inplace=True)
- return power_table, bs_power_table
-
-
-def plot_cpus_table(projected, ax, cluster):
- projected.T.plot(ax=ax, marker='o')
- ax.set_title('{} cluster'.format(cluster))
- ax.set_xticklabels(projected.columns)
- ax.set_xticks(range(0, 5))
- ax.set_xlim(-0.5, len(projected.columns) - 0.5)
- ax.set_ylabel('Power (mW)')
- ax.grid(True)
-
-
-def opp_table(d):
- if d is None:
- return None
- return pd.DataFrame(d.items(), columns=['frequency', 'voltage'])
-
-
-class EnergyModelInstrument(Instrument):
-
- name = 'energy_model'
- desicription = """
- Generates a power mode for the device based on specified workload.
-
- This insturment will execute the workload specified by the agenda (currently, only ``sysbench`` is
- supported) and will use the resulting performance and power measurments to generate a power mode for
- the device.
-
- This instrument requires certain features to be present in the kernel:
-
- 1. cgroups and cpusets must be enabled.
- 2. cpufreq and userspace governor must be enabled.
- 3. cpuidle must be enabled.
-
- """
-
- parameters = [
- Parameter('device_name', kind=caseless_string,
- description="""The name of the device to be used in generating the model. If not specified,
- ``device.name`` will be used. """),
- Parameter('big_core', kind=caseless_string,
- description="""The name of the "big" core in the big.LITTLE system; must match
- one of the values in ``device.core_names``. """),
- Parameter('performance_metric', kind=caseless_string, mandatory=True,
- description="""Metric to be used as the performance indicator."""),
- Parameter('power_metric', kind=list_or_caseless_string,
- description="""Metric to be used as the power indicator. The value may contain a
- ``{core}`` format specifier that will be replaced with names of big
- and little cores to drive the name of the metric for that cluster.
- Ether this or ``energy_metric`` must be specified but not both."""),
- Parameter('energy_metric', kind=list_or_caseless_string,
- description="""Metric to be used as the energy indicator. The value may contain a
- ``{core}`` format specifier that will be replaced with names of big
- and little cores to drive the name of the metric for that cluster.
- this metric will be used to derive power by deviding through by
- execution time. Either this or ``power_metric`` must be specified, but
- not both."""),
- Parameter('power_scaling_factor', kind=float, default=1.0,
- description="""Power model specfies power in milliWatts. This is a scaling factor that
- power_metric values will be multiplied by to get milliWatts."""),
- Parameter('big_frequencies', kind=list_of_ints,
- description="""List of frequencies to be used for big cores. These frequencies must
- be supported by the cores. If this is not specified, all available
- frequencies for the core (as read from cpufreq) will be used."""),
- Parameter('little_frequencies', kind=list_of_ints,
- description="""List of frequencies to be used for little cores. These frequencies must
- be supported by the cores. If this is not specified, all available
- frequencies for the core (as read from cpufreq) will be used."""),
- Parameter('idle_workload', kind=str, default='idle',
- description="Workload to be used while measuring idle power."),
- Parameter('idle_workload_params', kind=dict, default={},
- description="Parameter to pass to the idle workload."),
- Parameter('first_cluster_idle_state', kind=int, default=-1,
- description='''The index of the first cluster idle state on the device. Previous states
- are assumed to be core idles. The default is ``-1``, i.e. only the last
- idle state is assumed to affect the entire cluster.'''),
- Parameter('no_hotplug', kind=bool, default=False,
- description='''This options allows running the instrument without hotpluging cores on and off.
- Disabling hotplugging will most likely produce a less accurate power model.'''),
- Parameter('num_of_freqs_to_thermal_adjust', kind=int, default=0,
- description="""The number of frequencies begining from the highest, to be adjusted for
- the thermal effect."""),
- Parameter('big_opps', kind=opp_table,
- description="""OPP table mapping frequency to voltage (kHz --> mV) for the big cluster."""),
- Parameter('little_opps', kind=opp_table,
- description="""OPP table mapping frequency to voltage (kHz --> mV) for the little cluster."""),
- Parameter('big_leakage', kind=int, default=120,
- description="""
- Leakage factor for the big cluster (this is specific to a particular core implementation).
- """),
- Parameter('little_leakage', kind=int, default=60,
- description="""
- Leakage factor for the little cluster (this is specific to a particular core implementation).
- """),
- ]
-
- def validate(self):
- if import_error:
- message = 'energy_model instrument requires pandas, jinja2 and matplotlib Python packages to be installed; got: "{}"'
- raise InstrumentError(message.format(import_error.message))
- for capability in ['cgroups', 'cpuidle']:
- if not self.device.has(capability):
- message = 'The Device does not appear to support {}; does it have the right module installed?'
- raise ConfigError(message.format(capability))
- device_cores = set(self.device.core_names)
- if (self.power_metric and self.energy_metric) or not (self.power_metric or self.energy_metric):
- raise ConfigError('Either power_metric or energy_metric must be specified (but not both).')
- if not device_cores:
- raise ConfigError('The Device does not appear to have core_names configured.')
- elif len(device_cores) != 2:
- raise ConfigError('The Device does not appear to be a big.LITTLE device.')
- if self.big_core and self.big_core not in self.device.core_names:
- raise ConfigError('Specified big_core "{}" is in divice {}'.format(self.big_core, self.device.name))
- if not self.big_core:
- self.big_core = self.device.core_names[-1] # the last core is usually "big" in existing big.LITTLE devices
- if not self.device_name:
- self.device_name = self.device.name
- if self.num_of_freqs_to_thermal_adjust and not instrument_is_installed('daq'):
- self.logger.warn('Adjustment for thermal effect requires daq instrument. Disabling adjustment')
- self.num_of_freqs_to_thermal_adjust = 0
-
- def initialize(self, context):
- self.number_of_cpus = {}
- self.report_template_file = context.resolver.get(File(self, REPORT_TEMPLATE_FILE))
- self.em_template_file = context.resolver.get(File(self, EM_TEMPLATE_FILE))
- self.little_core = (set(self.device.core_names) - set([self.big_core])).pop()
- self.perform_runtime_validation()
- self.enable_all_cores()
- self.configure_clusters()
- self.discover_idle_states()
- self.disable_thermal_management()
- self.initialize_job_queue(context)
- self.initialize_result_tracking()
-
- def setup(self, context):
- if not context.spec.label.startswith('idle_'):
- return
- for idle_state in self.get_device_idle_states(self.measured_cluster):
- if idle_state.index > context.spec.idle_state_index:
- idle_state.disable = 1
- else:
- idle_state.disable = 0
-
- def fast_start(self, context): # pylint: disable=unused-argument
- self.start_time = time.time()
-
- def fast_stop(self, context): # pylint: disable=unused-argument
- self.run_time = time.time() - self.start_time
-
- def on_iteration_start(self, context):
- self.setup_measurement(context.spec.cluster)
-
- def thermal_correction(self, context):
- if not self.num_of_freqs_to_thermal_adjust or self.num_of_freqs_to_thermal_adjust > len(self.big_frequencies):
- return 0
- freqs = self.big_frequencies[-self.num_of_freqs_to_thermal_adjust:]
- spec = context.result.spec
- if spec.frequency not in freqs:
- return 0
- data_path = os.path.join(context.output_directory, 'daq', '{}.csv'.format(self.big_core))
- data = pd.read_csv(data_path)['power']
- return _adjust_for_thermal(data, filt_method=lambda x: pd.rolling_median(x, 1000), thresh=0.9, window=5000)
-
- # slow to make sure power results have been generated
- def slow_update_result(self, context): # pylint: disable=too-many-branches
- spec = context.result.spec
- cluster = spec.cluster
- is_freq_iteration = spec.label.startswith('freq_')
- perf_metric = 0
- power_metric = 0
- thermal_adjusted_power = 0
- if is_freq_iteration and cluster == 'big':
- thermal_adjusted_power = self.thermal_correction(context)
- for metric in context.result.metrics:
- if metric.name == self.performance_metric:
- perf_metric = metric.value
- elif thermal_adjusted_power and metric.name in self.big_power_metrics:
- power_metric += thermal_adjusted_power * self.power_scaling_factor
- elif (cluster == 'big') and metric.name in self.big_power_metrics:
- power_metric += metric.value * self.power_scaling_factor
- elif (cluster == 'little') and metric.name in self.little_power_metrics:
- power_metric += metric.value * self.power_scaling_factor
- elif thermal_adjusted_power and metric.name in self.big_energy_metrics:
- power_metric += thermal_adjusted_power / self.run_time * self.power_scaling_factor
- elif (cluster == 'big') and metric.name in self.big_energy_metrics:
- power_metric += metric.value / self.run_time * self.power_scaling_factor
- elif (cluster == 'little') and metric.name in self.little_energy_metrics:
- power_metric += metric.value / self.run_time * self.power_scaling_factor
-
- if not (power_metric and (perf_metric or not is_freq_iteration)):
- message = 'Incomplete results for {} iteration{}'
- raise InstrumentError(message.format(context.result.spec.id, context.current_iteration))
-
- if is_freq_iteration:
- index_matter = [cluster, spec.num_cpus,
- spec.frequency, context.result.iteration]
- data = self.freq_data
- else:
- index_matter = [cluster, spec.num_cpus,
- spec.idle_state_id, spec.idle_state_desc, context.result.iteration]
- data = self.idle_data
- if self.no_hotplug:
- # due to that fact that hotpluging was disabled, power has to be artificially scaled
- # to the number of cores that should have been active if hotplugging had occurred.
- power_metric = spec.num_cpus * (power_metric / self.number_of_cpus[cluster])
-
- data.append(index_matter + ['performance', perf_metric])
- data.append(index_matter + ['power', power_metric])
-
- def before_overall_results_processing(self, context):
- # pylint: disable=too-many-locals
- if not self.idle_data or not self.freq_data:
- self.logger.warning('Run aborted early; not generating energy_model.')
- return
- output_directory = os.path.join(context.output_directory, 'energy_model')
- os.makedirs(output_directory)
-
- df = pd.DataFrame(self.idle_data, columns=['cluster', 'cpus', 'state_id',
- 'state', 'iteration', 'metric', 'value'])
- idle_power_table = wa_result_to_power_perf_table(df, '', index=['cluster', 'cpus', 'state'])
- idle_output = os.path.join(output_directory, IDLE_TABLE_FILE)
- with open(idle_output, 'w') as wfh:
- idle_power_table.to_csv(wfh, index=False)
- context.add_artifact('idle_power_table', idle_output, 'export')
-
- df = pd.DataFrame(self.freq_data,
- columns=['cluster', 'cpus', 'frequency', 'iteration', 'metric', 'value'])
- freq_power_table = wa_result_to_power_perf_table(df, self.performance_metric,
- index=['cluster', 'cpus', 'frequency'])
- freq_output = os.path.join(output_directory, FREQ_TABLE_FILE)
- with open(freq_output, 'w') as wfh:
- freq_power_table.to_csv(wfh, index=False)
- context.add_artifact('freq_power_table', freq_output, 'export')
-
- if self.big_opps is None or self.little_opps is None:
- message = 'OPPs not specified for one or both clusters; cluster power will not be adjusted for leakage.'
- self.logger.warning(message)
- opps = {'big': self.big_opps, 'little': self.little_opps}
- leakages = {'big': self.big_leakage, 'little': self.little_leakage}
- try:
- measured_cpus_table, cpus_table = get_cpus_power_table(freq_power_table, 'frequency', opps, leakages)
- except (ValueError, KeyError, IndexError) as e:
- self.logger.error('Could not create cpu power tables: {}'.format(e))
- return
- measured_cpus_output = os.path.join(output_directory, MEASURED_CPUS_TABLE_FILE)
- with open(measured_cpus_output, 'w') as wfh:
- measured_cpus_table.to_csv(wfh)
- context.add_artifact('measured_cpus_table', measured_cpus_output, 'export')
- cpus_output = os.path.join(output_directory, CPUS_TABLE_FILE)
- with open(cpus_output, 'w') as wfh:
- cpus_table.to_csv(wfh)
- context.add_artifact('cpus_table', cpus_output, 'export')
-
- em = build_energy_model(freq_power_table, cpus_table, idle_power_table, self.first_cluster_idle_state)
- em_file = os.path.join(output_directory, '{}_em.c'.format(self.device_name))
- em_text = generate_em_c_file(em, self.big_core, self.little_core,
- self.em_template_file, em_file)
- context.add_artifact('em', em_file, 'data')
-
- report_file = os.path.join(output_directory, 'report.html')
- generate_report(freq_power_table, measured_cpus_table, cpus_table,
- idle_power_table, self.report_template_file,
- self.device_name, em_text, report_file)
- context.add_artifact('pm_report', report_file, 'export')
-
- def initialize_result_tracking(self):
- self.freq_data = []
- self.idle_data = []
- self.big_power_metrics = []
- self.little_power_metrics = []
- self.big_energy_metrics = []
- self.little_energy_metrics = []
- if self.power_metric:
- self.big_power_metrics = [pm.format(core=self.big_core) for pm in self.power_metric]
- self.little_power_metrics = [pm.format(core=self.little_core) for pm in self.power_metric]
- else: # must be energy_metric
- self.big_energy_metrics = [em.format(core=self.big_core) for em in self.energy_metric]
- self.little_energy_metrics = [em.format(core=self.little_core) for em in self.energy_metric]
-
- def configure_clusters(self):
- self.measured_cores = None
- self.measuring_cores = None
- self.cpuset = self.device.get_cgroup_controller('cpuset')
- self.cpuset.create_group('big', self.big_cpus, [0])
- self.cpuset.create_group('little', self.little_cpus, [0])
- for cluster in set(self.device.core_clusters):
- self.device.set_cluster_governor(cluster, 'userspace')
-
- def discover_idle_states(self):
- online_cpu = self.device.get_online_cpus(self.big_core)[0]
- self.big_idle_states = self.device.get_cpuidle_states(online_cpu)
- online_cpu = self.device.get_online_cpus(self.little_core)[0]
- self.little_idle_states = self.device.get_cpuidle_states(online_cpu)
- if not (len(self.big_idle_states) >= 2 and len(self.little_idle_states) >= 2):
- raise DeviceError('There do not appeart to be at least two idle states '
- 'on at least one of the clusters.')
-
- def setup_measurement(self, measured):
- measuring = 'big' if measured == 'little' else 'little'
- self.measured_cluster = measured
- self.measuring_cluster = measuring
- self.measured_cpus = self.big_cpus if measured == 'big' else self.little_cpus
- self.measuring_cpus = self.little_cpus if measured == 'big' else self.big_cpus
- self.reset()
-
- def reset(self):
- self.enable_all_cores()
- self.enable_all_idle_states()
- self.reset_cgroups()
- self.cpuset.move_all_tasks_to(self.measuring_cluster)
- server_process = 'adbd' if self.device.os == 'android' else 'sshd'
- server_pids = self.device.get_pids_of(server_process)
- children_ps = [e for e in self.device.ps()
- if e.ppid in server_pids and e.name != 'sshd']
- children_pids = [e.pid for e in children_ps]
- pids_to_move = server_pids + children_pids
- self.cpuset.root.add_tasks(pids_to_move)
- for pid in pids_to_move:
- try:
- self.device.execute('busybox taskset -p 0x{:x} {}'.format(list_to_mask(self.measuring_cpus), pid))
- except DeviceError:
- pass
-
- def enable_all_cores(self):
- counter = Counter(self.device.core_names)
- for core, number in counter.iteritems():
- self.device.set_number_of_online_cpus(core, number)
- self.big_cpus = self.device.get_online_cpus(self.big_core)
- self.little_cpus = self.device.get_online_cpus(self.little_core)
-
- def enable_all_idle_states(self):
- for cpu in self.device.online_cpus:
- for state in self.device.get_cpuidle_states(cpu):
- state.disable = 0
-
- def reset_cgroups(self):
- self.big_cpus = self.device.get_online_cpus(self.big_core)
- self.little_cpus = self.device.get_online_cpus(self.little_core)
- self.cpuset.big.set(self.big_cpus, 0)
- self.cpuset.little.set(self.little_cpus, 0)
-
- def perform_runtime_validation(self):
- if not self.device.is_rooted:
- raise InstrumentError('the device must be rooted to generate energy models')
- if 'userspace' not in self.device.list_available_cluster_governors(0):
- raise InstrumentError('userspace cpufreq governor must be enabled')
-
- error_message = 'Frequency {} is not supported by {} cores'
- available_frequencies = self.device.list_available_core_frequencies(self.big_core)
- if self.big_frequencies:
- for freq in self.big_frequencies:
- if freq not in available_frequencies:
- raise ConfigError(error_message.format(freq, self.big_core))
- else:
- self.big_frequencies = available_frequencies
- available_frequencies = self.device.list_available_core_frequencies(self.little_core)
- if self.little_frequencies:
- for freq in self.little_frequencies:
- if freq not in available_frequencies:
- raise ConfigError(error_message.format(freq, self.little_core))
- else:
- self.little_frequencies = available_frequencies
-
- def initialize_job_queue(self, context):
- old_specs = []
- for job in context.runner.job_queue:
- if job.spec not in old_specs:
- old_specs.append(job.spec)
- new_specs = self.get_cluster_specs(old_specs, 'big', context)
- new_specs.extend(self.get_cluster_specs(old_specs, 'little', context))
-
- # Update config to refect jobs that will actually run.
- context.config.workload_specs = new_specs
- config_file = os.path.join(context.host_working_directory, 'run_config.json')
- with open(config_file, 'wb') as wfh:
- context.config.serialize(wfh)
-
- context.runner.init_queue(new_specs)
-
- def get_cluster_specs(self, old_specs, cluster, context):
- core = self.get_core_name(cluster)
- self.number_of_cpus[cluster] = sum([1 for c in self.device.core_names if c == core])
-
- cluster_frequencies = self.get_frequencies_param(cluster)
- if not cluster_frequencies:
- raise InstrumentError('Could not read available frequencies for {}'.format(core))
- min_frequency = min(cluster_frequencies)
-
- idle_states = self.get_device_idle_states(cluster)
- new_specs = []
- for state in idle_states:
- for num_cpus in xrange(1, self.number_of_cpus[cluster] + 1):
- spec = old_specs[0].copy()
- spec.workload_name = self.idle_workload
- spec.workload_parameters = self.idle_workload_params
- spec.idle_state_id = state.id
- spec.idle_state_desc = state.desc
- spec.idle_state_index = state.index
- if not self.no_hotplug:
- spec.runtime_parameters['{}_cores'.format(core)] = num_cpus
- spec.runtime_parameters['{}_frequency'.format(core)] = min_frequency
- spec.runtime_parameters['ui'] = 'off'
- spec.cluster = cluster
- spec.num_cpus = num_cpus
- spec.id = '{}_idle_{}_{}'.format(cluster, state.id, num_cpus)
- spec.label = 'idle_{}'.format(cluster)
- spec.number_of_iterations = old_specs[0].number_of_iterations
- spec.load(self.device, context.config.ext_loader)
- spec.workload.init_resources(context)
- spec.workload.validate()
- new_specs.append(spec)
- for old_spec in old_specs:
- if old_spec.workload_name not in ['sysbench', 'dhrystone']:
- raise ConfigError('Only sysbench and dhrystone workloads currently supported for energy_model generation.')
- for freq in cluster_frequencies:
- for num_cpus in xrange(1, self.number_of_cpus[cluster] + 1):
- spec = old_spec.copy()
- spec.runtime_parameters['{}_frequency'.format(core)] = freq
- if not self.no_hotplug:
- spec.runtime_parameters['{}_cores'.format(core)] = num_cpus
- spec.runtime_parameters['ui'] = 'off'
- spec.id = '{}_{}_{}'.format(cluster, num_cpus, freq)
- spec.label = 'freq_{}_{}'.format(cluster, spec.label)
- spec.workload_parameters['taskset_mask'] = list_to_mask(self.get_cpus(cluster))
- spec.workload_parameters['threads'] = num_cpus
- if old_spec.workload_name == 'sysbench':
- # max_requests set to an arbitrary high values to make sure
- # sysbench runs for full duriation even on highly
- # performant cores.
- spec.workload_parameters['max_requests'] = 10000000
- spec.cluster = cluster
- spec.num_cpus = num_cpus
- spec.frequency = freq
- spec.load(self.device, context.config.ext_loader)
- spec.workload.init_resources(context)
- spec.workload.validate()
- new_specs.append(spec)
- return new_specs
-
- def disable_thermal_management(self):
- if self.device.file_exists('/sys/class/thermal/thermal_zone0'):
- tzone_paths = self.device.execute('ls /sys/class/thermal/thermal_zone*')
- for tzpath in tzone_paths.strip().split():
- mode_file = '{}/mode'.format(tzpath)
- if self.device.file_exists(mode_file):
- self.device.write_value(mode_file, 'disabled')
-
- def get_device_idle_states(self, cluster):
- if cluster == 'big':
- online_cpus = self.device.get_online_cpus(self.big_core)
- else:
- online_cpus = self.device.get_online_cpus(self.little_core)
- idle_states = []
- for cpu in online_cpus:
- idle_states.extend(self.device.get_cpuidle_states(cpu))
- return idle_states
-
- def get_core_name(self, cluster):
- if cluster == 'big':
- return self.big_core
- else:
- return self.little_core
-
- def get_cpus(self, cluster):
- if cluster == 'big':
- return self.big_cpus
- else:
- return self.little_cpus
-
- def get_frequencies_param(self, cluster):
- if cluster == 'big':
- return self.big_frequencies
- else:
- return self.little_frequencies
-
-
-def _adjust_for_thermal(data, filt_method=lambda x: x, thresh=0.9, window=5000, tdiff_threshold=10000):
- n = filt_method(data)
- n = n[~np.isnan(n)] # pylint: disable=no-member
-
- d = np.diff(n) # pylint: disable=no-member
- d = d[~np.isnan(d)] # pylint: disable=no-member
- dmin = min(d)
- dmax = max(d)
-
- index_up = np.max((d > dmax * thresh).nonzero()) # pylint: disable=no-member
- index_down = np.min((d < dmin * thresh).nonzero()) # pylint: disable=no-member
- low_average = np.average(n[index_up:index_up + window]) # pylint: disable=no-member
- high_average = np.average(n[index_down - window:index_down]) # pylint: disable=no-member
- if low_average > high_average or index_down - index_up < tdiff_threshold:
- return 0
- else:
- return low_average
-
-
-if __name__ == '__main__':
- import sys # pylint: disable=wrong-import-position,wrong-import-order
- indir, outdir = sys.argv[1], sys.argv[2]
- device_name = 'odroidxu3'
- big_core = 'a15'
- little_core = 'a7'
- first_cluster_idle_state = -1
-
- this_dir = os.path.dirname(__file__)
- report_template_file = os.path.join(this_dir, REPORT_TEMPLATE_FILE)
- em_template_file = os.path.join(this_dir, EM_TEMPLATE_FILE)
-
- freq_power_table = pd.read_csv(os.path.join(indir, FREQ_TABLE_FILE))
- measured_cpus_table, cpus_table = pd.read_csv(os.path.join(indir, CPUS_TABLE_FILE), # pylint: disable=unbalanced-tuple-unpacking
- header=range(2), index_col=0)
- idle_power_table = pd.read_csv(os.path.join(indir, IDLE_TABLE_FILE))
-
- if not os.path.exists(outdir):
- os.makedirs(outdir)
- report_file = os.path.join(outdir, 'report.html')
- em_file = os.path.join(outdir, '{}_em.c'.format(device_name))
-
- em = build_energy_model(freq_power_table, cpus_table,
- idle_power_table, first_cluster_idle_state)
- em_text = generate_em_c_file(em, big_core, little_core,
- em_template_file, em_file)
- generate_report(freq_power_table, measured_cpus_table, cpus_table,
- idle_power_table, report_template_file, device_name,
- em_text, report_file)
diff --git a/wlauto/instrumentation/energy_model/em.template b/wlauto/instrumentation/energy_model/em.template
deleted file mode 100644
index 6a02ece0..00000000
--- a/wlauto/instrumentation/energy_model/em.template
+++ /dev/null
@@ -1,51 +0,0 @@
-static struct idle_state idle_states_cluster_{{ little_core|lower }}[] = {
- {% for entry in em.little_cluster_idle_states -%}
- { .power = {{ entry.power }}, },
- {% endfor %}
- };
-
-static struct idle_state idle_states_cluster_{{ big_core|lower }}[] = {
- {% for entry in em.big_cluster_idle_states -%}
- { .power = {{ entry.power }}, },
- {% endfor %}
- };
-
-static struct capacity_state cap_states_cluster_{{ little_core|lower }}[] = {
- /* Power per cluster */
- {% for entry in em.little_cluster_cap_states -%}
- { .cap = {{ entry.cap }}, .power = {{ entry.power }}, },
- {% endfor %}
- };
-
-static struct capacity_state cap_states_cluster_{{ big_core|lower }}[] = {
- /* Power per cluster */
- {% for entry in em.big_cluster_cap_states -%}
- { .cap = {{ entry.cap }}, .power = {{ entry.power }}, },
- {% endfor %}
- };
-
-static struct idle_state idle_states_core_{{ little_core|lower }}[] = {
- {% for entry in em.little_core_idle_states -%}
- { .power = {{ entry.power }}, },
- {% endfor %}
- };
-
-static struct idle_state idle_states_core_{{ big_core|lower }}[] = {
- {% for entry in em.big_core_idle_states -%}
- { .power = {{ entry.power }}, },
- {% endfor %}
- };
-
-static struct capacity_state cap_states_core_{{ little_core|lower }}[] = {
- /* Power per cpu */
- {% for entry in em.little_core_cap_states -%}
- { .cap = {{ entry.cap }}, .power = {{ entry.power }}, },
- {% endfor %}
- }
-
-static struct capacity_state cap_states_core_{{ big_core|lower }}[] = {
- /* Power per cpu */
- {% for entry in em.big_core_cap_states -%}
- { .cap = {{ entry.cap }}, .power = {{ entry.power }}, },
- {% endfor %}
- };
diff --git a/wlauto/instrumentation/energy_model/report.template b/wlauto/instrumentation/energy_model/report.template
deleted file mode 100644
index 9170ee90..00000000
--- a/wlauto/instrumentation/energy_model/report.template
+++ /dev/null
@@ -1,123 +0,0 @@
-<html>
-<body>
- <style>
- .toggle-box {
- display: none;
- }
-
- .toggle-box + label {
- cursor: pointer;
- display: block;
- font-weight: bold;
- line-height: 21px;
- margin-bottom: 5px;
- }
-
- .toggle-box + label + div {
- display: none;
- margin-bottom: 10px;
- }
-
- .toggle-box:checked + label + div {
- display: block;
- }
-
- .toggle-box + label:before {
- background-color: #4F5150;
- -webkit-border-radius: 10px;
- -moz-border-radius: 10px;
- border-radius: 10px;
- color: #FFFFFF;
- content: "+";
- display: block;
- float: left;
- font-weight: bold;
- height: 20px;
- line-height: 20px;
- margin-right: 5px;
- text-align: center;
- width: 20px;
- }
-
- .toggle-box:checked + label:before {
- content: "\2212";
- }
-
- .document {
- width: 800px;
- margin-left:auto;
- margin-right:auto;
- }
-
- img {
- margin-left:auto;
- margin-right:auto;
- }
-
- h1.title {
- text-align: center;
- }
- </style>
-
- <div class="document">
- <h1 class="title">{{ device_name }} Energy Model Report</h1>
-
- <h2>Power/Performance Analysis</h2>
- <div>
- <h3>Summary</h3>
- At {{ cap_power_analysis.summary['frequency']|round(2) }} Hz<br />
- big is {{ cap_power_analysis.summary['performance_ratio']|round(2) }} times faster<br />
- big consumes {{ cap_power_analysis.summary['power_ratio']|round(2) }} times more power<br />
- <br />
- max performance: {{ cap_power_analysis.summary['max_performance']|round(2) }}<br />
- max power: {{ cap_power_analysis.summary['max_power']|round(2) }}<br />
- </div>
-
- <div>
- <h3>Single Core Power/Perfromance Plot</h3>
- These are the traditional power-performance curves for the single-core runs.
- <img align="middle" width="600px" src="data:image/png;base64,{{ cap_power_plot }}" />
- </div>
-
- <div>
- <input class="toggle-box" id="freq_table" type="checkbox" >
- <label for="freq_table">Expand view all power/performance data</label>
- <div>
- {{ freq_power_table }}
- </div>
- </div>
-
- <div>
- <h3>CPUs Power Plot</h3>
- Each line correspond to the cluster running at a different OPP. Each
- point corresponds to the average power with a certain number of CPUs
- executing. To get the contribution of the cluster we have to extend the
- lines on the left (what it would be the average power of just the cluster).
- <img align="middle" width="600px" src="data:image/png;base64,{{ cpus_plot }}" />
- </div>
-
- <div>
- <input class="toggle-box" id="cpus_table" type="checkbox" >
- <label for="cpus_table">Expand view CPUS power data</label>
- <div>
- {{ cpus_table }}
- </div>
- </div>
- <div>
- <h3>Idle Power</h3>
- <img align="middle" width="600px" src="data:image/png;base64,{{ idle_power_plot }}" />
- </div>
-
- <div>
- <input class="toggle-box" id="idle_power_table" type="checkbox" >
- <label for="idle_power_table">Expand view idle power data</label>
- <div>
- {{ idle_power_table }}
- </div>
- </div>
- </div>
-</body>
-</html>
-
-<!-- vim: ft=htmljinja
--->
diff --git a/wlauto/instrumentation/energy_probe/__init__.py b/wlauto/instrumentation/energy_probe/__init__.py
deleted file mode 100644
index 14346cf6..00000000
--- a/wlauto/instrumentation/energy_probe/__init__.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0613,E1101,access-member-before-definition,attribute-defined-outside-init
-import os
-import subprocess
-import signal
-import struct
-import csv
-try:
- import pandas
-except ImportError:
- pandas = None
-
-from wlauto import Instrument, Parameter, Executable
-from wlauto.exceptions import InstrumentError, ConfigError
-from wlauto.utils.types import list_of_numbers
-
-
-class EnergyProbe(Instrument):
-
- name = 'energy_probe'
- description = """Collects power traces using the ARM energy probe.
-
- This instrument requires ``caiman`` utility to be installed in the workload automation
- host and be in the PATH. Caiman is part of DS-5 and should be in ``/path/to/DS-5/bin/`` .
- Energy probe can simultaneously collect energy from up to 3 power rails.
-
- To connect the energy probe on a rail, connect the white wire to the pin that is closer to the
- Voltage source and the black wire to the pin that is closer to the load (the SoC or the device
- you are probing). Between the pins there should be a shunt resistor of known resistance in the
- range of 5 to 20 mOhm. The resistance of the shunt resistors is a mandatory parameter
- ``resistor_values``.
-
- .. note:: This instrument can process results a lot faster if python pandas is installed.
- """
-
- parameters = [
- Parameter('resistor_values', kind=list_of_numbers, default=[],
- description="""The value of shunt resistors. This is a mandatory parameter."""),
- Parameter('labels', kind=list, default=[],
- description="""Meaningful labels for each of the monitored rails."""),
- Parameter('device_entry', kind=str, default='/dev/ttyACM0',
- description="""Path to /dev entry for the energy probe (it should be /dev/ttyACMx)"""),
- ]
-
- MAX_CHANNELS = 3
-
- def __init__(self, device, **kwargs):
- super(EnergyProbe, self).__init__(device, **kwargs)
- self.attributes_per_sample = 3
- self.bytes_per_sample = self.attributes_per_sample * 4
- self.attributes = ['power', 'voltage', 'current']
- for i, val in enumerate(self.resistor_values):
- self.resistor_values[i] = int(1000 * float(val))
-
- def validate(self):
- if subprocess.call('which caiman', stdout=subprocess.PIPE, shell=True):
- raise InstrumentError('caiman not in PATH. Cannot enable energy probe')
- if not self.resistor_values:
- raise ConfigError('At least one resistor value must be specified')
- if len(self.resistor_values) > self.MAX_CHANNELS:
- raise ConfigError('{} Channels where specified when Energy Probe supports up to {}'
- .format(len(self.resistor_values), self.MAX_CHANNELS))
- if pandas is None:
- self.logger.warning("pandas package will significantly speed up this instrument")
- self.logger.warning("to install it try: pip install pandas")
-
- def setup(self, context):
- if not self.labels:
- self.labels = ["PORT_{}".format(channel) for channel, _ in enumerate(self.resistor_values)]
- self.output_directory = os.path.join(context.output_directory, 'energy_probe')
- rstring = ""
- for i, rval in enumerate(self.resistor_values):
- rstring += '-r {}:{} '.format(i, rval)
- self.command = 'caiman -d {} -l {} {}'.format(self.device_entry, rstring, self.output_directory)
- os.makedirs(self.output_directory)
-
- def start(self, context):
- self.logger.debug(self.command)
- self.caiman = subprocess.Popen(self.command,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- stdin=subprocess.PIPE,
- preexec_fn=os.setpgrp,
- shell=True)
-
- def stop(self, context):
- os.killpg(self.caiman.pid, signal.SIGTERM)
-
- def update_result(self, context): # pylint: disable=too-many-locals
- num_of_channels = len(self.resistor_values)
- processed_data = [[] for _ in xrange(num_of_channels)]
- filenames = [os.path.join(self.output_directory, '{}.csv'.format(label)) for label in self.labels]
- struct_format = '{}I'.format(num_of_channels * self.attributes_per_sample)
- not_a_full_row_seen = False
- with open(os.path.join(self.output_directory, "0000000000"), "rb") as bfile:
- while True:
- data = bfile.read(num_of_channels * self.bytes_per_sample)
- if data == '':
- break
- try:
- unpacked_data = struct.unpack(struct_format, data)
- except struct.error:
- if not_a_full_row_seen:
- self.logger.warn('possibly missaligned caiman raw data, row contained {} bytes'.format(len(data)))
- continue
- else:
- not_a_full_row_seen = True
- for i in xrange(num_of_channels):
- index = i * self.attributes_per_sample
- processed_data[i].append({attr: val for attr, val in
- zip(self.attributes, unpacked_data[index:index + self.attributes_per_sample])})
- for i, path in enumerate(filenames):
- with open(path, 'w') as f:
- if pandas is not None:
- self._pandas_produce_csv(processed_data[i], f)
- else:
- self._slow_produce_csv(processed_data[i], f)
-
- # pylint: disable=R0201
- def _pandas_produce_csv(self, data, f):
- dframe = pandas.DataFrame(data)
- dframe = dframe / 1000.0
- dframe.to_csv(f)
-
- def _slow_produce_csv(self, data, f):
- new_data = []
- for entry in data:
- new_data.append({key: val / 1000.0 for key, val in entry.items()})
- writer = csv.DictWriter(f, self.attributes)
- writer.writeheader()
- writer.writerows(new_data)
-
diff --git a/wlauto/instrumentation/fps/__init__.py b/wlauto/instrumentation/fps/__init__.py
deleted file mode 100644
index 4acd4574..00000000
--- a/wlauto/instrumentation/fps/__init__.py
+++ /dev/null
@@ -1,315 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0613,E1101
-from __future__ import division
-import os
-import sys
-import time
-import csv
-import shutil
-import threading
-import errno
-import tempfile
-
-from distutils.version import LooseVersion
-
-try:
- import pandas as pd
-except ImportError:
- pd = None
-
-from wlauto import Instrument, Parameter, IterationResult
-from wlauto.instrumentation import instrument_is_installed
-from wlauto.exceptions import (InstrumentError, WorkerThreadError, ConfigError,
- DeviceNotRespondingError, TimeoutError)
-from wlauto.utils.types import boolean, numeric
-
-
-VSYNC_INTERVAL = 16666667
-PAUSE_LATENCY = 20
-EPSYLON = 0.0001
-
-
-class FpsInstrument(Instrument):
-
- name = 'fps'
- description = """
- Measures Frames Per Second (FPS) and associated metrics for a workload's main View.
-
- .. note:: This instrument depends on pandas Python library (which is not part of standard
- WA dependencies), so you will need to install that first, before you can use it.
-
- The view is specified by the workload as ``view`` attribute. This defaults
- to ``'SurfaceView'`` for game workloads, and ``None`` for non-game
- workloads (as for them FPS mesurement usually doesn't make sense).
- Individual workloads may override this.
-
- This instrument adds four metrics to the results:
-
- :FPS: Frames Per Second. This is the frame rate of the workload.
- :frames: The total number of frames rendered during the execution of
- the workload.
- :janks: The number of "janks" that occured during execution of the
- workload. Janks are sudden shifts in frame rate. They result
- in a "stuttery" UI. See http://jankfree.org/jank-busters-io
- :not_at_vsync: The number of frames that did not render in a single
- vsync cycle.
-
- """
- supported_platforms = ['android']
-
- parameters = [
- Parameter('drop_threshold', kind=numeric, default=5,
- description='Data points below this FPS will be dropped as they '
- 'do not constitute "real" gameplay. The assumption '
- 'being that while actually running, the FPS in the '
- 'game will not drop below X frames per second, '
- 'except on loading screens, menus, etc, which '
- 'should not contribute to FPS calculation. '),
- Parameter('keep_raw', kind=boolean, default=False,
- description='If set to ``True``, this will keep the raw dumpsys output '
- 'in the results directory (this is maily used for debugging) '
- 'Note: frames.csv with collected frames data will always be '
- 'generated regardless of this setting.'),
- Parameter('generate_csv', kind=boolean, default=True,
- description='If set to ``True``, this will produce temporal fps data '
- 'in the results directory, in a file named fps.csv '
- 'Note: fps data will appear as discrete step-like values '
- 'in order to produce a more meainingfull representation,'
- 'a rolling mean can be applied.'),
- Parameter('crash_check', kind=boolean, default=True,
- description="""
- Specifies wither the instrument should check for crashed content by examining
- frame data. If this is set, ``execution_time`` instrument must also be installed.
- The check is performed by using the measured FPS and exection time to estimate the expected
- frames cound and comparing that against the measured frames count. The the ratio of
- measured/expected is too low, then it is assumed that the content has crashed part way
- during the run. What is "too low" is determined by ``crash_threshold``.
-
- .. note:: This is not 100\% fool-proof. If the crash occurs sufficiently close to
- workload's termination, it may not be detected. If this is expected, the
- threshold may be adjusted up to compensate.
- """),
- Parameter('crash_threshold', kind=float, default=0.7,
- description="""
- Specifies the threshold used to decided whether a measured/expected frames ration indicates
- a content crash. E.g. a value of ``0.75`` means the number of actual frames counted is a
- quarter lower than expected, it will treated as a content crash.
- """),
- ]
-
- clear_command = 'dumpsys SurfaceFlinger --latency-clear '
-
- def __init__(self, device, **kwargs):
- super(FpsInstrument, self).__init__(device, **kwargs)
- self.collector = None
- self.outfile = None
- self.fps_outfile = None
- self.is_enabled = True
-
- def validate(self):
- if not pd or LooseVersion(pd.__version__) < LooseVersion('0.13.1'):
- message = ('fps instrument requires pandas Python package (version 0.13.1 or higher) to be installed.\n'
- 'You can install it with pip, e.g. "sudo pip install pandas"')
- raise InstrumentError(message)
- if self.crash_check and not instrument_is_installed('execution_time'):
- raise ConfigError('execution_time instrument must be installed in order to check for content crash.')
-
- def setup(self, context):
- workload = context.workload
- if hasattr(workload, 'view'):
- self.fps_outfile = os.path.join(context.output_directory, 'fps.csv')
- self.outfile = os.path.join(context.output_directory, 'frames.csv')
- self.collector = LatencyCollector(self.outfile, self.device, workload.view or '', self.keep_raw, self.logger)
- self.device.execute(self.clear_command)
- else:
- self.logger.debug('Workload does not contain a view; disabling...')
- self.is_enabled = False
-
- def start(self, context):
- if self.is_enabled:
- self.logger.debug('Starting SurfaceFlinger collection...')
- self.collector.start()
-
- def stop(self, context):
- if self.is_enabled and self.collector.is_alive():
- self.logger.debug('Stopping SurfaceFlinger collection...')
- self.collector.stop()
-
- def update_result(self, context):
- if self.is_enabled:
- data = pd.read_csv(self.outfile)
- if not data.empty: # pylint: disable=maybe-no-member
- per_frame_fps = self._update_stats(context, data)
- if self.generate_csv:
- per_frame_fps.to_csv(self.fps_outfile, index=False, header=True)
- context.add_artifact('fps', path='fps.csv', kind='data')
- else:
- context.result.add_metric('FPS', float('nan'))
- context.result.add_metric('frame_count', 0)
- context.result.add_metric('janks', 0)
- context.result.add_metric('not_at_vsync', 0)
-
- def slow_update_result(self, context):
- result = context.result
- if result.has_metric('execution_time'):
- self.logger.debug('Checking for crashed content.')
- exec_time = result['execution_time'].value
- fps = result['FPS'].value
- frames = result['frame_count'].value
- if all([exec_time, fps, frames]):
- expected_frames = fps * exec_time
- ratio = frames / expected_frames
- self.logger.debug('actual/expected frames: {:.2}'.format(ratio))
- if ratio < self.crash_threshold:
- self.logger.error('Content for {} appears to have crashed.'.format(context.spec.label))
- result.status = IterationResult.FAILED
- result.add_event('Content crash detected (actual/expected frames: {:.2}).'.format(ratio))
-
- def _update_stats(self, context, data): # pylint: disable=too-many-locals
- vsync_interval = self.collector.refresh_period
- # fiter out bogus frames.
- actual_present_times = data.actual_present_time[data.actual_present_time != 0x7fffffffffffffff]
- actual_present_time_deltas = (actual_present_times - actual_present_times.shift()).drop(0) # pylint: disable=E1103
- vsyncs_to_compose = (actual_present_time_deltas / vsync_interval).apply(lambda x: int(round(x, 0)))
- # drop values lower than drop_threshold FPS as real in-game frame
- # rate is unlikely to drop below that (except on loading screens
- # etc, which should not be factored in frame rate calculation).
- per_frame_fps = (1.0 / (vsyncs_to_compose * (vsync_interval / 1e9)))
- keep_filter = per_frame_fps > self.drop_threshold
- filtered_vsyncs_to_compose = vsyncs_to_compose[keep_filter]
- if not filtered_vsyncs_to_compose.empty:
- total_vsyncs = filtered_vsyncs_to_compose.sum()
- if total_vsyncs:
- frame_count = filtered_vsyncs_to_compose.size
- fps = 1e9 * frame_count / (vsync_interval * total_vsyncs)
- context.result.add_metric('FPS', fps)
- context.result.add_metric('frame_count', frame_count)
- else:
- context.result.add_metric('FPS', float('nan'))
- context.result.add_metric('frame_count', 0)
-
- vtc_deltas = filtered_vsyncs_to_compose - filtered_vsyncs_to_compose.shift()
- vtc_deltas.index = range(0, vtc_deltas.size)
- vtc_deltas = vtc_deltas.drop(0).abs()
- janks = vtc_deltas.apply(lambda x: (PAUSE_LATENCY > x > 1.5) and 1 or 0).sum()
- not_at_vsync = vsyncs_to_compose.apply(lambda x: (abs(x - 1.0) > EPSYLON) and 1 or 0).sum()
- context.result.add_metric('janks', janks)
- context.result.add_metric('not_at_vsync', not_at_vsync)
- else: # no filtered_vsyncs_to_compose
- context.result.add_metric('FPS', float('nan'))
- context.result.add_metric('frame_count', 0)
- context.result.add_metric('janks', 0)
- context.result.add_metric('not_at_vsync', 0)
- per_frame_fps.name = 'fps'
- return per_frame_fps
-
-
-class LatencyCollector(threading.Thread):
-
- # Note: the size of the frames buffer for a particular surface is defined
- # by NUM_FRAME_RECORDS inside android/services/surfaceflinger/FrameTracker.h.
- # At the time of writing, this was hard-coded to 128. So at 60 fps
- # (and there is no reason to go above that, as it matches vsync rate
- # on pretty much all phones), there is just over 2 seconds' worth of
- # frames in there. Hence the sleep time of 2 seconds between dumps.
- #command_template = 'while (true); do dumpsys SurfaceFlinger --latency {}; sleep 2; done'
- command_template = 'dumpsys SurfaceFlinger --latency {}'
-
- def __init__(self, outfile, device, activity, keep_raw, logger):
- super(LatencyCollector, self).__init__()
- self.outfile = outfile
- self.device = device
- self.command = self.command_template.format(activity)
- self.keep_raw = keep_raw
- self.logger = logger
- self.stop_signal = threading.Event()
- self.frames = []
- self.last_ready_time = 0
- self.refresh_period = VSYNC_INTERVAL
- self.drop_threshold = self.refresh_period * 1000
- self.exc = None
- self.unresponsive_count = 0
-
- def run(self):
- try:
- self.logger.debug('SurfaceFlinger collection started.')
- self.stop_signal.clear()
- fd, temp_file = tempfile.mkstemp()
- self.logger.debug('temp file: {}'.format(temp_file))
- wfh = os.fdopen(fd, 'wb')
- try:
- while not self.stop_signal.is_set():
- wfh.write(self.device.execute(self.command))
- time.sleep(2)
- finally:
- wfh.close()
- # TODO: this can happen after the run during results processing
- with open(temp_file) as fh:
- text = fh.read().replace('\r\n', '\n').replace('\r', '\n')
- for line in text.split('\n'):
- line = line.strip()
- if line:
- self._process_trace_line(line)
- if self.keep_raw:
- raw_file = os.path.join(os.path.dirname(self.outfile), 'surfaceflinger.raw')
- shutil.copy(temp_file, raw_file)
- os.unlink(temp_file)
- except (DeviceNotRespondingError, TimeoutError): # pylint: disable=W0703
- raise
- except Exception, e: # pylint: disable=W0703
- self.logger.warning('Exception on collector thread: {}({})'.format(e.__class__.__name__, e))
- self.exc = WorkerThreadError(self.name, sys.exc_info())
- self.logger.debug('SurfaceFlinger collection stopped.')
-
- with open(self.outfile, 'w') as wfh:
- writer = csv.writer(wfh)
- writer.writerow(['desired_present_time', 'actual_present_time', 'frame_ready_time'])
- writer.writerows(self.frames)
- self.logger.debug('Frames data written.')
-
- def stop(self):
- self.stop_signal.set()
- self.join()
- if self.unresponsive_count:
- message = 'SurfaceFlinger was unrepsonsive {} times.'.format(self.unresponsive_count)
- if self.unresponsive_count > 10:
- self.logger.warning(message)
- else:
- self.logger.debug(message)
- if self.exc:
- raise self.exc # pylint: disable=E0702
- self.logger.debug('FSP collection complete.')
-
- def _process_trace_line(self, line):
- parts = line.split()
- if len(parts) == 3:
- desired_present_time, actual_present_time, frame_ready_time = map(int, parts)
- if frame_ready_time <= self.last_ready_time:
- return # duplicate frame
- if (frame_ready_time - desired_present_time) > self.drop_threshold:
- self.logger.debug('Dropping bogus frame {}.'.format(line))
- return # bogus data
- self.last_ready_time = frame_ready_time
- self.frames.append((desired_present_time, actual_present_time, frame_ready_time))
- elif len(parts) == 1:
- self.refresh_period = int(parts[0])
- self.drop_threshold = self.refresh_period * 10
- elif 'SurfaceFlinger appears to be unresponsive, dumping anyways' in line:
- self.unresponsive_count += 1
- else:
- self.logger.warning('Unexpected SurfaceFlinger dump output: {}'.format(line))
diff --git a/wlauto/instrumentation/freqsweep/__init__.py b/wlauto/instrumentation/freqsweep/__init__.py
deleted file mode 100755
index c048c6ec..00000000
--- a/wlauto/instrumentation/freqsweep/__init__.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# pylint: disable=access-member-before-definition,attribute-defined-outside-init
-
-import os
-from collections import OrderedDict
-from wlauto import Instrument, Parameter
-from wlauto.exceptions import ConfigError, InstrumentError
-from wlauto.utils.types import caseless_string
-
-
-class FreqSweep(Instrument):
- name = 'freq_sweep'
- description = """
- Sweeps workloads through all available frequencies on a device.
-
- When enabled this instrument will take all workloads specified in an agenda
- and run them at all available frequencies for all clusters.
-
- Recommendations:
- - Setting the runner to 'by_spec' increases the chance of successfully
- completing an agenda without encountering hotplug issues
- - If possible disable dynamic hotplug on the target device
- """
-
- parameters = [
- Parameter('sweeps', kind=list,
- description="""
- By default this instrument will sweep across all available
- frequencies for all available clusters. If you wish to only
- sweep across certain frequencies on particular clusters you
- can do so by specifying this parameter.
-
- Sweeps should be a lists of dictionaries that can contain:
- - Cluster (mandatory): The name of the cluster this sweep will be
- performed on. E.g A7
- - Frequencies: A list of frequencies (in KHz) to use. If this is
- not provided all frequencies available for this
- cluster will be used.
- E.g: [800000, 900000, 100000]
- - label: Workload specs will be named '{spec id}_{label}_{frequency}'.
- If a label is not provided it will be named 'sweep{sweep No.}'
-
- Example sweep specification:
-
- freq_sweep:
- sweeps:
- - cluster: A53
- label: littles
- frequencies: [800000, 900000, 100000]
- - cluster: A57
- label: bigs
- """),
- ]
-
- def validate(self):
- if not self.device.core_names:
- raise ConfigError('The Device does not appear to have core_names configured.')
-
- def initialize(self, context): # pylint: disable=r0912
- if not self.device.is_rooted:
- raise InstrumentError('The device must be rooted to sweep frequencies')
-
- if 'userspace' not in self.device.list_available_cluster_governors(0):
- raise InstrumentError("'userspace' cpufreq governor must be enabled")
-
- # Create sweeps for each core type using num_cpus cores
- if not self.sweeps:
- self.sweeps = []
- for core in set(self.device.core_names):
- sweep_spec = {}
- sweep_spec['cluster'] = core
- sweep_spec['label'] = core
- self.sweeps.append(sweep_spec)
-
- new_specs = []
- old_specs = []
- for job in context.runner.job_queue:
- if job.spec not in old_specs:
- old_specs.append(job.spec)
-
- # Validate sweeps, add missing sections and create workload specs
- for i, sweep_spec in enumerate(self.sweeps):
- if 'cluster' not in sweep_spec:
- raise ConfigError('cluster must be define for all sweeps')
- # Check if cluster exists on device
- if caseless_string(sweep_spec['cluster']) not in self.device.core_names:
- raise ConfigError('Only {} cores are present on this device, you specified {}'
- .format(", ".join(set(self.device.core_names)), sweep_spec['cluster']))
-
- # Default to all available frequencies
- if 'frequencies' not in sweep_spec:
- self.device.enable_cpu(self.device.core_names.index(sweep_spec['cluster']))
- sweep_spec['frequencies'] = self.device.list_available_core_frequencies(sweep_spec['cluster'])
-
- # Check that given frequencies are valid of the core cluster
- else:
- self.device.enable_cpu(self.device.core_names.index(sweep_spec['cluster']))
- available_freqs = self.device.list_available_core_frequencies(sweep_spec['cluster'])
- for freq in sweep_spec['frequencies']:
- if freq not in available_freqs:
- raise ConfigError('Frequency {} is not supported by {} cores'.format(freq, sweep_spec['cluster']))
-
- # Add default labels
- if 'label' not in sweep_spec:
- sweep_spec['label'] = "sweep{}".format(i + 1)
-
- new_specs.extend(self.get_sweep_workload_specs(old_specs, sweep_spec, context))
-
- # Update config to refect jobs that will actually run.
- context.config.workload_specs = new_specs
- config_file = os.path.join(context.host_working_directory, 'run_config.json')
- with open(config_file, 'wb') as wfh:
- context.config.serialize(wfh)
- context.runner.init_queue(new_specs)
-
- def get_sweep_workload_specs(self, old_specs, sweep_spec, context):
- new_specs = []
- for old_spec in old_specs:
- for freq in sweep_spec['frequencies']:
- spec = old_spec.copy()
- if 'runtime_params' in sweep_spec:
- spec.runtime_parameters = spec.runtime_parameters.copy()
- spec.runtime_parameters.update(sweep_spec['runtime_params'])
-
- if 'workload_params' in sweep_spec:
- spec.workload_parameters = spec.workload_parameters.copy()
- spec.workload_parameters.update(sweep_spec['workload_params'])
-
- spec.runtime_parameters['{}_governor'.format(sweep_spec['cluster'])] = "userspace"
- spec.runtime_parameters['{}_frequency'.format(sweep_spec['cluster'])] = freq
- spec.id = '{}_{}_{}'.format(spec.id, sweep_spec['label'], freq)
- spec.classifiers['core'] = sweep_spec['cluster']
- spec.classifiers['freq'] = freq
- spec.load(self.device, context.config.ext_loader)
- spec.workload.init_resources(context)
- spec.workload.validate()
- new_specs.append(spec)
- return new_specs
diff --git a/wlauto/instrumentation/hwmon/__init__.py b/wlauto/instrumentation/hwmon/__init__.py
deleted file mode 100644
index d1bde4e5..00000000
--- a/wlauto/instrumentation/hwmon/__init__.py
+++ /dev/null
@@ -1,125 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0613,E1101
-from __future__ import division
-from collections import OrderedDict
-
-from wlauto import Parameter, Instrument
-from wlauto.exceptions import InstrumentError, ConfigError
-from wlauto.utils.hwmon import discover_sensors
-from wlauto.utils.types import list_of_strs
-
-
-# sensor_kind: (report_type, units, conversion)
-HWMON_SENSORS = {
- 'energy': ('diff', 'Joules', lambda x: x / 10 ** 6),
- 'temp': ('before/after', 'Celsius', lambda x: x / 10 ** 3),
-}
-
-HWMON_SENSOR_PRIORITIES = ['energy', 'temp']
-
-
-class HwmonInstrument(Instrument):
-
- name = 'hwmon'
- description = """
- Hardware Monitor (hwmon) is a generic Linux kernel subsystem,
- providing access to hardware monitoring components like temperature or
- voltage/current sensors.
-
- The following web page has more information:
-
- http://blogs.arm.com/software-enablement/925-linux-hwmon-power-management-and-arm-ds-5-streamline/
-
- You can specify which sensors HwmonInstrument looks for by specifying
- hwmon_sensors in your config.py, e.g. ::
-
- hwmon_sensors = ['energy', 'temp']
-
- If this setting is not specified, it will look for all sensors it knows about.
- Current valid values are::
-
- :energy: Collect energy measurements and report energy consumed
- during run execution (the diff of before and after readings)
- in Joules.
- :temp: Collect temperature measurements and report the before and
- after readings in degrees Celsius.
-
- """
-
- parameters = [
- Parameter('sensors', kind=list_of_strs, default=['energy', 'temp'],
- global_alias='hwmon_sensors',
- description='The kinds of sensors hwmon instrument will look for')
- ]
-
- def __init__(self, device, **kwargs):
- super(HwmonInstrument, self).__init__(device, **kwargs)
-
- if self.sensors:
- self.sensor_kinds = {}
- for kind in self.sensors:
- if kind in HWMON_SENSORS:
- self.sensor_kinds[kind] = HWMON_SENSORS[kind]
- else:
- message = 'Unexpected sensor type: {}; must be in {}'.format(kind, HWMON_SENSORS.keys())
- raise ConfigError(message)
- else:
- self.sensor_kinds = HWMON_SENSORS
-
- self.sensors = []
-
- def initialize(self, context):
- self.sensors = []
- self.logger.debug('Searching for HWMON sensors.')
- discovered_sensors = discover_sensors(self.device, self.sensor_kinds.keys())
- for sensor in sorted(discovered_sensors, key=lambda s: HWMON_SENSOR_PRIORITIES.index(s.kind)):
- self.logger.debug('Adding {}'.format(sensor.filepath))
- self.sensors.append(sensor)
-
- def setup(self, context):
- for sensor in self.sensors:
- sensor.clear_readings()
-
- def fast_start(self, context):
- for sensor in reversed(self.sensors):
- sensor.take_reading()
-
- def fast_stop(self, context):
- for sensor in self.sensors:
- sensor.take_reading()
-
- def update_result(self, context):
- for sensor in self.sensors:
- try:
- report_type, units, conversion = HWMON_SENSORS[sensor.kind]
- if report_type == 'diff':
- before, after = sensor.readings
- diff = conversion(after - before)
- context.result.add_metric(sensor.label, diff, units)
- elif report_type == 'before/after':
- before, after = sensor.readings
- mean = conversion((after + before) / 2)
- context.result.add_metric(sensor.label, mean, units)
- context.result.add_metric(sensor.label + ' before', conversion(before), units)
- context.result.add_metric(sensor.label + ' after', conversion(after), units)
- else:
- raise InstrumentError('Unexpected report_type: {}'.format(report_type))
- except ValueError, e:
- self.logger.error('Could not collect all {} readings for {}'.format(sensor.kind, sensor.label))
- self.logger.error('Got: {}'.format(e))
-
diff --git a/wlauto/instrumentation/juno_energy/__init__.py b/wlauto/instrumentation/juno_energy/__init__.py
deleted file mode 100644
index db71cdea..00000000
--- a/wlauto/instrumentation/juno_energy/__init__.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0613,W0201
-import os
-import csv
-import time
-import threading
-import logging
-from operator import itemgetter
-
-from wlauto import Instrument, File, Parameter
-from wlauto.exceptions import InstrumentError
-
-UNIT_MAP = {
- 'curr': 'Amps',
- 'volt': 'Volts',
- 'cenr': 'Joules',
- 'pow': 'Watts',
-}
-
-JUNO_MAX_INT = 0x7fffffffffffffff
-
-
-class JunoEnergy(Instrument):
-
- name = 'juno_energy'
- description = """
- Collects internal energy meter measurements from Juno development board.
-
- This instrument was created because (at the time of creation) Juno's energy
- meter measurements aren't exposed through HWMON or similar standardized mechanism,
- necessitating a dedicated instrument to access them.
-
- This instrument, and the ``readenergy`` executable it relies on are very much tied
- to the Juno platform and are not expected to work on other boards.
-
- """
-
- parameters = [
- Parameter('period', kind=float, default=0.1,
- description="""
- Specifies the time, in Seconds, between polling energy counters.
- """),
- Parameter('strict', kind=bool, default=True,
- description="""
- Setting this to ``False`` will omit the check that the ``device`` is
- ``"juno"``. This is useful if the underlying board is actually Juno
- but WA connects via a different interface (e.g. ``generic_linux``).
- """),
- ]
-
- def on_run_init(self, context):
- local_file = context.resolver.get(File(self, 'readenergy'))
- self.device.killall('readenergy', as_root=True)
- self.readenergy = self.device.install(local_file)
-
- def setup(self, context):
- self.host_output_file = os.path.join(context.output_directory, 'energy.csv')
- self.device_output_file = self.device.path.join(self.device.working_directory, 'energy.csv')
- self.command = '{} -o {}'.format(self.readenergy, self.device_output_file)
- self.device.killall('readenergy', as_root=True)
-
- def start(self, context):
- self.device.kick_off(self.command)
-
- def stop(self, context):
- self.device.killall('readenergy', signal='TERM', as_root=True)
-
- def update_result(self, context):
- self.device.pull(self.device_output_file, self.host_output_file)
- context.add_artifact('junoenergy', self.host_output_file, 'data')
-
- with open(self.host_output_file) as fh:
- reader = csv.reader(fh)
- headers = reader.next()
- columns = zip(*reader)
- for header, data in zip(headers, columns):
- data = map(float, data)
- if header.endswith('cenr'):
- value = data[-1] - data[0]
- if value < 0: # counter wrapped
- value = JUNO_MAX_INT + value
- else: # not cumulative energy
- value = sum(data) / len(data)
- context.add_metric(header, value, UNIT_MAP[header.split('_')[-1]])
-
- def teardown(self, conetext):
- self.device.remove(self.device_output_file)
-
- def validate(self):
- if self.strict:
- if self.device.name.lower() != 'juno':
- message = 'juno_energy instrument is only supported on juno devices; found {}'
- raise InstrumentError(message.format(self.device.name))
-
diff --git a/wlauto/instrumentation/juno_energy/readenergy b/wlauto/instrumentation/juno_energy/readenergy
deleted file mode 100755
index c26991c2..00000000
--- a/wlauto/instrumentation/juno_energy/readenergy
+++ /dev/null
Binary files differ
diff --git a/wlauto/instrumentation/misc/__init__.py b/wlauto/instrumentation/misc/__init__.py
deleted file mode 100644
index a02793b0..00000000
--- a/wlauto/instrumentation/misc/__init__.py
+++ /dev/null
@@ -1,390 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0613,no-member,attribute-defined-outside-init
-"""
-
-Some "standard" instruments to collect additional info about workload execution.
-
-.. note:: The run() method of a Workload may perform some "boilerplate" as well as
- the actual execution of the workload (e.g. it may contain UI automation
- needed to start the workload). This "boilerplate" execution will also
- be measured by these instruments. As such, they are not suitable for collected
- precise data about specific operations.
-"""
-import os
-import re
-import logging
-import time
-import tarfile
-from itertools import izip, izip_longest
-from subprocess import CalledProcessError
-
-from devlib.exception import TargetError
-
-from wlauto import Instrument, Parameter
-from wlauto.core import signal
-from wlauto.exceptions import ConfigError
-from wlauto.utils.misc import diff_tokens, write_table, check_output, as_relative
-from wlauto.utils.misc import ensure_file_directory_exists as _f
-from wlauto.utils.misc import ensure_directory_exists as _d
-from wlauto.utils.android import ApkInfo
-from wlauto.utils.types import list_of_strings
-
-
-logger = logging.getLogger(__name__)
-
-
-class SysfsExtractor(Instrument):
-
- name = 'sysfs_extractor'
- description = """
- Collects the contest of a set of directories, before and after workload execution
- and diffs the result.
-
- """
-
- mount_command = 'mount -t tmpfs -o size={} tmpfs {}'
- extract_timeout = 30
- tarname = 'sysfs.tar.gz'
- DEVICE_PATH = 0
- BEFORE_PATH = 1
- AFTER_PATH = 2
- DIFF_PATH = 3
-
- parameters = [
- Parameter('paths', kind=list_of_strings, mandatory=True,
- description="""A list of paths to be pulled from the device. These could be directories
- as well as files.""",
- global_alias='sysfs_extract_dirs'),
- Parameter('use_tmpfs', kind=bool, default=None,
- description="""
- Specifies whether tmpfs should be used to cache sysfile trees and then pull them down
- as a tarball. This is significantly faster then just copying the directory trees from
- the device directly, bur requres root and may not work on all devices. Defaults to
- ``True`` if the device is rooted and ``False`` if it is not.
- """),
- Parameter('tmpfs_mount_point', default=None,
- description="""Mount point for tmpfs partition used to store snapshots of paths."""),
- Parameter('tmpfs_size', default='32m',
- description="""Size of the tempfs partition."""),
- ]
-
- def initialize(self, context):
- if not self.device.is_rooted and self.use_tmpfs: # pylint: disable=access-member-before-definition
- raise ConfigError('use_tempfs must be False for an unrooted device.')
- elif self.use_tmpfs is None: # pylint: disable=access-member-before-definition
- self.use_tmpfs = self.device.is_rooted
-
- if self.use_tmpfs:
- self.on_device_before = self.device.path.join(self.tmpfs_mount_point, 'before')
- self.on_device_after = self.device.path.join(self.tmpfs_mount_point, 'after')
-
- if not self.device.file_exists(self.tmpfs_mount_point):
- self.device.execute('mkdir -p {}'.format(self.tmpfs_mount_point), as_root=True)
- self.device.execute(self.mount_command.format(self.tmpfs_size, self.tmpfs_mount_point),
- as_root=True)
-
- def setup(self, context):
- before_dirs = [
- _d(os.path.join(context.output_directory, 'before', self._local_dir(d)))
- for d in self.paths
- ]
- after_dirs = [
- _d(os.path.join(context.output_directory, 'after', self._local_dir(d)))
- for d in self.paths
- ]
- diff_dirs = [
- _d(os.path.join(context.output_directory, 'diff', self._local_dir(d)))
- for d in self.paths
- ]
- self.device_and_host_paths = zip(self.paths, before_dirs, after_dirs, diff_dirs)
-
- if self.use_tmpfs:
- for d in self.paths:
- before_dir = self.device.path.join(self.on_device_before,
- self.device.path.dirname(as_relative(d)))
- after_dir = self.device.path.join(self.on_device_after,
- self.device.path.dirname(as_relative(d)))
- if self.device.file_exists(before_dir):
- self.device.execute('rm -rf {}'.format(before_dir), as_root=True)
- self.device.execute('mkdir -p {}'.format(before_dir), as_root=True)
- if self.device.file_exists(after_dir):
- self.device.execute('rm -rf {}'.format(after_dir), as_root=True)
- self.device.execute('mkdir -p {}'.format(after_dir), as_root=True)
-
- def slow_start(self, context):
- if self.use_tmpfs:
- for d in self.paths:
- dest_dir = self.device.path.join(self.on_device_before, as_relative(d))
- if '*' in dest_dir:
- dest_dir = self.device.path.dirname(dest_dir)
- self.device.execute('{} cp -Hr {} {}'.format(self.device.busybox, d, dest_dir),
- as_root=True, check_exit_code=False)
- else: # not rooted
- for dev_dir, before_dir, _, _ in self.device_and_host_paths:
- self.device.pull(dev_dir, before_dir)
-
- def slow_stop(self, context):
- if self.use_tmpfs:
- for d in self.paths:
- dest_dir = self.device.path.join(self.on_device_after, as_relative(d))
- if '*' in dest_dir:
- dest_dir = self.device.path.dirname(dest_dir)
- self.device.execute('{} cp -Hr {} {}'.format(self.device.busybox, d, dest_dir),
- as_root=True, check_exit_code=False)
- else: # not using tmpfs
- for dev_dir, _, after_dir, _ in self.device_and_host_paths:
- self.device.pull(dev_dir, after_dir)
-
- def update_result(self, context):
- if self.use_tmpfs:
- on_device_tarball = self.device.path.join(self.device.working_directory, self.tarname)
- on_host_tarball = self.device.path.join(context.output_directory, self.tarname)
- self.device.execute('{} tar czf {} -C {} .'.format(self.device.busybox,
- on_device_tarball,
- self.tmpfs_mount_point),
- as_root=True)
- self.device.execute('chmod 0777 {}'.format(on_device_tarball), as_root=True)
- self.device.pull(on_device_tarball, on_host_tarball)
- with tarfile.open(on_host_tarball, 'r:gz') as tf:
- tf.extractall(context.output_directory)
- self.device.remove(on_device_tarball)
- os.remove(on_host_tarball)
-
- for paths in self.device_and_host_paths:
- after_dir = paths[self.AFTER_PATH]
- dev_dir = paths[self.DEVICE_PATH].strip('*') # remove potential trailing '*'
- if (not os.listdir(after_dir) and
- self.device.file_exists(dev_dir) and
- self.device.list_directory(dev_dir)):
- self.logger.error('sysfs files were not pulled from the device.')
- self.device_and_host_paths.remove(paths) # Path is removed to skip diffing it
- for _, before_dir, after_dir, diff_dir in self.device_and_host_paths:
- _diff_sysfs_dirs(before_dir, after_dir, diff_dir)
-
- def teardown(self, context):
- self._one_time_setup_done = []
-
- def finalize(self, context):
- if self.use_tmpfs:
- try:
- self.device.execute('umount {}'.format(self.tmpfs_mount_point), as_root=True)
- except (TargetError, CalledProcessError):
- # assume a directory but not mount point
- pass
- self.device.execute('rm -rf {}'.format(self.tmpfs_mount_point),
- as_root=True, check_exit_code=False)
-
- def validate(self):
- if not self.tmpfs_mount_point: # pylint: disable=access-member-before-definition
- self.tmpfs_mount_point = self.device.path.join(self.device.working_directory, 'temp-fs')
-
- def _local_dir(self, directory):
- return os.path.dirname(as_relative(directory).replace(self.device.path.sep, os.sep))
-
-
-class ExecutionTimeInstrument(Instrument):
-
- name = 'execution_time'
- description = """
- Measure how long it took to execute the run() methods of a Workload.
-
- """
-
- priority = 15
-
- def __init__(self, target, **kwargs):
- super(ExecutionTimeInstrument, self).__init__(target, **kwargs)
- self.start_time = None
- self.end_time = None
-
- def on_run_start(self, context):
- signal.connect(self.get_start_time, signal.BEFORE_WORKLOAD_EXECUTION, priority=self.priority)
- signal.connect(self.get_stop_time, signal.AFTER_WORKLOAD_EXECUTION, priority=self.priority)
-
- def get_start_time(self, context):
- self.start_time = time.time()
-
- def get_stop_time(self, context):
- self.end_time = time.time()
-
- def update_result(self, context):
- execution_time = self.end_time - self.start_time
- context.result.add_metric('execution_time', execution_time, 'seconds')
-
-
-class ApkVersion(Instrument):
-
- name = 'apk_version'
- description = """
- Extracts APK versions for workloads that have them.
-
- """
-
- def __init__(self, device, **kwargs):
- super(ApkVersion, self).__init__(device, **kwargs)
- self.apk_info = None
-
- def setup(self, context):
- if hasattr(context.workload, 'apk_file'):
- self.apk_info = ApkInfo(context.workload.apk_file)
- else:
- self.apk_info = None
-
- def update_result(self, context):
- if self.apk_info:
- context.result.add_metric(self.name, self.apk_info.version_name)
-
-
-class InterruptStatsInstrument(Instrument):
-
- name = 'interrupts'
- description = """
- Pulls the ``/proc/interrupts`` file before and after workload execution and diffs them
- to show what interrupts occurred during that time.
-
- """
-
- def __init__(self, device, **kwargs):
- super(InterruptStatsInstrument, self).__init__(device, **kwargs)
- self.before_file = None
- self.after_file = None
- self.diff_file = None
-
- def setup(self, context):
- self.before_file = os.path.join(context.output_directory, 'before', 'proc', 'interrupts')
- self.after_file = os.path.join(context.output_directory, 'after', 'proc', 'interrupts')
- self.diff_file = os.path.join(context.output_directory, 'diff', 'proc', 'interrupts')
-
- def start(self, context):
- with open(_f(self.before_file), 'w') as wfh:
- wfh.write(self.device.execute('cat /proc/interrupts'))
-
- def stop(self, context):
- with open(_f(self.after_file), 'w') as wfh:
- wfh.write(self.device.execute('cat /proc/interrupts'))
-
- def update_result(self, context):
- # If workload execution failed, the after_file may not have been created.
- if os.path.isfile(self.after_file):
- _diff_interrupt_files(self.before_file, self.after_file, _f(self.diff_file))
-
-
-class DynamicFrequencyInstrument(SysfsExtractor):
-
- name = 'cpufreq'
- description = """
- Collects dynamic frequency (DVFS) settings before and after workload execution.
-
- """
-
- tarname = 'cpufreq.tar.gz'
-
- parameters = [
- Parameter('paths', mandatory=False, override=True),
- ]
-
- def setup(self, context):
- self.paths = ['/sys/devices/system/cpu']
- if self.use_tmpfs:
- self.paths.append('/sys/class/devfreq/*') # the '*' would cause problems for adb pull.
- super(DynamicFrequencyInstrument, self).setup(context)
-
- def validate(self):
- # temp-fs would have been set in super's validate, if not explicitly specified.
- if not self.tmpfs_mount_point.endswith('-cpufreq'): # pylint: disable=access-member-before-definition
- self.tmpfs_mount_point += '-cpufreq'
-
-
-def _diff_interrupt_files(before, after, result): # pylint: disable=R0914
- output_lines = []
- with open(before) as bfh:
- with open(after) as ofh:
- for bline, aline in izip(bfh, ofh):
- bchunks = bline.strip().split()
- while True:
- achunks = aline.strip().split()
- if achunks[0] == bchunks[0]:
- diffchunks = ['']
- diffchunks.append(achunks[0])
- diffchunks.extend([diff_tokens(b, a) for b, a
- in zip(bchunks[1:], achunks[1:])])
- output_lines.append(diffchunks)
- break
- else: # new category appeared in the after file
- diffchunks = ['>'] + achunks
- output_lines.append(diffchunks)
- try:
- aline = ofh.next()
- except StopIteration:
- break
-
- # Offset heading columns by one to allow for row labels on subsequent
- # lines.
- output_lines[0].insert(0, '')
-
- # Any "columns" that do not have headings in the first row are not actually
- # columns -- they are a single column where space-spearated words got
- # split. Merge them back together to prevent them from being
- # column-aligned by write_table.
- table_rows = [output_lines[0]]
- num_cols = len(output_lines[0])
- for row in output_lines[1:]:
- table_row = row[:num_cols]
- table_row.append(' '.join(row[num_cols:]))
- table_rows.append(table_row)
-
- with open(result, 'w') as wfh:
- write_table(table_rows, wfh)
-
-
-def _diff_sysfs_dirs(before, after, result): # pylint: disable=R0914
- before_files = []
- os.path.walk(before,
- lambda arg, dirname, names: arg.extend([os.path.join(dirname, f) for f in names]),
- before_files
- )
- before_files = filter(os.path.isfile, before_files)
- files = [os.path.relpath(f, before) for f in before_files]
- after_files = [os.path.join(after, f) for f in files]
- diff_files = [os.path.join(result, f) for f in files]
-
- for bfile, afile, dfile in zip(before_files, after_files, diff_files):
- if not os.path.isfile(afile):
- logger.debug('sysfs_diff: {} does not exist or is not a file'.format(afile))
- continue
-
- with open(bfile) as bfh, open(afile) as afh: # pylint: disable=C0321
- with open(_f(dfile), 'w') as dfh:
- for i, (bline, aline) in enumerate(izip_longest(bfh, afh), 1):
- if aline is None:
- logger.debug('Lines missing from {}'.format(afile))
- break
- bchunks = re.split(r'(\W+)', bline)
- achunks = re.split(r'(\W+)', aline)
- if len(bchunks) != len(achunks):
- logger.debug('Token length mismatch in {} on line {}'.format(bfile, i))
- dfh.write('xxx ' + bline)
- continue
- if ((len([c for c in bchunks if c.strip()]) == len([c for c in achunks if c.strip()]) == 2) and
- (bchunks[0] == achunks[0])):
- # if there are only two columns and the first column is the
- # same, assume it's a "header" column and do not diff it.
- dchunks = [bchunks[0]] + [diff_tokens(b, a) for b, a in zip(bchunks[1:], achunks[1:])]
- else:
- dchunks = [diff_tokens(b, a) for b, a in zip(bchunks, achunks)]
- dfh.write(''.join(dchunks))
diff --git a/wlauto/instrumentation/netstats/__init__.py b/wlauto/instrumentation/netstats/__init__.py
deleted file mode 100644
index acb09d8d..00000000
--- a/wlauto/instrumentation/netstats/__init__.py
+++ /dev/null
@@ -1,192 +0,0 @@
-import os
-import re
-import csv
-import tempfile
-import logging
-from datetime import datetime
-from collections import defaultdict
-from itertools import izip_longest
-
-from wlauto import Instrument, Parameter
-from wlauto import ApkFile
-from wlauto.exceptions import DeviceError, HostError
-from wlauto.utils.android import ApkInfo
-from wlauto.utils.types import list_of_strings
-
-
-THIS_DIR = os.path.dirname(__file__)
-
-NETSTAT_REGEX = re.compile(r'I/(?P<tag>netstats-\d+)\(\s*\d*\): (?P<ts>\d+) '
- r'"(?P<package>[^"]+)" TX: (?P<tx>\S+) RX: (?P<rx>\S+)')
-
-
-def extract_netstats(filepath, tag=None):
- netstats = []
- with open(filepath) as fh:
- for line in fh:
- match = NETSTAT_REGEX.search(line)
- if not match:
- continue
- if tag and match.group('tag') != tag:
- continue
- netstats.append((match.group('tag'),
- match.group('ts'),
- match.group('package'),
- match.group('tx'),
- match.group('rx')))
- return netstats
-
-
-def netstats_to_measurements(netstats):
- measurements = defaultdict(list)
- for row in netstats:
- tag, ts, package, tx, rx = row # pylint: disable=unused-variable
- measurements[package + '_tx'].append(tx)
- measurements[package + '_rx'].append(rx)
- return measurements
-
-
-def write_measurements_csv(measurements, filepath):
- headers = sorted(measurements.keys())
- columns = [measurements[h] for h in headers]
- with open(filepath, 'wb') as wfh:
- writer = csv.writer(wfh)
- writer.writerow(headers)
- writer.writerows(izip_longest(*columns))
-
-
-class NetstatsCollector(object):
-
- def __init__(self, target, apk, service='.TrafficMetricsService'):
- """
- Additional paramerter:
-
- :apk: Path to the APK file that contains ``com.arm.devlab.netstats``
- package. If not specified, it will be assumed that an APK with
- name "netstats.apk" is located in the same directory as the
- Python module for the instrument.
- :service: Name of the service to be launched. This service must be
- present in the APK.
-
- """
- self.target = target
- self.apk = apk
- self.logger = logging.getLogger('netstat')
- self.package = ApkInfo(self.apk).package
- self.service = service
- self.tag = None
- self.command = None
- self.stop_command = 'am kill {}'.format(self.package)
-
- def setup(self, force=False):
- if self.target.package_is_installed(self.package):
- if force:
- self.logger.debug('Re-installing {} (forced)'.format(self.package))
- self.target.uninstall(self.package)
- self.target.install(self.apk, timeout=300)
- else:
- self.logger.debug('{} already present on target'.format(self.package))
- else:
- self.logger.debug('Deploying {} to target'.format(self.package))
- self.target.install(self.apk)
-
- def reset(self, sites=None, period=None):
- period_arg, packages_arg = '', ''
- self.tag = 'netstats-{}'.format(datetime.now().strftime('%Y%m%d%H%M%s'))
- tag_arg = ' --es tag {}'.format(self.tag)
- if sites:
- packages_arg = ' --es packages {}'.format(','.join(sites))
- if period:
- period_arg = ' --ei period {}'.format(period)
- self.command = 'am startservice{}{}{} {}/{}'.format(tag_arg,
- period_arg,
- packages_arg,
- self.package,
- self.service)
- self.target.execute(self.stop_command) # ensure the service is not running.
-
- def start(self):
- if self.command is None:
- raise RuntimeError('reset() must be called before start()')
- self.target.execute(self.command)
-
- def stop(self):
- self.target.execute(self.stop_command)
-
- def get_data(self, outfile):
- raw_log_file = tempfile.mktemp()
- self.target.dump_logcat(raw_log_file)
- data = extract_netstats(raw_log_file)
- measurements = netstats_to_measurements(data)
- write_measurements_csv(measurements, outfile)
- os.remove(raw_log_file)
-
- def teardown(self):
- self.target.uninstall(self.package)
-
-
-class NetstatsInstrument(Instrument):
- # pylint: disable=unused-argument
-
- name = 'netstats'
- description = """
- Measures transmit/receive network traffic on an Android divice on per-package
- basis.
-
- """
-
- parameters = [
- Parameter('packages', kind=list_of_strings,
- description="""
- List of Android packages who's traffic will be monitored. If
- unspecified, all packages in the device will be monitorred.
- """),
- Parameter('period', kind=int, default=5,
- description="""
- Polling period for instrumentation on the device. Traffic statistics
- will be updated every ``period`` seconds.
- """),
- Parameter('force_reinstall', kind=bool, default=False,
- description="""
- If ``True``, instrumentation APK will always be re-installed even if
- it already installed on the device.
- """),
- Parameter('uninstall_on_completion', kind=bool, default=False,
- global_alias='cleanup',
- description="""
- If ``True``, instrumentation will be uninstalled upon run completion.
- """),
- ]
-
- def initialize(self, context):
- if self.device.os != 'android':
- raise DeviceError('nestats instrument only supports on Android devices.')
- apk = context.resolver.get(ApkFile(self))
- self.collector = NetstatsCollector(self.device, apk) # pylint: disable=attribute-defined-outside-init
- self.collector.setup(force=self.force_reinstall)
-
- def setup(self, context):
- self.collector.reset(sites=self.packages, period=self.period)
-
- def start(self, context):
- self.collector.start()
-
- def stop(self, context):
- self.collector.stop()
-
- def update_result(self, context):
- outfile = os.path.join(context.output_directory, 'netstats.csv')
- self.collector.get_data(outfile)
- context.add_artifact('netstats', outfile, kind='data')
- with open(outfile, 'rb') as fh:
- reader = csv.reader(fh)
- metrics = reader.next()
- data = [c for c in izip_longest(*list(reader))]
- for name, values in zip(metrics, data):
- value = sum(map(int, [v for v in values if v]))
- context.add_metric(name, value, units='bytes')
-
- def finalize(self, context):
- if self.uninstall_on_completion:
- self.collector.teardown()
-
diff --git a/wlauto/instrumentation/netstats/netstats.apk b/wlauto/instrumentation/netstats/netstats.apk
deleted file mode 100644
index 8b93da6e..00000000
--- a/wlauto/instrumentation/netstats/netstats.apk
+++ /dev/null
Binary files differ
diff --git a/wlauto/instrumentation/perf/LICENSE b/wlauto/instrumentation/perf/LICENSE
deleted file mode 100644
index 99f70b0d..00000000
--- a/wlauto/instrumentation/perf/LICENSE
+++ /dev/null
@@ -1,9 +0,0 @@
-perf binaries included here are part of the Linux kernel and are distributed
-under GPL version 2; The full text of the license may be viewed here:
-
-http://www.gnu.org/licenses/gpl-2.0.html
-
-Source for these binaries is part of Linux Kernel source tree. This may be obtained
-from Linaro here:
-
-https://git.linaro.org/arm/big.LITTLE/mp.git
diff --git a/wlauto/instrumentation/perf/__init__.py b/wlauto/instrumentation/perf/__init__.py
deleted file mode 100644
index f1c7bc7b..00000000
--- a/wlauto/instrumentation/perf/__init__.py
+++ /dev/null
@@ -1,179 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0613,E1101,W0201
-import os
-import re
-import itertools
-
-
-from wlauto import Instrument, Executable, Parameter
-from wlauto.exceptions import ConfigError
-from wlauto.utils.misc import ensure_file_directory_exists as _f
-from wlauto.utils.types import list_or_string, list_of_strs
-
-PERF_COMMAND_TEMPLATE = '{} stat {} {} sleep 1000 > {} 2>&1 '
-
-DEVICE_RESULTS_FILE = '/data/local/perf_results.txt'
-HOST_RESULTS_FILE_BASENAME = 'perf.txt'
-
-PERF_COUNT_REGEX = re.compile(r'^(CPU\d+)?\s*(\d+)\s*(.*?)\s*(\[\s*\d+\.\d+%\s*\])?\s*$')
-
-
-class PerfInstrument(Instrument):
-
- name = 'perf'
- description = """
- Perf is a Linux profiling with performance counters.
-
- Performance counters are CPU hardware registers that count hardware events
- such as instructions executed, cache-misses suffered, or branches
- mispredicted. They form a basis for profiling applications to trace dynamic
- control flow and identify hotspots.
-
- pref accepts options and events. If no option is given the default '-a' is
- used. For events, the default events are migrations and cs. They both can
- be specified in the config file.
-
- Events must be provided as a list that contains them and they will look like
- this ::
-
- perf_events = ['migrations', 'cs']
-
- Events can be obtained by typing the following in the command line on the
- device ::
-
- perf list
-
- Whereas options, they can be provided as a single string as following ::
-
- perf_options = '-a -i'
-
- Options can be obtained by running the following in the command line ::
-
- man perf-record
- """
-
- parameters = [
- Parameter('events', kind=list_of_strs, default=['migrations', 'cs'],
- global_alias='perf_events',
- constraint=(lambda x: x, 'must not be empty.'),
- description="""Specifies the events to be counted."""),
- Parameter('optionstring', kind=list_or_string, default='-a',
- global_alias='perf_options',
- description="""Specifies options to be used for the perf command. This
- may be a list of option strings, in which case, multiple instances of perf
- will be kicked off -- one for each option string. This may be used to e.g.
- collected different events from different big.LITTLE clusters.
- """),
- Parameter('labels', kind=list_of_strs, default=None,
- global_alias='perf_labels',
- description="""Provides labels for pref output. If specified, the number of
- labels must match the number of ``optionstring``\ s.
- """),
- Parameter('force_install', kind=bool, default=False,
- description="""
- always install perf binary even if perf is already present on the device.
- """),
- ]
-
- def on_run_init(self, context):
- binary = context.resolver.get(Executable(self, self.device.abi, 'perf'))
- if self.force_install:
- self.binary = self.device.install(binary)
- else:
- self.binary = self.device.install_if_needed(binary)
- self.commands = self._build_commands()
-
- def setup(self, context):
- self._clean_device()
-
- def start(self, context):
- for command in self.commands:
- self.device.kick_off(command)
-
- def stop(self, context):
- as_root = self.device.os == 'android'
- self.device.killall('sleep', as_root=as_root)
-
- def update_result(self, context):
- for label in self.labels:
- device_file = self._get_device_outfile(label)
- host_relpath = os.path.join('perf', os.path.basename(device_file))
- host_file = _f(os.path.join(context.output_directory, host_relpath))
- self.device.pull(device_file, host_file)
- context.add_iteration_artifact(label, kind='raw', path=host_relpath)
- with open(host_file) as fh:
- in_results_section = False
- for line in fh:
- if 'Performance counter stats' in line:
- in_results_section = True
- fh.next() # skip the following blank line
- if in_results_section:
- if not line.strip(): # blank line
- in_results_section = False
- break
- else:
- line = line.split('#')[0] # comment
- match = PERF_COUNT_REGEX.search(line)
- if match:
- classifiers = {}
- cpu = match.group(1)
- if cpu is not None:
- classifiers['cpu'] = int(cpu.replace('CPU', ''))
- count = int(match.group(2))
- metric = '{}_{}'.format(label, match.group(3))
- context.result.add_metric(metric, count, classifiers=classifiers)
-
- def teardown(self, context): # pylint: disable=R0201
- self._clean_device()
-
- def validate(self):
- if isinstance(self.optionstring, list):
- self.optionstrings = self.optionstring
- else:
- self.optionstrings = [self.optionstring]
- if isinstance(self.events[0], list): # we know events are non-empty due to param constraint pylint: disable=access-member-before-definition
- self.events = self.events
- else:
- self.events = [self.events]
- if not self.labels: # pylint: disable=E0203
- self.labels = ['perf_{}'.format(i) for i in xrange(len(self.optionstrings))]
- if len(self.labels) != len(self.optionstrings):
- raise ConfigError('The number of labels must match the number of optstrings provided for perf.')
-
- def _build_commands(self):
- events = itertools.cycle(self.events)
- commands = []
- for opts, label in itertools.izip(self.optionstrings, self.labels):
- commands.append(self._build_perf_command(opts, events.next(), label))
- return commands
-
- def _clean_device(self):
- for label in self.labels:
- filepath = self._get_device_outfile(label)
- self.device.remove(filepath)
-
- def _get_device_outfile(self, label):
- return self.device.path.join(self.device.working_directory, '{}.out'.format(label))
-
- def _build_perf_command(self, options, events, label):
- event_string = ' '.join(['-e {}'.format(e) for e in events])
- command = PERF_COMMAND_TEMPLATE.format(self.binary,
- options or '',
- event_string,
- self._get_device_outfile(label))
- return command
diff --git a/wlauto/instrumentation/perf/bin/arm64/perf b/wlauto/instrumentation/perf/bin/arm64/perf
deleted file mode 100755
index 31d054ee..00000000
--- a/wlauto/instrumentation/perf/bin/arm64/perf
+++ /dev/null
Binary files differ
diff --git a/wlauto/instrumentation/perf/bin/armeabi/perf b/wlauto/instrumentation/perf/bin/armeabi/perf
deleted file mode 100755
index dcabee6c..00000000
--- a/wlauto/instrumentation/perf/bin/armeabi/perf
+++ /dev/null
Binary files differ
diff --git a/wlauto/instrumentation/pmu_logger/__init__.py b/wlauto/instrumentation/pmu_logger/__init__.py
deleted file mode 100644
index 2a4de746..00000000
--- a/wlauto/instrumentation/pmu_logger/__init__.py
+++ /dev/null
@@ -1,152 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0613,E1101,W0201
-import os
-import re
-import csv
-
-from wlauto import Instrument, settings, Parameter
-from wlauto.instrumentation import instrument_is_installed
-from wlauto.exceptions import ConfigError
-from wlauto.utils.types import boolean
-
-
-NUMBER_OF_CCI_PMU_COUNTERS = 4
-DEFAULT_EVENTS = ['0x63', '0x6A', '0x83', '0x8A']
-DEFAULT_PERIOD = 10 # in jiffies
-
-CPL_BASE = '/sys/kernel/debug/cci_pmu_logger/'
-CPL_CONTROL_FILE = CPL_BASE + 'control'
-CPL_PERIOD_FILE = CPL_BASE + 'period_jiffies'
-
-DRIVER = 'pmu_logger.ko'
-
-REGEX = re.compile(r'(\d+(?:\.\d+)?):\s+bprint:.*Cycles:\s*(\S+)\s*Counter_0:\s*(\S+)\s*Counter_1:\s*(\S+)\s*Counter_2:\s*(\S+)\s*Counter_3:\s*(\S+)')
-
-
-class CciPmuLogger(Instrument):
-
- name = "cci_pmu_logger"
- description = """
- This instrument allows collecting CCI counter data.
-
- It relies on the pmu_logger.ko kernel driver, the source for which is
- included with Workload Automation (see inside ``wlauto/external`` directory).
- You will need to build this against your specific kernel. Once compiled, it needs
- to be placed in the dependencies directory (usually ``~/.workload_uatomation/dependencies``).
-
- .. note:: When compling pmu_logger.ko for a new hardware platform, you may need to
- modify CCI_BASE inside pmu_logger.c to contain the base address of where
- CCI is mapped in memory on your device.
-
- This instrument relies on ``trace-cmd`` instrument to also be enabled. You should enable
- at least ``'bprint'`` trace event.
-
- """
-
- parameters = [
- Parameter('events', kind=list, default=DEFAULT_EVENTS,
- global_alias='cci_pmu_events',
- description="""
- A list of strings, each representing an event to be counted. The length
- of the list cannot exceed the number of PMU counters available (4 in CCI-400).
- If this is not specified, shareable read transactions and snoop hits on both
- clusters will be counted by default. E.g. ``['0x63', '0x83']``.
- """),
- Parameter('event_labels', kind=list, default=[],
- global_alias='cci_pmu_event_labels',
- description="""
- A list of labels to be used when reporting PMU counts. If specified,
- this must be of the same length as ``cci_pmu_events``. If not specified,
- events will be labeled "event_<event_number>".
- """),
- Parameter('period', kind=int, default=10,
- global_alias='cci_pmu_period',
- description='The period (in jiffies) between counter reads.'),
- Parameter('install_module', kind=boolean, default=True,
- global_alias='cci_pmu_install_module',
- description="""
- Specifies whether pmu_logger has been compiled as a .ko module that needs
- to be installed by the instrument. (.ko binary must be in {}). If this is set
- to ``False``, it will be assumed that pmu_logger has been compiled into the kernel,
- or that it has been installed prior to the invocation of WA.
- """.format(settings.dependencies_directory)),
- ]
-
- def on_run_init(self, context):
- if self.install_module:
- self.device_driver_file = self.device.path.join(self.device.working_directory, DRIVER)
- host_driver_file = os.path.join(settings.dependencies_directory, DRIVER)
- self.device.push(host_driver_file, self.device_driver_file)
-
- def setup(self, context):
- if self.install_module:
- self.device.execute('insmod {}'.format(self.device_driver_file), check_exit_code=False)
- self.device.write_value(CPL_PERIOD_FILE, self.period)
- for i, event in enumerate(self.events):
- counter = CPL_BASE + 'counter{}'.format(i)
- self.device.write_value(counter, event, verify=False)
-
- def start(self, context):
- self.device.write_value(CPL_CONTROL_FILE, 1, verify=False)
-
- def stop(self, context):
- self.device.write_value(CPL_CONTROL_FILE, 1, verify=False)
-
- # Doing result processing inside teardown because need to make sure that
- # trace-cmd has processed its results and generated the trace.txt
- def teardown(self, context):
- trace_file = os.path.join(context.output_directory, 'trace.txt')
- rows = [['timestamp', 'cycles'] + self.event_labels]
- with open(trace_file) as fh:
- for line in fh:
- match = REGEX.search(line)
- if match:
- rows.append([
- float(match.group(1)),
- int(match.group(2), 16),
- int(match.group(3), 16),
- int(match.group(4), 16),
- int(match.group(5), 16),
- int(match.group(6), 16),
- ])
- output_file = os.path.join(context.output_directory, 'cci_counters.txt')
- with open(output_file, 'wb') as wfh:
- writer = csv.writer(wfh)
- writer.writerows(rows)
- context.add_iteration_artifact('cci_counters', path='cci_counters.txt', kind='data',
- description='CCI PMU counter data.')
-
- # summary metrics
- sums = map(sum, zip(*(r[1:] for r in rows[1:])))
- labels = ['cycles'] + self.event_labels
- for label, value in zip(labels, sums):
- context.result.add_metric('cci ' + label, value, lower_is_better=True)
-
- # actual teardown
- if self.install_module:
- self.device.execute('rmmod pmu_logger', check_exit_code=False)
-
- def validate(self):
- if not instrument_is_installed('trace-cmd'):
- raise ConfigError('To use cci_pmu_logger, trace-cmd instrument must also be enabled.')
- if not self.event_labels: # pylint: disable=E0203
- self.event_labels = ['event_{}'.format(e) for e in self.events]
- elif len(self.events) != len(self.event_labels):
- raise ConfigError('cci_pmu_events and cci_pmu_event_labels must be of the same length.')
- if len(self.events) > NUMBER_OF_CCI_PMU_COUNTERS:
- raise ConfigError('The number cci_pmu_counters must be at most {}'.format(NUMBER_OF_CCI_PMU_COUNTERS))
diff --git a/wlauto/instrumentation/screenon/__init__.py b/wlauto/instrumentation/screenon/__init__.py
deleted file mode 100644
index 0d0b425d..00000000
--- a/wlauto/instrumentation/screenon/__init__.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=unused-argument
-import time
-import threading
-
-from wlauto import Instrument, Parameter
-from wlauto.exceptions import InstrumentError
-
-
-class ScreenMonitor(threading.Thread):
-
- def __init__(self, device, polling_period):
- super(ScreenMonitor, self).__init__()
- self.device = device
- self.polling_period = polling_period
- self.stop_event = threading.Event()
-
- def run(self):
- last_poll = time.time()
- while not self.stop_event.is_set():
- time.sleep(1)
- if (time.time() - last_poll) >= self.polling_period:
- self.device.ensure_screen_is_on()
- last_poll = time.time()
-
- def stop(self):
- self.stop_event.set()
- self.join()
-
-
-class ScreenOnInstrument(Instrument):
- # pylint: disable=attribute-defined-outside-init
-
- name = 'screenon'
-
- description = """
- Ensure screen is on before each iteration on Android devices.
-
- A very basic instrument that checks that the screen is on on android devices. Optionally,
- it call poll the device periodically to ensure that the screen is still on.
-
- """
-
- parameters = [
- Parameter('polling_period', kind=int,
- description="""
- Set this to a non-zero value to enable periodic (every
- ``polling_period`` seconds) polling of the screen on
- the device to ensure it is on during a run.
- """),
- ]
-
- def initialize(self, context):
- self.monitor = None
- if self.device.os != 'android':
- raise InstrumentError('screenon instrument currently only supports Android devices.')
-
- def slow_setup(self, context): # slow to run before most other setups
- self.device.ensure_screen_is_on()
- if self.polling_period:
- self.monitor = ScreenMonitor(self.device, self.polling_period)
- self.monitor.start()
-
- def teardown(self, context):
- if self.polling_period:
- self.monitor.stop()
-
diff --git a/wlauto/instrumentation/streamline/__init__.py b/wlauto/instrumentation/streamline/__init__.py
deleted file mode 100644
index cfd2614a..00000000
--- a/wlauto/instrumentation/streamline/__init__.py
+++ /dev/null
@@ -1,278 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0613,E1101
-import os
-import signal
-import shutil
-import subprocess
-import logging
-import re
-
-from wlauto import settings, Instrument, Parameter, ResourceGetter, GetterPriority, File
-from wlauto.exceptions import InstrumentError, DeviceError, ResourceError
-from wlauto.utils.misc import ensure_file_directory_exists as _f, which
-from wlauto.utils.types import boolean
-from wlauto.utils.log import StreamLogger, LogWriter, LineLogWriter
-
-
-SESSION_TEXT_TEMPLATE = ('<?xml version="1.0" encoding="US-ASCII" ?>'
- '<session'
- ' version="1"'
- ' output_path="x"'
- ' call_stack_unwinding="no"'
- ' parse_debug_info="no"'
- ' high_resolution="no"'
- ' buffer_mode="streaming"'
- ' sample_rate="none"'
- ' duration="0"'
- ' target_host="{}"'
- ' target_port="{}"'
- ' energy_cmd_line="{}">'
- '</session>')
-
-VERSION_REGEX = re.compile(r'Streamline (.*?) ')
-
-
-class StreamlineResourceGetter(ResourceGetter):
-
- name = 'streamline_resource'
- resource_type = 'file'
- priority = GetterPriority.environment + 1 # run before standard enviroment resolvers.
-
- dependencies_directory = os.path.join(settings.dependencies_directory, 'streamline')
-
- def get(self, resource, **kwargs):
- if resource.owner.name != 'streamline':
- return None
- test_path = _f(os.path.join(self.dependencies_directory, resource.path))
- if os.path.isfile(test_path):
- return test_path
- #test_path = _f(os.path.join(self.old_dependencies_directory, resource.path))
- #if os.path.isfile(test_path):
- # return test_path
-
-
-def _instantiate(resolver):
- return StreamlineResourceGetter(resolver)
-
-
-class StreamlineInstrument(Instrument):
-
- name = 'streamline'
- description = """
- Collect Streamline traces from the device.
-
- .. note:: This instrument supports streamline that comes with DS-5 5.17 and later
- earlier versions of streamline may not work correctly (or at all).
-
- This Instrument allows collecting streamline traces (such as PMU counter values) from
- the device. It assumes you have DS-5 (which Streamline is part of) installed on your
- system, and that streamline command is somewhere in PATH.
-
- Streamline works by connecting to gator service on the device. gator comes in two parts
- a driver (gator.ko) and daemon (gatord). The driver needs to be compiled against your
- kernel and both driver and daemon need to be compatible with your version of Streamline.
- The best way to ensure compatibility is to build them from source which came with your
- DS-5. gator source can be found in ::
-
- /usr/local/DS-5/arm/gator
-
- (the exact path may vary depending of where you have installed DS-5.) Please refer to the
- README the accompanies the source for instructions on how to build it.
-
- Once you have built the driver and the daemon, place the binaries into your
- ~/.workload_automation/streamline/ directory (if you haven't tried running WA with
- this instrument before, the streamline/ subdirectory might not exist, in which
- case you will need to create it.
-
- In order to specify which events should be captured, you need to provide a
- configuration.xml for the gator. The easiest way to obtain this file is to export it
- from event configuration dialog in DS-5 streamline GUI. The file should be called
- "configuration.xml" and it be placed in the same directory as the gator binaries.
- """
- parameters = [
- Parameter('port', default='8080',
- description='Specifies the port on which streamline will connect to gator'),
- Parameter('configxml', default=None,
- description='streamline configuration XML file to be used. This must be '
- 'an absolute path, though it may count the user home symbol (~)'),
- Parameter('report', kind=boolean, default=False, global_alias='streamline_report_csv',
- description='Specifies whether a report should be generated from streamline data.'),
- Parameter('report_options', kind=str, default='-format csv',
- description='A string with options that will be added to streamline -report command.'),
- ]
-
- daemon = 'gatord'
- driver = 'gator.ko'
- configuration_file_name = 'configuration.xml'
-
- def __init__(self, device, **kwargs):
- super(StreamlineInstrument, self).__init__(device, **kwargs)
- self.streamline = None
- self.session_file = None
- self.capture_file = None
- self.analysis_file = None
- self.report_file = None
- self.configuration_file = None
- self.on_device_config = None
- self.daemon_process = None
- self.resource_getter = None
-
- self.host_daemon_file = None
- self.host_driver_file = None
- self.device_driver_file = None
-
- self._check_has_valid_display()
-
- def validate(self):
- if not which('streamline'):
- raise InstrumentError('streamline not in PATH. Cannot enable Streamline tracing.')
- p = subprocess.Popen('streamline --version 2>&1', stdout=subprocess.PIPE, shell=True)
- out, _ = p.communicate()
- match = VERSION_REGEX.search(out)
- if not match:
- raise InstrumentError('Could not find streamline version.')
- version_tuple = tuple(map(int, match.group(1).split('.')))
- if version_tuple < (5, 17):
- raise InstrumentError('Need DS-5 v5.17 or greater; found v{}'.format(match.group(1)))
-
- def initialize(self, context):
- self.resource_getter = _instantiate(context.resolver)
- self.resource_getter.register()
-
- try:
- self.host_daemon_file = context.resolver.get(File(self, self.daemon))
- self.logger.debug('Using daemon from {}.'.format(self.host_daemon_file))
- self.device.killall(self.daemon) # in case a version is already running
- self.device.install(self.host_daemon_file)
- except ResourceError:
- self.logger.debug('Using on-device daemon.')
-
- try:
- self.host_driver_file = context.resolver.get(File(self, self.driver))
- self.logger.debug('Using driver from {}.'.format(self.host_driver_file))
- self.device_driver_file = self.device.install(self.host_driver_file)
- except ResourceError:
- self.logger.debug('Using on-device driver.')
-
- try:
- self.configuration_file = (os.path.expanduser(self.configxml or '') or
- context.resolver.get(File(self, self.configuration_file_name)))
- self.logger.debug('Using {}'.format(self.configuration_file))
- self.on_device_config = self.device.path.join(self.device.working_directory, 'configuration.xml')
- shutil.copy(self.configuration_file, settings.meta_directory)
- except ResourceError:
- self.logger.debug('No configuration file was specfied.')
-
- caiman_path = subprocess.check_output('which caiman', shell=True).strip() # pylint: disable=E1103
- self.session_file = os.path.join(context.host_working_directory, 'streamline_session.xml')
- with open(self.session_file, 'w') as wfh:
- if self.device.os == "android":
- wfh.write(SESSION_TEXT_TEMPLATE.format('127.0.0.1', self.port, caiman_path))
- else:
- wfh.write(SESSION_TEXT_TEMPLATE.format(self.device.host, self.port, caiman_path))
-
- if self.configuration_file:
- self.device.push(self.configuration_file, self.on_device_config)
- self._initialize_daemon()
-
- def setup(self, context):
- self.capture_file = _f(os.path.join(context.output_directory, 'streamline', 'capture.apc'))
- self.report_file = _f(os.path.join(context.output_directory, 'streamline', 'streamline.csv'))
-
- def start(self, context):
- command = ['streamline', '-capture', self.session_file, '-output', self.capture_file]
- self.streamline = subprocess.Popen(command,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- stdin=subprocess.PIPE,
- preexec_fn=os.setpgrp)
- outlogger = StreamLogger('streamline', self.streamline.stdout, klass=LineLogWriter)
- errlogger = StreamLogger('streamline', self.streamline.stderr, klass=LineLogWriter)
- outlogger.start()
- errlogger.start()
-
- def stop(self, context):
- os.killpg(self.streamline.pid, signal.SIGTERM)
-
- def update_result(self, context):
- if self.report:
- self.logger.debug('Creating report...')
- command = ['streamline', '-report', self.capture_file, '-output', self.report_file]
- command += self.report_options.split()
- _run_streamline_command(command)
- context.add_artifact('streamlinecsv', self.report_file, 'data')
-
- def teardown(self, context):
- self._kill_daemon()
- self.device.remove(self.on_device_config)
-
- def _check_has_valid_display(self): # pylint: disable=R0201
- reason = None
- if os.name == 'posix' and not os.getenv('DISPLAY'):
- reason = 'DISPLAY is not set.'
- else:
- p = subprocess.Popen('xhost', stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- _, error = p.communicate()
- if p.returncode:
- reason = 'Invalid DISPLAY; xhost returned: "{}".'.format(error.strip()) # pylint: disable=E1103
- if reason:
- raise InstrumentError('{}\nstreamline binary requires a valid display server to be running.'.format(reason))
-
- def _initialize_daemon(self):
- if self.device_driver_file:
- try:
- self.device.execute('insmod {}'.format(self.device_driver_file))
- except DeviceError, e:
- if 'File exists' not in e.message:
- raise
- self.logger.debug('Driver was already installed.')
- self._start_daemon()
- if self.device.os == "android":
- port_spec = 'tcp:{}'.format(self.port)
- self.device.forward_port(port_spec, port_spec)
-
- def _start_daemon(self):
- self.logger.debug('Starting gatord')
- self.device.killall('gatord', as_root=True)
- if self.configuration_file:
- command = '{} -c {}'.format(self.daemon, self.on_device_config)
- else:
- command = '{}'.format(self.daemon)
-
- self.daemon_process = self.device.execute(command, as_root=True, background=True)
- outlogger = StreamLogger('gatord', self.daemon_process.stdout)
- errlogger = StreamLogger('gatord', self.daemon_process.stderr, logging.ERROR)
- outlogger.start()
- errlogger.start()
- if self.daemon_process.poll() is not None:
- # If adb returned, something went wrong.
- raise InstrumentError('Could not start gatord.')
-
- def _kill_daemon(self):
- self.logger.debug('Killing daemon process.')
- self.daemon_process.kill()
-
-
-def _run_streamline_command(command):
- streamline = subprocess.Popen(command,
- stdout=subprocess.PIPE,
- stderr=subprocess.PIPE,
- stdin=subprocess.PIPE)
- output, error = streamline.communicate()
- LogWriter('streamline').write(output).close()
- LogWriter('streamline').write(error).close()
diff --git a/wlauto/instrumentation/systrace/__init__.py b/wlauto/instrumentation/systrace/__init__.py
deleted file mode 100644
index d504d13c..00000000
--- a/wlauto/instrumentation/systrace/__init__.py
+++ /dev/null
@@ -1,154 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=W0613,attribute-defined-outside-init
-import os
-import subprocess
-import shutil
-
-from wlauto import Instrument, Parameter
-from wlauto.utils.types import list_of_strings, boolean
-from wlauto.utils.misc import check_output
-from wlauto.exceptions import ConfigError, InstrumentError
-
-
-class systrace(Instrument):
- name = 'systrace'
- description = """
- This instrument uses systrace.py from the android SDK to dump atrace
- output.
-
- Note: This is unlikely to work on devices that have an android build built
- before 15-May-2015. Before this date there was a bug with running
- atrace asynchronously.
-
- From developer.android.com:
- The Systrace tool helps analyze the performance of your application by
- capturing and displaying execution times of your applications processes
- and other Android system processes. The tool combines data from the
- Android kernel such as the CPU scheduler, disk activity, and application
- threads to generate an HTML report that shows an overall picture of an
- Android device's system processes for a given period of time.
- """
- parameters = [
- Parameter('buffer_size', kind=int, default=1024,
- description="""
- Use a trace buffer size of N kilobytes. This option lets you
- limit the total size of the data collected during a trace.
- """),
- Parameter('use_circular_buffer', kind=boolean, default=False,
- description="""
- When true trace data will be put into a circular buffer such
- that when it overflows it will start overwriting the beginning
- of the buffer.
- """),
- Parameter('kernel_functions', kind=list_of_strings,
- description="""
- Specify the names of kernel functions to trace.
- """),
- Parameter('categories', kind=list_of_strings,
- default=["freq", "sched"],
- description="""
- A list of the categories you wish to trace.
- """),
- Parameter('app_names', kind=list_of_strings,
- description="""
- Enable tracing for applications, specified as a
- comma-separated list of package names. The apps must contain
- tracing instrumentation calls from the Trace class. For more
- information, see
- http://developer.android.com/tools/debugging/systrace.html#app-trace
- """),
- Parameter("ignore_signals", kind=boolean, default=False,
- description="""
- This will cause atrace to ignore ``SIGHUP``, ``SIGINT``,
- ``SIGQUIT`` and ``SIGTERM``.
- """),
- Parameter("compress_trace", kind=boolean, default=True,
- description="""
- Compresses atrace output. This *greatly* decreases the time
- it takes to pull results from a device but the resulting txt
- file is not human readable.
- """)
- ]
-
- def initialize(self, context):
- cmd_options = {}
- if context.device.get_sdk_version() >= 23:
- # Set up command line options
- if self.app_names:
- cmd_options["-a"] = ",".join(self.app_names)
- if self.buffer_size:
- cmd_options["-b"] = self.buffer_size
- if self.use_circular_buffer:
- cmd_options["-c"] = None
- if self.kernel_functions:
- cmd_options["-k"] = ",".join(self.kernel_functions)
- if self.ignore_signals:
- cmd_options["-n"] = None
-
- # Generate commands
- opt_string = ''.join(['{} {} '.format(name, value or "")
- for name, value in cmd_options.iteritems()])
- self.start_cmd = "atrace --async_start {} {}".format(opt_string,
- " ".join(self.categories))
- self.output_file = os.path.join(self.device.working_directory, "atrace.txt")
- self.stop_cmd = "atrace --async_stop {} > {}".format("-z" if self.compress_trace else "",
- self.output_file)
-
- # Check if provided categories are available on the device
- available_categories = [cat.strip().split(" - ")[0] for cat in
- context.device.execute("atrace --list_categories").splitlines()]
- for category in self.categories:
- if category not in available_categories:
- raise ConfigError("Unknown category '{}'; Must be one of: {}"
- .format(category, available_categories))
- else:
- raise InstrumentError("Only android devices with an API level >= 23 can use systrace properly")
-
- def setup(self, context):
- self.device.execute("atrace --async_dump")
-
- def start(self, context):
- result = self.device.execute(self.start_cmd)
- if "error" in result:
- raise InstrumentError(result)
-
- def stop(self, context):
- self.p = self.device.execute(self.stop_cmd, background=True)
-
- def update_result(self, context): # pylint: disable=r0201
- self.logger.debug("Waiting for atrace to finish dumping data")
- self.p.wait()
- context.device.pull_file(self.output_file, context.output_directory)
- cmd = "python {} --from-file={} -o {}"
- cmd = cmd.format(os.path.join(os.environ['ANDROID_HOME'],
- "platform-tools/systrace/systrace.py"),
- os.path.join(context.output_directory, "atrace.txt"),
- os.path.join(context.output_directory, "systrace.html"))
- self.logger.debug(cmd)
- _, error = check_output(cmd.split(" "), timeout=10)
- if error:
- raise InstrumentError(error)
-
- context.add_iteration_artifact('atrace.txt',
- path=os.path.join(context.output_directory,
- "atace.txt"),
- kind='data',
- description='atrace dump.')
- context.add_iteration_artifact('systrace.html',
- path=os.path.join(context.output_directory,
- "systrace.html"),
- kind='data',
- description='Systrace HTML report.')
diff --git a/wlauto/instrumentation/trace_cmd/LICENSE b/wlauto/instrumentation/trace_cmd/LICENSE
deleted file mode 100644
index 9d46c1a5..00000000
--- a/wlauto/instrumentation/trace_cmd/LICENSE
+++ /dev/null
@@ -1,39 +0,0 @@
-Included trace-cmd binaries are Free Software distributed under GPLv2:
-
-/*
- * Copyright (C) 2009, 2010 Red Hat Inc, Steven Rostedt <srostedt@redhat.com>
- *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; version 2 of the License (not later!)
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License
- * along with this program; if not, see <http://www.gnu.org/licenses>
- *
- * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
- */
-
-The full text of the license may be viewed here:
-
-http://www.gnu.org/licenses/gpl-2.0.html
-
-Source code for trace-cmd may be obtained here:
-
-git://git.kernel.org/pub/scm/linux/kernel/git/rostedt/trace-cmd.git
-
-Binaries included here contain modifications by ARM that, at the time of writing,
-have not yet made it into the above repository. The patches for these modifications
-are available here:
-
-http://article.gmane.org/gmane.linux.kernel/1869111
-http://article.gmane.org/gmane.linux.kernel/1869112
-
-
-
diff --git a/wlauto/instrumentation/trace_cmd/__init__.py b/wlauto/instrumentation/trace_cmd/__init__.py
deleted file mode 100644
index 7a09202e..00000000
--- a/wlauto/instrumentation/trace_cmd/__init__.py
+++ /dev/null
@@ -1,346 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0613,E1101
-from __future__ import division
-import os
-import time
-import subprocess
-from collections import defaultdict
-
-from wlauto import Instrument, Parameter, Executable
-from wlauto.exceptions import InstrumentError, ConfigError, DeviceError
-from wlauto.core import signal
-from wlauto.utils.types import boolean
-
-OUTPUT_TRACE_FILE = 'trace.dat'
-OUTPUT_TEXT_FILE = '{}.txt'.format(os.path.splitext(OUTPUT_TRACE_FILE)[0])
-TIMEOUT = 180
-
-
-class TraceCmdInstrument(Instrument):
-
- name = 'trace-cmd'
- description = """
- trace-cmd is an instrument which interacts with Ftrace Linux kernel internal
- tracer
-
- From trace-cmd man page:
-
- trace-cmd command interacts with the Ftrace tracer that is built inside the
- Linux kernel. It interfaces with the Ftrace specific files found in the
- debugfs file system under the tracing directory.
-
- trace-cmd reads a list of events it will trace, which can be specified in
- the config file as follows ::
-
- trace_events = ['irq*', 'power*']
-
- If no event is specified in the config file, trace-cmd traces the following events:
-
- - sched*
- - irq*
- - power*
- - cpufreq_interactive*
-
- The list of available events can be obtained by rooting and running the following
- command line on the device ::
-
- trace-cmd list
-
- You may also specify ``trace_buffer_size`` setting which must be an integer that will
- be used to set the ftrace buffer size. It will be interpreted as KB::
-
- trace_cmd_buffer_size = 8000
-
- The maximum buffer size varies from device to device, but there is a maximum and trying
-    to set buffer size beyond that will fail. If you plan on collecting a lot of trace over
- long periods of time, the buffer size will not be enough and you will only get trace for
- the last portion of your run. To deal with this you can set the ``trace_mode`` setting to
- ``'record'`` (the default is ``'start'``)::
-
- trace_cmd_mode = 'record'
-
- This will cause trace-cmd to trace into file(s) on disk, rather than the buffer, and so the
- limit for the max size of the trace is set by the storage available on device. Bear in mind
-    that ``'record'`` mode *is* more intrusive than the default, so if you do not plan on
- generating a lot of trace, it is best to use the default ``'start'`` mode.
-
-    .. note:: Mode names correspond to the underlying trace-cmd executable's command used to
- implement them. You can find out more about what is happening in each case from
- trace-cmd documentation: https://lwn.net/Articles/341902/.
-
- This instrument comes with an Android trace-cmd binary that will be copied and used on the
- device, however post-processing will be done on-host and you must have trace-cmd installed and
- in your path. On Ubuntu systems, this may be done with::
-
- sudo apt-get install trace-cmd
-
- """
-
- parameters = [
- Parameter('events', kind=list, default=['sched*', 'irq*', 'power*', 'cpufreq_interactive*'],
- global_alias='trace_events',
- description="""
- Specifies the list of events to be traced. Each event in the list will be passed to
- trace-cmd with -e parameter and must be in the format accepted by trace-cmd.
- """),
- Parameter('mode', default='start', allowed_values=['start', 'record'],
- global_alias='trace_mode',
- description="""
- Trace can be collected using either 'start' or 'record' trace-cmd
- commands. In 'start' mode, trace will be collected into the ftrace buffer;
- in 'record' mode, trace will be written into a file on the device's file
- system. 'start' mode is (in theory) less intrusive than 'record' mode, however
- it is limited by the size of the ftrace buffer (which is configurable --
- see ``buffer_size`` -- but only up to a point) and that may overflow
- for long-running workloads, which will result in dropped events.
- """),
- Parameter('buffer_size', kind=int, default=None,
- global_alias='trace_buffer_size',
- description="""
- Attempt to set ftrace buffer size to the specified value (in KB). Default buffer size
- may need to be increased for long-running workloads, or if a large number
- of events have been enabled. Note: there is a maximum size that the buffer can
- be set, and that varies from device to device. Attempting to set buffer size higher
- than this will fail. In that case, this instrument will set the size to the highest
- possible value by going down from the specified size in ``buffer_size_step`` intervals.
- """),
- Parameter('buffer_size_step', kind=int, default=1000,
- global_alias='trace_buffer_size_step',
- description="""
- Defines the decremental step used if the specified ``buffer_size`` could not be set.
-                      This will be subtracted from the buffer size until set succeeds or size is reduced to
- 1MB.
- """),
- Parameter('buffer_size_file', default='/sys/kernel/debug/tracing/buffer_size_kb',
- description="""
-                      Path to the debugfs file that may be used to set ftrace buffer size. This should not need
-                      to be modified for the vast majority of devices.
- """),
- Parameter('report', kind=boolean, default=True,
- description="""
- Specifies whether reporting should be performed once the binary trace has been generated.
- """),
- Parameter('no_install', kind=boolean, default=False,
- description="""
- Do not install the bundled trace-cmd and use the one on the device instead. If there is
- not already a trace-cmd on the device, an error is raised.
-
- """),
- Parameter('report_on_target', kind=boolean, default=False,
- description="""
- When enabled generation of reports will be done host-side because the generated file is
-                      very large. If trace-cmd is not available on the host device this setting can be disabled
- and the report will be generated on the target device.
-
- .. note:: This requires the latest version of trace-cmd to be installed on the host (the
- one in your distribution's repos may be too old).
- """),
- ]
-
- def __init__(self, device, **kwargs):
- super(TraceCmdInstrument, self).__init__(device, **kwargs)
- self.trace_cmd = None
- self.event_string = _build_trace_events(self.events)
- self.output_file = os.path.join(self.device.working_directory, OUTPUT_TRACE_FILE)
- self.temp_trace_file = self.device.path.join(self.device.working_directory, OUTPUT_TRACE_FILE)
-
- def on_run_init(self, context):
- if not self.device.is_rooted:
- raise InstrumentError('trace-cmd instrument cannot be used on an unrooted device.')
- if not self.no_install:
- host_file = context.resolver.get(Executable(self, self.device.abi, 'trace-cmd'))
- self.trace_cmd = self.device.install(host_file)
- else:
- self.trace_cmd = self.device.get_installed("trace-cmd")
- if not self.trace_cmd:
- raise ConfigError('No trace-cmd found on device and no_install=True is specified.')
-
- # Register ourselves as absolute last event before and
- # first after so we can mark the trace at the right time
- signal.connect(self.insert_start_mark, signal.BEFORE_WORKLOAD_EXECUTION, priority=11)
- signal.connect(self.insert_end_mark, signal.AFTER_WORKLOAD_EXECUTION, priority=11)
-
- def setup(self, context):
- if self.mode == 'start':
- if self.buffer_size:
- self._set_buffer_size()
- self.device.execute('{} reset'.format(self.trace_cmd), as_root=True, timeout=180)
- elif self.mode == 'record':
- pass
- else:
- raise ValueError('Bad mode: {}'.format(self.mode)) # should never get here
-
- def very_slow_start(self, context):
- self.start_time = time.time() # pylint: disable=attribute-defined-outside-init
- if self.mode == 'start':
- self.device.execute('{} start {}'.format(self.trace_cmd, self.event_string), as_root=True)
- elif self.mode == 'record':
- self.device.kick_off('{} record -o {} {}'.format(self.trace_cmd, self.output_file, self.event_string))
- else:
- raise ValueError('Bad mode: {}'.format(self.mode)) # should never get here
-
- def stop(self, context):
- self.stop_time = time.time() # pylint: disable=attribute-defined-outside-init
- if self.mode == 'start':
- self.device.execute('{} stop'.format(self.trace_cmd), timeout=60, as_root=True)
- elif self.mode == 'record':
- # There will be a trace-cmd worker process per CPU core plus a main
- # control trace-cmd process. Interrupting the control process will
- # trigger the generation of the single binary trace file.
- trace_cmds = self.device.ps(name=self.trace_cmd)
- if not trace_cmds:
- raise InstrumentError('Could not find running trace-cmd on device.')
- # The workers will have their PPID set to the PID of control.
- parent_map = defaultdict(list)
- for entry in trace_cmds:
- parent_map[entry.ppid].append(entry.pid)
- controls = [v[0] for _, v in parent_map.iteritems()
- if len(v) == 1 and v[0] in parent_map]
- if len(controls) > 1:
- self.logger.warning('More than one trace-cmd instance found; stopping all of them.')
- for c in controls:
- self.device.kill(c, signal='INT', as_root=True)
- else:
- raise ValueError('Bad mode: {}'.format(self.mode)) # should never get here
-
- def update_result(self, context): # NOQA pylint: disable=R0912
- if self.mode == 'start':
- self.device.execute('{} extract -o {}'.format(self.trace_cmd, self.output_file),
- timeout=TIMEOUT, as_root=True)
- elif self.mode == 'record':
- self.logger.debug('Waiting for trace.dat to be generated.')
- while self.device.ps(name=self.trace_cmd):
- time.sleep(2)
- else:
- raise ValueError('Bad mode: {}'.format(self.mode)) # should never get here
-
- # The size of trace.dat will depend on how long trace-cmd was running.
-        # Therefore timeout for the pull command must also be adjusted
- # accordingly.
- self._pull_timeout = (self.stop_time - self.start_time) # pylint: disable=attribute-defined-outside-init
- self.device.pull(self.output_file, context.output_directory, timeout=self._pull_timeout)
- context.add_iteration_artifact('bintrace', OUTPUT_TRACE_FILE, kind='data',
- description='trace-cmd generated ftrace dump.')
-
- local_txt_trace_file = os.path.join(context.output_directory, OUTPUT_TEXT_FILE)
-
- if self.report:
- # To get the output of trace.dat, trace-cmd must be installed
- # By default this is done host-side because the generated file is
- # very large
- if self.report_on_target:
- self._generate_report_on_target(context)
- else:
- self._generate_report_on_host(context)
-
- if os.path.isfile(local_txt_trace_file):
- context.add_iteration_artifact('txttrace', OUTPUT_TEXT_FILE, kind='export',
- description='trace-cmd generated ftrace dump.')
- self.logger.debug('Verifying traces.')
- with open(local_txt_trace_file) as fh:
- for line in fh:
- if 'EVENTS DROPPED' in line:
- self.logger.warning('Dropped events detected.')
- break
- else:
- self.logger.debug('Trace verified.')
- else:
- self.logger.warning('Could not generate trace.txt.')
-
- def teardown(self, context):
- self.device.remove(os.path.join(self.device.working_directory, OUTPUT_TRACE_FILE))
-
- def on_run_end(self, context):
- pass
-
- def validate(self):
- if self.report and not self.report_on_target and os.system('which trace-cmd > /dev/null'):
- raise InstrumentError('trace-cmd is not in PATH; is it installed?')
- if self.buffer_size:
- if self.mode == 'record':
- self.logger.debug('trace_buffer_size specified with record mode; it will be ignored.')
- else:
- try:
- int(self.buffer_size)
- except ValueError:
- raise ConfigError('trace_buffer_size must be an int.')
-
- def insert_start_mark(self, context):
- # trace marker appears in ftrace as an ftrace/print event with TRACE_MARKER_START in info field
- self.device.write_value("/sys/kernel/debug/tracing/trace_marker", "TRACE_MARKER_START", verify=False)
-
- def insert_end_mark(self, context):
- # trace marker appears in ftrace as an ftrace/print event with TRACE_MARKER_STOP in info field
- self.device.write_value("/sys/kernel/debug/tracing/trace_marker", "TRACE_MARKER_STOP", verify=False)
-
- def _set_buffer_size(self):
- target_buffer_size = self.buffer_size
- attempt_buffer_size = target_buffer_size
- buffer_size = 0
- floor = 1000 if target_buffer_size > 1000 else target_buffer_size
- while attempt_buffer_size >= floor:
- self.device.write_value(self.buffer_size_file, attempt_buffer_size, verify=False)
- buffer_size = self.device.get_sysfile_value(self.buffer_size_file, kind=int)
- if buffer_size == attempt_buffer_size:
- break
- else:
- attempt_buffer_size -= self.buffer_size_step
- if buffer_size == target_buffer_size:
- return
- while attempt_buffer_size < target_buffer_size:
- attempt_buffer_size += self.buffer_size_step
- self.device.write_value(self.buffer_size_file, attempt_buffer_size, verify=False)
- buffer_size = self.device.get_sysfile_value(self.buffer_size_file, kind=int)
- if attempt_buffer_size != buffer_size:
- self.logger.warning('Failed to set trace buffer size to {}, value set was {}'.format(target_buffer_size, buffer_size))
- break
-
- def _generate_report_on_target(self, context):
- try:
- trace_file = self.output_file
- txt_trace_file = os.path.join(self.device.working_directory, OUTPUT_TEXT_FILE)
- command = 'trace-cmd report {} > {}'.format(trace_file, txt_trace_file)
- self.device.execute(command)
- self.device.pull(txt_trace_file, context.output_directory, timeout=self._pull_timeout)
- except DeviceError:
- raise InstrumentError('Could not generate TXT report on target.')
-
- def _generate_report_on_host(self, context):
- local_trace_file = os.path.join(context.output_directory, OUTPUT_TRACE_FILE)
- local_txt_trace_file = os.path.join(context.output_directory, OUTPUT_TEXT_FILE)
- command = 'trace-cmd report {} > {}'.format(local_trace_file, local_txt_trace_file)
- self.logger.debug(command)
- if not os.path.isfile(local_trace_file):
- self.logger.warning('Not generating trace.txt, as {} does not exist.'.format(OUTPUT_TRACE_FILE))
- else:
- try:
- process = subprocess.Popen(command, stderr=subprocess.PIPE, shell=True)
- _, error = process.communicate()
- if process.returncode:
- raise InstrumentError('trace-cmd returned non-zero exit code {}'.format(process.returncode))
- if error:
- # logged at debug level, as trace-cmd always outputs some
- # errors that seem benign.
- self.logger.debug(error)
- except OSError:
- raise InstrumentError('Could not find trace-cmd. Please make sure it is installed and is in PATH.')
-
-
-def _build_trace_events(events):
- event_string = ' '.join(['-e {}'.format(e) for e in events])
- return event_string
diff --git a/wlauto/instrumentation/trace_cmd/bin/arm64/trace-cmd b/wlauto/instrumentation/trace_cmd/bin/arm64/trace-cmd
deleted file mode 100755
index 0d025d0d..00000000
--- a/wlauto/instrumentation/trace_cmd/bin/arm64/trace-cmd
+++ /dev/null
Binary files differ
diff --git a/wlauto/instrumentation/trace_cmd/bin/armeabi/trace-cmd b/wlauto/instrumentation/trace_cmd/bin/armeabi/trace-cmd
deleted file mode 100755
index a4456627..00000000
--- a/wlauto/instrumentation/trace_cmd/bin/armeabi/trace-cmd
+++ /dev/null
Binary files differ
diff --git a/wlauto/managers/__init__.py b/wlauto/managers/__init__.py
deleted file mode 100644
index edd97a24..00000000
--- a/wlauto/managers/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/wlauto/managers/android.py b/wlauto/managers/android.py
deleted file mode 100644
index 96f9d09a..00000000
--- a/wlauto/managers/android.py
+++ /dev/null
@@ -1,189 +0,0 @@
-import os
-import sys
-import re
-import time
-import tempfile
-import shutil
-import threading
-
-from wlauto.core.device_manager import DeviceManager
-from wlauto import Parameter, Alias
-from wlauto.utils.types import boolean, regex
-from wlauto.utils.android import adb_command
-from wlauto.exceptions import WorkerThreadError
-
-from devlib.target import AndroidTarget
-
-SCREEN_STATE_REGEX = re.compile('(?:mPowerState|mScreenOn|Display Power: state)=([0-9]+|true|false|ON|OFF)', re.I)
-SCREEN_SIZE_REGEX = re.compile(r'mUnrestrictedScreen=\(\d+,\d+\)\s+(?P<width>\d+)x(?P<height>\d+)')
-
-
-class AndroidDevice(DeviceManager):
-
- name = "android"
- target_type = AndroidTarget
-
- aliases = [
- Alias('generic_android'),
- ]
-
- parameters = [
- Parameter('adb_name', default=None, kind=str,
- description='The unique ID of the device as output by "adb devices".'),
- Parameter('android_prompt', kind=regex, default=re.compile('^.*(shell|root)@.*:/\S* [#$] ', re.MULTILINE), # ##
- description='The format of matching the shell prompt in Android.'),
- Parameter('working_directory', default='/sdcard/wa-working', override=True),
- Parameter('binaries_directory', default='/data/local/tmp', override=True),
- Parameter('package_data_directory', default='/data/data',
- description='Location of of data for an installed package (APK).'),
- Parameter('external_storage_directory', default='/sdcard',
- description='Mount point for external storage.'),
- Parameter('logcat_poll_period', kind=int,
- description="""
- If specified and is not ``0``, logcat will be polled every
- ``logcat_poll_period`` seconds, and buffered on the host. This
- can be used if a lot of output is expected in logcat and the fixed
- logcat buffer on the device is not big enough. The trade off is that
- this introduces some minor runtime overhead. Not set by default.
- """), # ##
- Parameter('enable_screen_check', kind=boolean, default=False,
- description="""
- Specified whether the device should make sure that the screen is on
- during initialization.
- """),
- Parameter('swipe_to_unlock', kind=str, default=None,
- allowed_values=[None, "horizontal", "vertical"],
- description="""
- If set a swipe of the specified direction will be performed.
- This should unlock the screen.
- """), # ##
- ]
-
- def __init__(self, **kwargs):
- super(AndroidDevice, self).__init__(**kwargs)
- self.connection_settings = self._make_connection_settings()
-
- self.platform = self.platform_type(core_names=self.core_names, # pylint: disable=E1102
- core_clusters=self.core_clusters)
-
- self.target = self.target_type(connection_settings=self.connection_settings,
- connect=False,
- platform=self.platform,
- working_directory=self.working_directory,
- executables_directory=self.binaries_directory,)
- self._logcat_poller = None
-
- def connect(self):
- self.target.connect()
-
- def initialize(self, context):
- super(AndroidDevice, self).initialize(context)
- if self.enable_screen_check:
- self.target.ensure_screen_is_on()
- if self.swipe_to_unlock:
- self.target.swipe_to_unlock(direction=self.swipe_to_unlock)
-
- def start(self):
- if self.logcat_poll_period:
- if self._logcat_poller:
- self._logcat_poller.close()
- self._logcat_poller = _LogcatPoller(self, self.logcat_poll_period,
- timeout=self.default_timeout)
- self._logcat_poller.start()
- else:
- self.target.clear_logcat()
-
- def _make_connection_settings(self):
- connection_settings = {}
- connection_settings['device'] = self.adb_name
- return connection_settings
-
- def dump_logcat(self, outfile, filter_spec=None):
- """
- Dump the contents of logcat, for the specified filter spec to the
- specified output file.
- See http://developer.android.com/tools/help/logcat.html
-
- :param outfile: Output file on the host into which the contents of the
- log will be written.
- :param filter_spec: Logcat filter specification.
- see http://developer.android.com/tools/debugging/debugging-log.html#filteringOutput
-
- """
- if self._logcat_poller:
- return self._logcat_poller.write_log(outfile)
- else:
- if filter_spec:
- command = 'logcat -d -s {} > {}'.format(filter_spec, outfile)
- else:
- command = 'logcat -d > {}'.format(outfile)
- return adb_command(self.adb_name, command)
-
-
-class _LogcatPoller(threading.Thread):
-
- join_timeout = 5
-
- def __init__(self, target, period, timeout=None):
- super(_LogcatPoller, self).__init__()
- self.target = target
- self.logger = target.logger
- self.period = period
- self.timeout = timeout
- self.stop_signal = threading.Event()
- self.lock = threading.RLock()
- self.buffer_file = tempfile.mktemp()
- self.last_poll = 0
- self.daemon = True
- self.exc = None
-
- def run(self):
- self.logger.debug('Starting logcat polling.')
- try:
- while True:
- if self.stop_signal.is_set():
- break
- with self.lock:
- current_time = time.time()
- if (current_time - self.last_poll) >= self.period:
- self._poll()
- time.sleep(0.5)
- except Exception: # pylint: disable=W0703
- self.exc = WorkerThreadError(self.name, sys.exc_info())
- self.logger.debug('Logcat polling stopped.')
-
- def stop(self):
- self.logger.debug('Stopping logcat polling.')
- self.stop_signal.set()
- self.join(self.join_timeout)
- if self.is_alive():
- self.logger.error('Could not join logcat poller thread.')
- if self.exc:
- raise self.exc # pylint: disable=E0702
-
- def clear_buffer(self):
- self.logger.debug('Clearing logcat buffer.')
- with self.lock:
- self.target.clear_logcat()
- with open(self.buffer_file, 'w') as _: # NOQA
- pass
-
- def write_log(self, outfile):
- self.logger.debug('Writing logbuffer to {}.'.format(outfile))
- with self.lock:
- self._poll()
- if os.path.isfile(self.buffer_file):
- shutil.copy(self.buffer_file, outfile)
- else: # there was no logcat trace at this time
- with open(outfile, 'w') as _: # NOQA
- pass
-
- def close(self):
- self.logger.debug('Closing logcat poller.')
- if os.path.isfile(self.buffer_file):
- os.remove(self.buffer_file)
-
- def _poll(self):
- with self.lock:
- self.last_poll = time.time()
- self.target.dump_logcat(self.buffer_file, append=True)
diff --git a/wlauto/managers/linux.py b/wlauto/managers/linux.py
deleted file mode 100644
index 3cb4989a..00000000
--- a/wlauto/managers/linux.py
+++ /dev/null
@@ -1,65 +0,0 @@
-from wlauto.core.device_manager import DeviceManager
-from wlauto import Parameter, Alias
-from wlauto.utils.types import boolean
-from wlauto.exceptions import ConfigError
-
-from devlib.target import LinuxTarget
-
-
-class LinuxManager(DeviceManager):
-
- name = "linux"
- target_type = LinuxTarget
-
- aliases = [
- Alias('generic_linux'),
- ]
-
- parameters = [
- Parameter('host', mandatory=True, description='Host name or IP address for the device.'),
- Parameter('username', mandatory=True, description='User name for the account on the device.'),
- Parameter('password', description='Password for the account on the device (for password-based auth).'),
- Parameter('keyfile', description='Keyfile to be used for key-based authentication.'),
- Parameter('port', kind=int, default=22, description='SSH port number on the device.'),
- Parameter('password_prompt', default='[sudo] password',
- description='Prompt presented by sudo when requesting the password.'),
- Parameter('use_telnet', kind=boolean, default=False,
- description='Optionally, telnet may be used instead of ssh, though this is discouraged.'),
- Parameter('boot_timeout', kind=int, default=120,
- description='How long to try to connect to the device after a reboot.'),
- Parameter('working_directory', default="/root/wa", override=True),
- Parameter('binaries_directory', default="/root/wa/bin", override=True),
- ]
-
- def __init__(self, **kwargs):
- super(LinuxManager, self).__init__(**kwargs)
- self.platform = self.platform_type(core_names=self.core_names, # pylint: disable=E1102
- core_clusters=self.core_clusters,
- modules=self.modules)
- self.target = self.target_type(connection_settings=self._make_connection_settings(),
- connect=False,
- platform=self.platform,
- working_directory=self.working_directory,
- executables_directory=self.binaries_directory,)
-
- def validate(self):
- if self.password and self.keyfile:
- raise ConfigError("Either `password` or `keyfile` must be given but not both")
-
- def connect(self):
- self.target.connect(self.boot_timeout)
-
- def _make_connection_settings(self):
- connection_settings = {}
- connection_settings['host'] = self.host
- connection_settings['username'] = self.username
- connection_settings['port'] = self.port
- connection_settings['telnet'] = self.use_telnet
- connection_settings['password_prompt'] = self.password_prompt
-
- if self.keyfile:
-            connection_settings['keyfile'] = self.keyfile
- elif self.password:
- connection_settings['password'] = self.password
-
- return connection_settings
diff --git a/wlauto/managers/locallinux.py b/wlauto/managers/locallinux.py
deleted file mode 100644
index 8f1c1297..00000000
--- a/wlauto/managers/locallinux.py
+++ /dev/null
@@ -1,30 +0,0 @@
-from wlauto.core.device_manager import DeviceManager
-from wlauto import Parameter
-
-from devlib.target import LocalLinuxTarget
-
-
-class LocalLinuxManager(DeviceManager):
-
- name = "local_linux"
- target_type = LocalLinuxTarget
-
- parameters = [
- Parameter('password',
- description='Password for the user.'),
- ]
-
- def __init__(self, **kwargs):
- super(LocalLinuxManager, self).__init__(**kwargs)
- self.platform = self.platform_type(core_names=self.core_names, # pylint: disable=E1102
- core_clusters=self.core_clusters,
- modules=self.modules)
- self.target = self.target_type(connection_settings=self._make_connection_settings())
-
- def connect(self):
- self.target.connect()
-
- def _make_connection_settings(self):
- connection_settings = {}
- connection_settings['password'] = self.password
- return connection_settings
diff --git a/wlauto/resource_getters/__init__.py b/wlauto/resource_getters/__init__.py
deleted file mode 100644
index cd5d64d6..00000000
--- a/wlauto/resource_getters/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/resource_getters/standard.py b/wlauto/resource_getters/standard.py
deleted file mode 100644
index bbedaf52..00000000
--- a/wlauto/resource_getters/standard.py
+++ /dev/null
@@ -1,508 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-This module contains the standard set of resource getters used by Workload Automation.
-
-"""
-import os
-import sys
-import shutil
-import inspect
-import httplib
-import logging
-import json
-
-import requests
-
-from wlauto import ResourceGetter, GetterPriority, Parameter, NO_ONE, settings, __file__ as __base_filepath
-from wlauto.exceptions import ResourceError
-from wlauto.utils.misc import ensure_directory_exists as _d, ensure_file_directory_exists as _f, sha256, urljoin
-from wlauto.utils.types import boolean
-
-
-logging.getLogger("requests").setLevel(logging.WARNING)
-logging.getLogger("urllib3").setLevel(logging.WARNING)
-
-
-class PackageFileGetter(ResourceGetter):
-
- name = 'package_file'
- description = """
- Looks for exactly one file with the specified plugin in the owner's directory. If a version
- is specified on invocation of get, it will filter the discovered file based on that version.
- Versions are treated as case-insensitive.
- """
-
- plugin = None
-
- def register(self):
- self.resolver.register(self, self.plugin, GetterPriority.package)
-
- def get(self, resource, **kwargs):
- resource_dir = os.path.dirname(sys.modules[resource.owner.__module__].__file__)
- version = kwargs.get('version')
- return get_from_location_by_plugin(resource, resource_dir, self.plugin, version)
-
-
-class EnvironmentFileGetter(ResourceGetter):
-
- name = 'environment_file'
- description = """Looks for exactly one file with the specified plugin in the owner's directory. If a version
- is specified on invocation of get, it will filter the discovered file based on that version.
- Versions are treated as case-insensitive."""
-
- plugin = None
-
- def register(self):
- self.resolver.register(self, self.plugin, GetterPriority.environment)
-
- def get(self, resource, **kwargs):
- resource_dir = resource.owner.dependencies_directory
-
- version = kwargs.get('version')
- return get_from_location_by_plugin(resource, resource_dir, self.plugin, version)
-
-
-class ReventGetter(ResourceGetter):
- """Implements logic for identifying revent files."""
-
- def get_base_location(self, resource):
- raise NotImplementedError()
-
- def register(self):
- self.resolver.register(self, 'revent', GetterPriority.package)
-
- def get(self, resource, **kwargs):
- filename = '.'.join([resource.owner.device.model, resource.stage, 'revent']).lower()
- location = _d(os.path.join(self.get_base_location(resource), 'revent_files'))
- for candidate in os.listdir(location):
- if candidate.lower() == filename.lower():
- return os.path.join(location, candidate)
-
-
-class PackageApkGetter(PackageFileGetter):
- name = 'package_apk'
- plugin = 'apk'
-
-
-class PackageJarGetter(PackageFileGetter):
- name = 'package_jar'
- plugin = 'jar'
-
-
-class PackageReventGetter(ReventGetter):
-
- name = 'package_revent'
-
- def get_base_location(self, resource):
- return get_owner_path(resource)
-
-
-class EnvironmentApkGetter(EnvironmentFileGetter):
- name = 'environment_apk'
- plugin = 'apk'
-
-
-class EnvironmentJarGetter(EnvironmentFileGetter):
- name = 'environment_jar'
- plugin = 'jar'
-
-
-class EnvironmentReventGetter(ReventGetter):
-
- name = 'enviroment_revent'
-
- def get_base_location(self, resource):
- return resource.owner.dependencies_directory
-
-
-class ExecutableGetter(ResourceGetter):
-
- name = 'exe_getter'
- resource_type = 'executable'
- priority = GetterPriority.environment
-
- def get(self, resource, **kwargs):
- if settings.assets_repository:
- path = os.path.join(settings.assets_repository, resource.platform, resource.filename)
- if os.path.isfile(path):
- return path
-
-
-class PackageExecutableGetter(ExecutableGetter):
-
- name = 'package_exe_getter'
- priority = GetterPriority.package
-
- def get(self, resource, **kwargs):
- path = os.path.join(get_owner_path(resource), 'bin', resource.platform, resource.filename)
- if os.path.isfile(path):
- return path
-
-
-class EnvironmentExecutableGetter(ExecutableGetter):
-
- name = 'env_exe_getter'
-
- def get(self, resource, **kwargs):
- paths = [
- os.path.join(resource.owner.dependencies_directory, 'bin',
- resource.platform, resource.filename),
- os.path.join(settings.user_directory, 'bin',
- resource.platform, resource.filename),
- ]
- for path in paths:
- if os.path.isfile(path):
- return path
-
-
-class DependencyFileGetter(ResourceGetter):
-
- name = 'filer'
- description = """
- Gets resources from the specified mount point. Copies them the local dependencies
- directory, and returns the path to the local copy.
-
- """
- resource_type = 'file'
- relative_path = '' # May be overridden by subclasses.
-
- priority = GetterPriority.remote
-
- parameters = [
- Parameter('mount_point', default='/', global_alias='remote_assets_path',
- description='Local mount point for the remote filer.'),
- ]
-
- def __init__(self, resolver, **kwargs):
- super(DependencyFileGetter, self).__init__(resolver, **kwargs)
-
- def get(self, resource, **kwargs):
- force = kwargs.get('force')
- remote_path = os.path.join(self.mount_point, self.relative_path, resource.path)
- local_path = os.path.join(resource.owner.dependencies_directory, os.path.basename(resource.path))
-
- if not os.path.isfile(local_path) or force:
- if not os.path.isfile(remote_path):
- return None
- self.logger.debug('Copying {} to {}'.format(remote_path, local_path))
- shutil.copy(remote_path, local_path)
-
- return local_path
-
-
-class PackageCommonDependencyGetter(ResourceGetter):
-
- name = 'packaged_common_dependency'
- resource_type = 'file'
- priority = GetterPriority.package - 1 # check after owner-specific locations
-
- def get(self, resource, **kwargs):
- path = os.path.join(settings.package_directory, 'common', resource.path)
- if os.path.exists(path):
- return path
-
-
-class EnvironmentCommonDependencyGetter(ResourceGetter):
-
- name = 'environment_common_dependency'
- resource_type = 'file'
- priority = GetterPriority.environment - 1 # check after owner-specific locations
-
- def get(self, resource, **kwargs):
- path = os.path.join(settings.dependencies_directory,
- os.path.basename(resource.path))
- if os.path.exists(path):
- return path
-
-
-class PackageDependencyGetter(ResourceGetter):
-
- name = 'packaged_dependency'
- resource_type = 'file'
- priority = GetterPriority.package
-
- def get(self, resource, **kwargs):
- owner_path = inspect.getfile(resource.owner.__class__)
- path = os.path.join(os.path.dirname(owner_path), resource.path)
- if os.path.exists(path):
- return path
-
-
-class EnvironmentDependencyGetter(ResourceGetter):
-
- name = 'environment_dependency'
- resource_type = 'file'
- priority = GetterPriority.environment
-
- def get(self, resource, **kwargs):
- path = os.path.join(resource.owner.dependencies_directory, os.path.basename(resource.path))
- if os.path.exists(path):
- return path
-
-
-class PluginAssetGetter(DependencyFileGetter):
-
- name = 'plugin_asset'
- resource_type = 'plugin_asset'
-
-
-class HttpGetter(ResourceGetter):
-
- name = 'http_assets'
- description = """
- Downloads resources from a server based on an index fetched from the specified URL.
-
- Given a URL, this will try to fetch ``<URL>/index.json``. The index file maps plugin
- names to a list of corresponing asset descriptons. Each asset description continas a path
- (relative to the base URL) of the resource and a SHA256 hash, so that this Getter can
- verify whether the resource on the remote has changed.
-
- For example, let's assume we want to get the APK file for workload "foo", and that
- assets are hosted at ``http://example.com/assets``. This Getter will first try to
- donwload ``http://example.com/assests/index.json``. The index file may contian
- something like ::
-
- {
- "foo": [
- {
- "path": "foo-app.apk",
- "sha256": "b14530bb47e04ed655ac5e80e69beaa61c2020450e18638f54384332dffebe86"
- },
- {
- "path": "subdir/some-other-asset.file",
- "sha256": "48d9050e9802246d820625717b72f1c2ba431904b8484ca39befd68d1dbedfff"
- }
- ]
- }
-
- This Getter will look through the list of assets for "foo" (in this case, two) check
- the paths until it finds one matching the resource (in this case, "foo-app.apk").
- Finally, it will try to dowload that file relative to the base URL and plugin name
- (in this case, "http://example.com/assets/foo/foo-app.apk"). The downloaded version
- will be cached locally, so that in the future, the getter will check the SHA256 hash
- of the local file against the one advertised inside index.json, and provided that hasn't
- changed, it won't try to download the file again.
-
- """
- priority = GetterPriority.remote
- resource_type = ['apk', 'file', 'jar', 'revent']
-
- parameters = [
- Parameter('url', global_alias='remote_assets_url',
- description="""URL of the index file for assets on an HTTP server."""),
- Parameter('username',
- description="""User name for authenticating with assets URL"""),
- Parameter('password',
- description="""Password for authenticationg with assets URL"""),
- Parameter('always_fetch', kind=boolean, default=False, global_alias='always_fetch_remote_assets',
- description="""If ``True``, will always attempt to fetch assets from the remote, even if
- a local cached copy is available."""),
- Parameter('chunk_size', kind=int, default=1024,
- description="""Chunk size for streaming large assets."""),
- ]
-
- def __init__(self, resolver, **kwargs):
- super(HttpGetter, self).__init__(resolver, **kwargs)
- self.index = None
-
- def get(self, resource, **kwargs):
- if not resource.owner:
- return # TODO: add support for unowned resources
- if not self.index:
- self.index = self.fetch_index()
- asset = self.resolve_resource(resource)
- if not asset:
- return
- return self.download_asset(asset, resource.owner.name)
-
- def fetch_index(self):
- if not self.url:
- return {}
- index_url = urljoin(self.url, 'index.json')
- response = self.geturl(index_url)
- if response.status_code != httplib.OK:
- message = 'Could not fetch "{}"; recieved "{} {}"'
- self.logger.error(message.format(index_url, response.status_code, response.reason))
- return {}
- return json.loads(response.content)
-
- def download_asset(self, asset, owner_name):
- url = urljoin(self.url, owner_name, asset['path'])
- local_path = _f(os.path.join(settings.dependencies_directory, '__remote',
- owner_name, asset['path'].replace('/', os.sep)))
- if os.path.isfile(local_path) and not self.always_fetch:
- local_sha = sha256(local_path)
- if local_sha == asset['sha256']:
- self.logger.debug('Local SHA256 matches; not re-downloading')
- return local_path
- self.logger.debug('Downloading {}'.format(url))
- response = self.geturl(url, stream=True)
- if response.status_code != httplib.OK:
- message = 'Could not download asset "{}"; recieved "{} {}"'
- self.logger.warning(message.format(url, response.status_code, response.reason))
- return
- with open(local_path, 'wb') as wfh:
- for chunk in response.iter_content(chunk_size=self.chunk_size):
- wfh.write(chunk)
- return local_path
-
- def geturl(self, url, stream=False):
- if self.username:
- auth = (self.username, self.password)
- else:
- auth = None
- return requests.get(url, auth=auth, stream=stream)
-
- def resolve_resource(self, resource):
- assets = self.index.get(resource.owner.name, {})
- if not assets:
- return {}
- if resource.name in ['apk', 'jar']:
- paths = [a['path'] for a in assets]
- version = getattr(resource, 'version', None)
- found = get_from_list_by_plugin(resource, paths, resource.name, version)
- if found:
- for a in assets:
- if a['path'] == found:
- return a
- elif resource.name == 'revent':
- filename = '.'.join([resource.owner.device.name, resource.stage, 'revent']).lower()
- for asset in assets:
- pathname = os.path.basename(asset['path']).lower()
- if pathname == filename:
- return asset
- else: # file
- for asset in assets:
- if asset['path'].lower() == resource.path.lower():
- return asset
-
-
-class RemoteFilerGetter(ResourceGetter):
-
- name = 'filer_assets'
- description = """
- Finds resources on a (locally mounted) remote filer and caches them locally.
-
- This assumes that the filer is mounted on the local machine (e.g. as a samba share).
-
- """
- priority = GetterPriority.remote
- resource_type = ['apk', 'file', 'jar', 'revent']
-
- parameters = [
- Parameter('remote_path', global_alias='remote_assets_path', default='',
- description="""Path, on the local system, where the assets are located."""),
- Parameter('always_fetch', kind=boolean, default=False, global_alias='always_fetch_remote_assets',
- description="""If ``True``, will always attempt to fetch assets from the remote, even if
- a local cached copy is available."""),
- ]
-
- def get(self, resource, **kwargs):
- version = kwargs.get('version')
- if resource.owner:
- remote_path = os.path.join(self.remote_path, resource.owner.name)
- local_path = os.path.join(settings.user_directory, '__filer', resource.owner.dependencies_directory)
- return self.try_get_resource(resource, version, remote_path, local_path)
- else:
- result = None
- for entry in os.listdir(remote_path):
- remote_path = os.path.join(self.remote_path, entry)
- local_path = os.path.join(settings.user_directory, '__filer', settings.dependencies_directory, entry)
- result = self.try_get_resource(resource, version, remote_path, local_path)
- if result:
- break
- return result
-
- def try_get_resource(self, resource, version, remote_path, local_path):
- if not self.always_fetch:
- result = self.get_from(resource, version, local_path)
- if result:
- return result
- if remote_path:
- # Didn't find it cached locally; now check the remoted
- result = self.get_from(resource, version, remote_path)
- if not result:
- return result
- else: # remote path is not set
- return None
- # Found it remotely, cache locally, then return it
- local_full_path = os.path.join(_d(local_path), os.path.basename(result))
- self.logger.debug('cp {} {}'.format(result, local_full_path))
- shutil.copy(result, local_full_path)
- return local_full_path
-
- def get_from(self, resource, version, location): # pylint: disable=no-self-use
- if resource.name in ['apk', 'jar']:
- return get_from_location_by_plugin(resource, location, resource.name, version)
- elif resource.name == 'file':
- filepath = os.path.join(location, resource.path)
- if os.path.exists(filepath):
- return filepath
- elif resource.name == 'revent':
- filename = '.'.join([resource.owner.device.model, resource.stage, 'revent']).lower()
- alternate_location = os.path.join(location, 'revent_files')
- # There tends to be some confusion as to where revent files should
- # be placed. This looks both in the plugin's directory, and in
- # 'revent_files' subdirectory under it, if it exists.
- if os.path.isdir(alternate_location):
- for candidate in os.listdir(alternate_location):
- if candidate.lower() == filename.lower():
- return os.path.join(alternate_location, candidate)
- if os.path.isdir(location):
- for candidate in os.listdir(location):
- if candidate.lower() == filename.lower():
- return os.path.join(location, candidate)
- else:
- raise ValueError('Unexpected resource type: {}'.format(resource.name))
-
-
-# Utility functions
-
-def get_from_location_by_plugin(resource, location, plugin, version=None):
- try:
- found_files = [os.path.join(location, f) for f in os.listdir(location)]
- except OSError:
- return None
- try:
- return get_from_list_by_plugin(resource, found_files, plugin, version)
- except ResourceError:
- raise ResourceError('More than one .{} found in {} for {}.'.format(plugin,
- location,
- resource.owner.name))
-
-
-def get_from_list_by_plugin(resource, filelist, plugin, version=None):
- filelist = [ff for ff in filelist
- if os.path.splitext(ff)[1].lower().endswith(plugin)]
- if version:
- filelist = [ff for ff in filelist if version.lower() in os.path.basename(ff).lower()]
- if len(filelist) == 1:
- return filelist[0]
- elif not filelist:
- return None
- else:
- raise ResourceError('More than one .{} found in {} for {}.'.format(plugin,
- filelist,
- resource.owner.name))
-
-
-def get_owner_path(resource):
- if resource.owner is NO_ONE:
- return os.path.join(os.path.dirname(__base_filepath), 'common')
- else:
- return os.path.dirname(sys.modules[resource.owner.__module__].__file__)
diff --git a/wlauto/result_processors/__init__.py b/wlauto/result_processors/__init__.py
deleted file mode 100644
index cd5d64d6..00000000
--- a/wlauto/result_processors/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/result_processors/cpustate.py b/wlauto/result_processors/cpustate.py
deleted file mode 100644
index 293b8abe..00000000
--- a/wlauto/result_processors/cpustate.py
+++ /dev/null
@@ -1,225 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import csv
-from collections import OrderedDict
-
-from wlauto import ResultProcessor, Parameter
-from wlauto.core import signal
-from wlauto.exceptions import ConfigError
-from wlauto.instrumentation import instrument_is_installed
-from wlauto.utils.power import report_power_stats
-from wlauto.utils.misc import unique
-
-
-class CpuStatesProcessor(ResultProcessor):
-
- name = 'cpustates'
- description = '''
- Process power ftrace to produce CPU state and parallelism stats.
-
- Parses trace-cmd output to extract power events and uses those to generate
- statistics about parallelism and frequency/idle core residency.
-
- .. note:: trace-cmd instrument must be enabled and configured to collect
- at least ``power:cpu_idle`` and ``power:cpu_frequency`` events.
- Reporting should also be enabled (it is by default) as
- ``cpustate`` parses the text version of the trace.
- Finally, the device should have ``cpuidle`` module installed.
-
- This generates two reports for the run:
-
- *parallel.csv*
-
- Shows what percentage of time was spent with N cores active (for N
- from 0 to the total number of cores), for a cluster or for a system as
- a whole. It contain the following columns:
-
- :workload: The workload label
- :iteration: iteration that was run
- :cluster: The cluster for which statics are reported. The value of
- ``"all"`` indicates that this row reports statistics for
- the whole system.
- :number_of_cores: number of cores active. ``0`` indicates the cluster
- was idle.
- :total_time: Total time spent in this state during workload execution
- :%time: Percentage of total workload execution time spent in this state
- :%running_time: Percentage of the time the cluster was active (i.e.
- ignoring time the cluster was idling) spent in this
- state.
-
- *cpustate.csv*
-
- Shows percentage of the time a core spent in a particular power state. The first
- column names the state is followed by a column for each core. Power states include
- available DVFS frequencies (for heterogeneous systems, this is the union of
- frequencies supported by different core types) and idle states. Some shallow
- states (e.g. ARM WFI) will consume different amount of power depending on the
- current OPP. For such states, there will be an entry for each opp. ``"unknown"``
- indicates the percentage of time for which a state could not be established from the
- trace. This is usually due to core state being unknown at the beginning of the trace,
- but may also be caused by dropped events in the middle of the trace.
-
- '''
-
- parameters = [
- Parameter('first_cluster_state', kind=int, default=2,
- description="""
- The first idle state which is common to a cluster.
- """),
- Parameter('first_system_state', kind=int, default=3,
- description="""
- The first idle state which is common to all cores.
- """),
- Parameter('write_iteration_reports', kind=bool, default=False,
- description="""
- By default, this instrument will generate reports for the entire run
- in the overall output directory. Enabling this option will, in addition,
- create reports in each iteration's output directory. The formats of these
- reports will be similar to the overall report, except they won't mention
- the workload name or iteration number (as that is implied by their location).
- """),
- Parameter('use_ratios', kind=bool, default=False,
- description="""
- By default proportional values will be reported as percentages, if this
- flag is enabled, they will be reported as ratios instead.
- """),
- Parameter('create_timeline', kind=bool, default=True,
- description="""
- Create a CSV with the timeline of core power states over the course of the run
- as well as the usual stats reports.
- """),
-
- ]
-
- def validate(self):
- if not instrument_is_installed('trace-cmd'):
- message = '''
- {} requires "trace-cmd" instrument to be installed and the collection of at
- least "power:cpu_frequency" and "power:cpu_idle" events to be enabled during worklad
- execution.
- '''
- raise ConfigError(message.format(self.name).strip())
-
- def initialize(self, context):
- # pylint: disable=attribute-defined-outside-init
- device = context.device
- for modname in ['cpuidle', 'cpufreq']:
- if not device.has(modname):
- message = 'Device does not appear to have {} capability; is the right module installed?'
- raise ConfigError(message.format(modname))
- if not device.core_names:
- message = '{} requires"core_names" and "core_clusters" to be specified for the device.'
- raise ConfigError(message.format(self.name))
- self.core_names = device.core_names
- self.core_clusters = device.core_clusters
- idle_states = {s.id: s.desc for s in device.get_cpuidle_states()}
- self.idle_state_names = [idle_states[i] for i in sorted(idle_states.keys())]
- self.num_idle_states = len(self.idle_state_names)
- self.iteration_reports = OrderedDict()
- # priority -19: just higher than the slow_start of instrumentation
- signal.connect(self.set_initial_state, signal.BEFORE_WORKLOAD_EXECUTION, priority=-19)
-
- def set_initial_state(self, context):
- # TODO: this does not play well with hotplug but leaving as-is, as this will be changed with
- # the devilib port anyway.
- # Write initial frequencies into the trace.
- # NOTE: this assumes per-cluster DVFS, that is valid for devices that
- # currently exist. This will need to be updated for per-CPU DFS.
- self.logger.debug('Writing initial frequencies into trace...')
- device = context.device
- cluster_freqs = {}
- for c in unique(device.core_clusters):
- cluster_freqs[c] = device.get_cluster_cur_frequency(c)
- for i, c in enumerate(device.core_clusters):
- entry = 'CPU {} FREQUENCY: {} kHZ'.format(i, cluster_freqs[c])
- device.set_sysfile_value('/sys/kernel/debug/tracing/trace_marker',
- entry, verify=False)
-
- # Nudge each cpu to force idle state transitions in the trace
- self.logger.debug('Nudging all cores awake...')
- for i in xrange(len(device.core_names)):
- command = device.busybox + ' taskset 0x{:x} {}'
- device.execute(command.format(1 << i, 'ls'))
-
- def process_iteration_result(self, result, context):
- trace = context.get_artifact('txttrace')
- if not trace:
- self.logger.debug('Text trace does not appear to have been generated; skipping this iteration.')
- return
- self.logger.debug('Generating power state reports from trace...')
- if self.create_timeline:
- timeline_csv_file = os.path.join(context.output_directory, 'power_states.csv')
- else:
- timeline_csv_file = None
- parallel_report, powerstate_report = report_power_stats( # pylint: disable=unbalanced-tuple-unpacking
- trace_file=trace.path,
- idle_state_names=self.idle_state_names,
- core_names=self.core_names,
- core_clusters=self.core_clusters,
- num_idle_states=self.num_idle_states,
- first_cluster_state=self.first_cluster_state,
- first_system_state=self.first_system_state,
- use_ratios=self.use_ratios,
- timeline_csv_file=timeline_csv_file,
- )
- if parallel_report is None:
- self.logger.warning('No power state reports generated; are power '
- 'events enabled in the trace?')
- return
- else:
- self.logger.debug('Reports generated.')
-
- iteration_id = (context.spec.id, context.spec.label, context.current_iteration)
- self.iteration_reports[iteration_id] = (parallel_report, powerstate_report)
- if self.write_iteration_reports:
- self.logger.debug('Writing iteration reports')
- parallel_report.write(os.path.join(context.output_directory, 'parallel.csv'))
- powerstate_report.write(os.path.join(context.output_directory, 'cpustates.csv'))
-
- def process_run_result(self, result, context): # pylint: disable=too-many-locals
- if not self.iteration_reports:
- self.logger.warning('No power state reports generated.')
- return
-
- parallel_rows = []
- powerstate_rows = []
- for iteration_id, reports in self.iteration_reports.iteritems():
- spec_id, workload, iteration = iteration_id
- parallel_report, powerstate_report = reports
- for record in parallel_report.values:
- parallel_rows.append([spec_id, workload, iteration] + record)
- for state in sorted(powerstate_report.state_stats):
- stats = powerstate_report.state_stats[state]
- powerstate_rows.append([spec_id, workload, iteration, state] +
- ['{:.3f}'.format(s if s is not None else 0)
- for s in stats])
-
- with open(os.path.join(context.output_directory, 'parallel.csv'), 'w') as wfh:
- writer = csv.writer(wfh)
- writer.writerow(['id', 'workload', 'iteration', 'cluster',
- 'number_of_cores', 'total_time',
- '%time', '%running_time'])
- writer.writerows(parallel_rows)
-
- with open(os.path.join(context.output_directory, 'cpustate.csv'), 'w') as wfh:
- writer = csv.writer(wfh)
- headers = ['id', 'workload', 'iteration', 'state']
- headers += ['{} CPU{}'.format(c, i)
- for i, c in enumerate(powerstate_report.core_names)]
- writer.writerow(headers)
- writer.writerows(powerstate_rows)
-
diff --git a/wlauto/result_processors/dvfs.py b/wlauto/result_processors/dvfs.py
deleted file mode 100644
index 89ee2e33..00000000
--- a/wlauto/result_processors/dvfs.py
+++ /dev/null
@@ -1,376 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import csv
-import re
-
-from wlauto import ResultProcessor, settings, instrumentation
-from wlauto.exceptions import ConfigError, ResultProcessorError
-
-
-class DVFS(ResultProcessor):
- name = 'dvfs'
- description = """
- Reports DVFS state residency data form ftrace power events.
-
- This generates a ``dvfs.csv`` in the top-level results directory that,
- for each workload iteration, reports the percentage of time each CPU core
- spent in each of the DVFS frequency states (P-states), as well as percentage
- of the time spent in idle, during the execution of the workload.
-
- .. note:: ``trace-cmd`` instrument *MUST* be enabled in the instrumentation,
- and at least ``'power*'`` events must be enabled.
-
-
- """
-
- def __init__(self, **kwargs):
- super(DVFS, self).__init__(**kwargs)
- self.device = None
- self.infile = None
- self.outfile = None
- self.current_cluster = None
- self.currentstates_of_clusters = []
- self.current_frequency_of_clusters = []
- self.timestamp = []
- self.state_time_map = {} # hold state at timestamp
- self.cpuid_time_map = {} # hold cpuid at timestamp
- self.cpu_freq_time_spent = {}
- self.cpuids_of_clusters = []
- self.power_state = [0, 1, 2, 3]
- self.UNKNOWNSTATE = 4294967295
- self.multiply_factor = None
- self.corename_of_clusters = []
- self.numberofcores_in_cluster = []
- self.minimum_frequency_cluster = []
- self.idlestate_description = {}
-
- def validate(self):
- if not instrumentation.instrument_is_installed('trace-cmd'):
- raise ConfigError('"dvfs" works only if "trace_cmd" in enabled in instrumentation')
-
- def initialize(self, context): # pylint: disable=R0912
- self.device = context.device
- if not self.device.has('cpuidle'):
- raise ConfigError('Device does not appear to have cpuidle capability; is the right module installed?')
- if not self.device.core_names:
- message = 'Device does not specify its core types (core_names/core_clusters not set in device_config).'
- raise ResultProcessorError(message)
- number_of_clusters = max(self.device.core_clusters) + 1
- # In IKS devices, actual number of cores is double
- # from what we get from device.number_of_cores
- if self.device.scheduler == 'iks':
- self.multiply_factor = 2
- elif self.device.scheduler == 'unknown':
- # Device doesn't specify its scheduler type. It could be IKS, in
- # which case reporeted values would be wrong, so error out.
- message = ('The Device doesn not specify it\'s scheduler type. If you are '
- 'using a generic device interface, please make sure to set the '
- '"scheduler" parameter in the device config.')
- raise ResultProcessorError(message)
- else:
- self.multiply_factor = 1
- # separate out the cores in each cluster
- # It is list of list of cores in cluster
- listof_cores_clusters = []
- for cluster in range(number_of_clusters):
- listof_cores_clusters.append([core for core in self.device.core_clusters if core == cluster])
- # Extract minimum frequency of each cluster and
- # the idle power state with its descriptive name
- #
- total_cores = 0
- current_cores = 0
- for cluster, cores_list in enumerate(listof_cores_clusters):
- self.corename_of_clusters.append(self.device.core_names[total_cores])
- if self.device.scheduler != 'iks':
- self.idlestate_description.update({s.id: s.desc for s in self.device.get_cpuidle_states(total_cores)})
- else:
- self.idlestate_description.update({s.id: s.desc for s in self.device.get_cpuidle_states()})
- total_cores += len(cores_list)
- self.numberofcores_in_cluster.append(len(cores_list))
- for i in range(current_cores, total_cores):
- if i in self.device.online_cpus:
- self.minimum_frequency_cluster.append(int(self.device.get_cpu_min_frequency("cpu{}".format(i))))
- break
- current_cores = total_cores
- length_frequency_cluster = len(self.minimum_frequency_cluster)
- if length_frequency_cluster != number_of_clusters:
- diff = number_of_clusters - length_frequency_cluster
- offline_value = -1
- for i in range(diff):
- if self.device.scheduler != 'iks':
- self.minimum_frequency_cluster.append(offline_value)
- else:
- self.minimum_frequency_cluster.append(self.device.iks_switch_frequency)
-
- def process_iteration_result(self, result, context):
- """
- Parse the trace.txt for each iteration, calculate DVFS residency state/frequencies
- and dump the result in csv and flush the data for next iteration.
- """
- self.infile = os.path.join(context.output_directory, 'trace.txt')
- if os.path.isfile(self.infile):
- self.logger.debug('Running result_processor "dvfs"')
- self.outfile = os.path.join(context.output_directory, 'dvfs.csv')
- self.flush_parse_initialize()
- self.calculate()
- self.percentage()
- self.generate_csv(context)
- self.logger.debug('Completed result_processor "dvfs"')
- else:
- self.logger.debug('trace.txt not found.')
-
- def flush_parse_initialize(self):
- """
- Store state, cpu_id for each timestamp from trace.txt and flush all the values for
- next iterations.
- """
- self.current_cluster = 0
- self.current_frequency_of_clusters = []
- self.timestamp = []
- self.currentstates_of_clusters = []
- self.state_time_map = {}
- self.cpuid_time_map = {}
- self.cpu_freq_time_spent = {}
- self.cpuids_of_clusters = []
- self.parse() # Parse trace.txt generated from trace-cmd instrumentation
- # Initialize the states of each core of clusters and frequency of
- # each clusters with its minimum freq
- # cpu_id is assigned for each of clusters.
- # For IKS devices cpuid remains same in other clusters
- # and for other it will increment by 1
- count = 0
- for cluster, cores_number in enumerate(self.numberofcores_in_cluster):
- self.currentstates_of_clusters.append([-1 for dummy in range(cores_number)])
- self.current_frequency_of_clusters.append(self.minimum_frequency_cluster[cluster])
- if self.device.scheduler == 'iks':
- self.cpuids_of_clusters.append([j for j in range(cores_number)])
- else:
- self.cpuids_of_clusters.append(range(count, count + cores_number))
- count += cores_number
-
- # Initialize the time spent in each state/frequency for each core.
- for i in range(self.device.number_of_cores * self.multiply_factor):
- self.cpu_freq_time_spent["cpu{}".format(i)] = {}
- for j in self.unique_freq():
- self.cpu_freq_time_spent["cpu{}".format(i)][j] = 0
- # To determine offline -1 state is added
- offline_value = -1
- self.cpu_freq_time_spent["cpu{}".format(i)][offline_value] = 0
- if 0 not in self.unique_freq():
- self.cpu_freq_time_spent["cpu{}".format(i)][0] = 0
-
- def update_cluster_freq(self, state, cpu_id):
- """ Update the cluster frequency and current cluster"""
- # For IKS devices cluster changes only possible when
- # freq changes, for other it is determine by cpu_id.
- if self.device.scheduler != 'iks':
- self.current_cluster = self.get_cluster(cpu_id, state)
- if self.get_state_name(state) == "freqstate":
- self.current_cluster = self.get_cluster(cpu_id, state)
- self.current_frequency_of_clusters[self.current_cluster] = state
-
- def get_cluster(self, cpu_id, state):
- # For IKS if current state is greater than switch
- # freq then it is in cluster2 else cluster1
- # For other, Look the current cpu_id and check this id
- # belong to which cluster.
- if self.device.scheduler == 'iks':
- return 1 if state >= self.device.iks_switch_frequency else 0
- else:
- for cluster, cpuids_list in enumerate(self.cpuids_of_clusters):
- if cpu_id in cpuids_list:
- return cluster
-
- def get_cluster_freq(self):
- return self.current_frequency_of_clusters[self.current_cluster]
-
- def update_state(self, state, cpu_id): # pylint: disable=R0912
- """
- Update state of each cores in every cluster.
- This is done for each timestamp.
- """
- POWERDOWN = 2
- offline_value = -1
- # if state is in unknowstate, then change state of current cpu_id
- # with cluster freq of current cluster.
- # if state is in powerstate then change state with that power state.
- if self.get_state_name(state) in ["unknownstate", "powerstate"]:
- for i in range(len(self.cpuids_of_clusters[self.current_cluster])):
- if cpu_id == self.cpuids_of_clusters[self.current_cluster][i]:
- if self.get_state_name(state) == "unknownstate":
- self.currentstates_of_clusters[self.current_cluster][i] = self.current_frequency_of_clusters[self.current_cluster]
- elif self.get_state_name(state) == "powerstate":
- self.currentstates_of_clusters[self.current_cluster][i] = state
- # If state is in freqstate then update the state with current state.
- # For IKS, if all cores is in power down and current state is freqstate
- # then update the all the cores in current cluster to current state
- # and other state cluster changed to Power down.
- if self.get_state_name(state) == "freqstate":
- for i, j in enumerate(self.currentstates_of_clusters[self.current_cluster]):
- if j != offline_value:
- self.currentstates_of_clusters[self.current_cluster][i] = state
- if cpu_id == self.cpuids_of_clusters[self.current_cluster][i]:
- self.currentstates_of_clusters[self.current_cluster][i] = state
- if self.device.scheduler == 'iks':
- check = False # All core in cluster is power down
- for i in range(len(self.currentstates_of_clusters[self.current_cluster])):
- if self.currentstates_of_clusters[self.current_cluster][i] != POWERDOWN:
- check = True
- break
- if not check:
- for i in range(len(self.currentstates_of_clusters[self.current_cluster])):
- self.currentstates_of_clusters[self.current_cluster][i] = self.current_frequency_of_clusters[self.current_cluster]
- for cluster, state_list in enumerate(self.currentstates_of_clusters):
- if cluster != self.current_cluster:
- for j in range(len(state_list)):
- self.currentstates_of_clusters[i][j] = POWERDOWN
-
- def unique_freq(self):
- """ Determine the unique Frequency and state"""
- unique_freq = []
- for i in self.timestamp:
- if self.state_time_map[i] not in unique_freq and self.state_time_map[i] != self.UNKNOWNSTATE:
- unique_freq.append(self.state_time_map[i])
- for i in self.minimum_frequency_cluster:
- if i not in unique_freq:
- unique_freq.append(i)
- return unique_freq
-
- def parse(self):
- """
- Parse the trace.txt ::
-
- store timestamp, state, cpu_id
- ---------------------------------------------------------------------------------
- |timestamp| |state| |cpu_id|
- <idle>-0 [001] 294.554380: cpu_idle: state=4294967295 cpu_id=1
- <idle>-0 [001] 294.554454: power_start: type=1 state=0 cpu_id=1
- <idle>-0 [001] 294.554458: cpu_idle: state=0 cpu_id=1
- <idle>-0 [001] 294.554464: power_end: cpu_id=1
- <idle>-0 [001] 294.554471: cpu_idle: state=4294967295 cpu_id=1
- <idle>-0 [001] 294.554590: power_start: type=1 state=0 cpu_id=1
- <idle>-0 [001] 294.554593: cpu_idle: state=0 cpu_id=1
- <idle>-0 [001] 294.554636: power_end: cpu_id=1
- <idle>-0 [001] 294.554639: cpu_idle: state=4294967295 cpu_id=1
- <idle>-0 [001] 294.554669: power_start: type=1 state=0 cpu_id=1
-
-
- """
- pattern = re.compile(r'\s+(?P<time>\S+)\S+\s*(?P<desc>(cpu_idle:|cpu_frequency:))\s*state=(?P<state>\d+)\s*cpu_id=(?P<cpu_id>\d+)')
- start_trace = False
- stop_trace = False
- with open(self.infile, 'r') as f:
- for line in f:
- #Start collecting data from label "TRACE_MARKER_START" and
- #stop with label "TRACE_MARKER_STOP"
- if line.find("TRACE_MARKER_START") != -1:
- start_trace = True
- if line.find("TRACE_MARKER_STOP") != -1:
- stop_trace = True
- if start_trace and not stop_trace:
- match = pattern.search(line)
- if match:
- self.timestamp.append(float(match.group('time')))
- self.state_time_map[float(match.group('time'))] = int(match.group('state'))
- self.cpuid_time_map[float(match.group('time'))] = int(match.group('cpu_id'))
-
- def get_state_name(self, state):
- if state in self.power_state:
- return "powerstate"
- elif state == self.UNKNOWNSTATE:
- return "unknownstate"
- else:
- return "freqstate"
-
- def populate(self, time1, time2):
- diff = time2 - time1
- for cluster, states_list in enumerate(self.currentstates_of_clusters):
- for k, j in enumerate(states_list):
- if self.device.scheduler == 'iks' and cluster == 1:
- self.cpu_freq_time_spent["cpu{}".format(self.cpuids_of_clusters[cluster][k] + len(self.currentstates_of_clusters[0]))][j] += diff
- else:
- self.cpu_freq_time_spent["cpu{}".format(self.cpuids_of_clusters[cluster][k])][j] += diff
-
- def calculate(self):
- for i in range(len(self.timestamp) - 1):
- self.update_cluster_freq(self.state_time_map[self.timestamp[i]], self.cpuid_time_map[self.timestamp[i]])
- self.update_state(self.state_time_map[self.timestamp[i]], self.cpuid_time_map[self.timestamp[i]])
- self.populate(self.timestamp[i], self.timestamp[i + 1])
-
- def percentage(self):
- """Normalize the result with total execution time."""
- temp = self.cpu_freq_time_spent.copy()
- for i in self.cpu_freq_time_spent:
- total = 0
- for j in self.cpu_freq_time_spent[i]:
- total += self.cpu_freq_time_spent[i][j]
- for j in self.cpu_freq_time_spent[i]:
- if total != 0:
- temp[i][j] = self.cpu_freq_time_spent[i][j] * 100 / total
- else:
- temp[i][j] = 0
- return temp
-
- def generate_csv(self, context): # pylint: disable=R0912,R0914
- """ generate the '''dvfs.csv''' with the state, frequency and cores """
- temp = self.percentage()
- total_state = self.unique_freq()
- offline_value = -1
- ghz_conversion = 1000000
- mhz_conversion = 1000
- with open(self.outfile, 'a+') as f:
- writer = csv.writer(f, delimiter=',')
- reader = csv.reader(f)
- # Create the header in the format below
- # workload name, iteration, state, A7 CPU0,A7 CPU1,A7 CPU2,A7 CPU3,A15 CPU4,A15 CPU5
- if sum(1 for row in reader) == 0:
- header_row = ['workload', 'iteration', 'state']
- count = 0
- for cluster, states_list in enumerate(self.currentstates_of_clusters):
- for dummy_index in range(len(states_list)):
- header_row.append("{} CPU{}".format(self.corename_of_clusters[cluster], count))
- count += 1
- writer.writerow(header_row)
- if offline_value in total_state:
- total_state.remove(offline_value) # remove the offline state
- for i in sorted(total_state):
- temprow = []
- temprow.extend([context.result.spec.label, context.result.iteration])
- if "state{}".format(i) in self.idlestate_description:
- temprow.append(self.idlestate_description["state{}".format(i)])
- else:
- state_value = float(i)
- if state_value / ghz_conversion >= 1:
- temprow.append("{} Ghz".format(state_value / ghz_conversion))
- else:
- temprow.append("{} Mhz".format(state_value / mhz_conversion))
- for j in range(self.device.number_of_cores * self.multiply_factor):
- temprow.append("{0:.3f}".format(temp["cpu{}".format(j)][i]))
- writer.writerow(temprow)
- check_off = True # Checking whether core is OFFLINE
- for i in range(self.device.number_of_cores * self.multiply_factor):
- temp_val = "{0:.3f}".format(temp["cpu{}".format(i)][offline_value])
- if float(temp_val) > 1:
- check_off = False
- break
- if check_off is False:
- temprow = []
- temprow.extend([context.result.spec.label, context.result.iteration])
- temprow.append("OFFLINE")
- for i in range(self.device.number_of_cores * self.multiply_factor):
- temprow.append("{0:.3f}".format(temp["cpu{}".format(i)][offline_value]))
- writer.writerow(temprow)
diff --git a/wlauto/result_processors/ipynb_exporter/__init__.py b/wlauto/result_processors/ipynb_exporter/__init__.py
deleted file mode 100644
index 6689b3e3..00000000
--- a/wlauto/result_processors/ipynb_exporter/__init__.py
+++ /dev/null
@@ -1,182 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=attribute-defined-outside-init
-
-from datetime import datetime
-import os
-import shutil
-import webbrowser
-
-try:
- import jinja2
-except ImportError:
- jinja2 = None
-
-from wlauto import File, Parameter, ResultProcessor
-from wlauto.exceptions import ConfigError, ResultProcessorError
-import wlauto.utils.ipython as ipython
-from wlauto.utils.misc import open_file
-
-
-DEFAULT_NOTEBOOK_TEMPLATE = 'template.ipynb'
-
-
-class IPythonNotebookExporter(ResultProcessor):
-
- name = 'ipynb_exporter'
- description = """
- Generates an IPython notebook from a template with the results and runs it.
- Optionally it can show the resulting notebook in a web browser.
- It can also generate a PDF from the notebook.
-
- The template syntax is that of `jinja2 <http://jinja.pocoo.org/>`_
- and the template should generate a valid ipython notebook. The
- templates receives ``result`` and ``context`` which correspond to
- the RunResult and ExecutionContext respectively. You can use those
- in your ipython notebook template to extract any information you
- want to parse or show.
-
- This results_processor depends on ``ipython`` and ``python-jinja2`` being
- installed on the system.
-
- For example, a simple template that plots a bar graph of the results is::
-
- """
- # Note: the example template is appended after the class definition
-
- parameters = [
- Parameter('notebook_template', default=DEFAULT_NOTEBOOK_TEMPLATE,
- description='''Filename of the ipython notebook template. If
- no `notebook_template` is specified, the example template
- above is used.'''),
- Parameter('notebook_name_prefix', default='result_',
- description=''' Prefix of the name of the notebook. The date,
- time and ``.ipynb`` are appended to form the notebook filename.
- E.g. if notebook_name_prefix is ``result_`` then a run on 13th
- April 2015 at 9:54 would generate a notebook called
- ``result_150413-095400.ipynb``. When generating a PDF,
- the resulting file will have the same name, but
- ending in ``.pdf``.'''),
- Parameter('show_notebook', kind=bool,
- description='Open a web browser with the resulting notebook.'),
- Parameter('notebook_directory',
- description='''Path to the notebooks directory served by the
- ipython notebook server. You must set it if
- ``show_notebook`` is selected. The ipython notebook
- will be copied here if specified.'''),
- Parameter('notebook_url', default='http://localhost:8888/notebooks',
- description='''URL of the notebook on the IPython server. If
- not specified, it will be assumed to be in the root notebooks
- location on localhost, served on port 8888. Only needed if
- ``show_notebook`` is selected.
-
- .. note:: the URL should not contain the final part (the notebook name) which will be populated automatically.
- '''),
- Parameter('convert_to_html', kind=bool,
- description='Convert the resulting notebook to HTML.'),
- Parameter('show_html', kind=bool,
- description='''Open the exported html notebook at the end of
- the run. This can only be selected if convert_to_html has
- also been selected.'''),
- Parameter('convert_to_pdf', kind=bool,
- description='Convert the resulting notebook to PDF.'),
- Parameter('show_pdf', kind=bool,
- description='''Open the pdf at the end of the run. This can
- only be selected if convert_to_pdf has also been selected.'''),
- ]
-
- def initialize(self, context):
- file_resource = File(self, self.notebook_template)
- self.notebook_template_file = context.resolver.get(file_resource)
- nbbasename_template = self.notebook_name_prefix + '%y%m%d-%H%M%S.ipynb'
- self.nbbasename = datetime.now().strftime(nbbasename_template)
-
- def validate(self):
- if ipython.import_error_str:
- raise ResultProcessorError(ipython.import_error_str)
-
- if not jinja2:
- msg = '{} requires python-jinja2 package to be installed'.format(self.name)
- raise ResultProcessorError(msg)
-
- if self.show_notebook and not self.notebook_directory:
- raise ConfigError('Requested "show_notebook" but no notebook_directory was specified')
-
- if self.notebook_directory and not os.path.isdir(self.notebook_directory):
- raise ConfigError('notebook_directory {} does not exist'.format(self.notebook_directory))
-
- if self.show_html and not self.convert_to_html: # pylint: disable=E0203
- self.convert_to_html = True
- self.logger.debug('Assuming "convert_to_html" as "show_html" is set')
-
- if self.show_pdf and not self.convert_to_pdf: # pylint: disable=E0203
- self.convert_to_pdf = True
- self.logger.debug('Assuming "convert_to_pdf" as "show_pdf" is set')
-
- def export_run_result(self, result, context):
- self.generate_notebook(result, context)
- if self.show_notebook:
- self.open_notebook()
-
- if self.convert_to_pdf:
- ipython.export_notebook(self.nbbasename,
- context.run_output_directory, 'pdf')
- if self.show_pdf:
- self.open_file('pdf')
-
- if self.convert_to_html:
- ipython.export_notebook(self.nbbasename,
- context.run_output_directory, 'html')
- if self.show_html:
- self.open_file('html')
-
- def generate_notebook(self, result, context):
- """Generate a notebook from the template and run it"""
- with open(self.notebook_template_file) as fin:
- template = jinja2.Template(fin.read())
-
- notebook_in = template.render(result=result, context=context)
- notebook = ipython.read_notebook(notebook_in)
-
- ipython.run_notebook(notebook)
-
- self.notebook_file = os.path.join(context.run_output_directory,
- self.nbbasename)
- with open(self.notebook_file, 'w') as wfh:
- ipython.write_notebook(notebook, wfh)
-
- if self.notebook_directory:
- shutil.copy(self.notebook_file,
- os.path.join(self.notebook_directory))
-
- def open_notebook(self):
- """Open the notebook in a browser"""
- webbrowser.open(self.notebook_url.rstrip('/') + '/' + self.nbbasename)
-
- def open_file(self, output_format):
- """Open the exported notebook"""
- fname = os.path.splitext(self.notebook_file)[0] + "." + output_format
- open_file(fname)
-
-
-# Add the default template to the documentation
-with open(os.path.join(os.path.dirname(__file__), DEFAULT_NOTEBOOK_TEMPLATE)) as in_file:
- # Without an empty indented line, wlauto.misc.doc.strip_inlined_text() gets
- # confused
- IPythonNotebookExporter.description += " \n"
-
- for line in in_file:
- IPythonNotebookExporter.description += " " + line
diff --git a/wlauto/result_processors/ipynb_exporter/template.ipynb b/wlauto/result_processors/ipynb_exporter/template.ipynb
deleted file mode 100644
index 7eea4337..00000000
--- a/wlauto/result_processors/ipynb_exporter/template.ipynb
+++ /dev/null
@@ -1,60 +0,0 @@
-{
- "metadata": {
- "name": ""
- },
- "nbformat": 3,
- "nbformat_minor": 0,
- "worksheets": [
- {
- "cells": [
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "%pylab inline"
- ],
- "language": "python",
- "metadata": {},
- "outputs": [],
- "prompt_number": 1
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "results = {",
- {% for ir in result.iteration_results -%}
- {% for metric in ir.metrics -%}
- {% if metric.name in ir.workload.summary_metrics or not ir.workload.summary_metrics -%}
- "\"{{ ir.spec.label }}_{{ ir.id }}_{{ ir.iteration }}_{{ metric.name }}\": {{ metric.value }}, ",
- {%- endif %}
- {%- endfor %}
- {%- endfor %}
- "}\n",
- "width = 0.7\n",
- "ind = np.arange(len(results))"
- ],
- "language": "python",
- "metadata": {},
- "outputs": [],
- "prompt_number": 2
- },
- {
- "cell_type": "code",
- "collapsed": false,
- "input": [
- "fig, ax = plt.subplots()\n",
- "ax.bar(ind, results.values(), width)\n",
- "ax.set_xticks(ind + width/2)\n",
- "_ = ax.set_xticklabels(results.keys())"
- ],
- "language": "python",
- "metadata": {},
- "outputs": [],
- "prompt_number": 3
- }
- ],
- "metadata": {}
- }
- ]
-}
diff --git a/wlauto/result_processors/json_rp.py b/wlauto/result_processors/json_rp.py
deleted file mode 100644
index 22de698a..00000000
--- a/wlauto/result_processors/json_rp.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-#pylint: disable=E1101,W0201
-import os
-from base64 import b64encode
-
-from wlauto import ResultProcessor, Parameter
-from wlauto.utils.serializer import json
-from wlauto.utils.misc import istextfile
-from wlauto.utils.types import list_of_strings
-from wlauto.exceptions import ResultProcessorError
-
-
-class JsonReportProcessor(ResultProcessor):
-
- name = 'json'
- description = """
- Produces a JSON file with WA config, results ect.
-
-
- This includes embedded artifacts either as text or base64
-
- """
-
- parameters = [
- Parameter("ignore_artifact_types", kind=list_of_strings,
- default=['export', 'raw'],
- description="""A list of which artifact types to be ignored,
- and thus not embedded in the JSON""")
- ]
- final = {}
-
- def initialize(self, context):
- self.final = context.run_info.to_dict()
- del self.final['workload_specs']
-
- wa_adapter = self.final['device']
- self.final['device'] = {}
- self.final['device']['props'] = self.final['device_properties']
- self.final['device']['wa_adapter'] = wa_adapter
- del self.final['device_properties']
-
- self.final['output_directory'] = os.path.abspath(context.output_directory)
- self.final['artifacts'] = []
- self.final['workloads'] = context.config.to_dict()['workload_specs']
- for workload in self.final['workloads']:
- workload['name'] = workload['workload_name']
- del workload['workload_name']
- workload['results'] = []
-
- def export_iteration_result(self, result, context):
- r = {}
- r['iteration'] = context.current_iteration
- r['status'] = result.status
- r['events'] = [e.to_dict() for e in result.events]
- r['metrics'] = []
- for m in result.metrics:
- md = m.to_dict()
- md['is_summary'] = m.name in context.workload.summary_metrics
- r['metrics'].append(md)
- iteration_artefacts = [self.embed_artifact(context, a) for a in context.iteration_artifacts]
- r['artifacts'] = [e for e in iteration_artefacts if e is not None]
- for workload in self.final['workloads']:
- if workload['id'] == context.spec.id:
- workload.update(r)
- break
- else:
- raise ResultProcessorError("No workload spec with matching id found")
-
- def export_run_result(self, result, context):
- run_artifacts = [self.embed_artifact(context, a) for a in context.run_artifacts]
- self.logger.debug('Generating results bundle...')
- run_stats = {
- 'status': result.status,
- 'events': [e.to_dict() for e in result.events],
- 'end_time': context.run_info.end_time,
- 'duration': context.run_info.duration.total_seconds(),
- 'artifacts': [e for e in run_artifacts if e is not None],
- }
- self.final.update(run_stats)
- json_path = os.path.join(os.path.abspath(context.output_directory), "run.json")
- with open(json_path, 'w') as json_file:
- json.dump(self.final, json_file)
-
- def embed_artifact(self, context, artifact):
- artifact_path = os.path.join(context.output_directory, artifact.path)
-
- if not os.path.exists(artifact_path):
- self.logger.debug('Artifact {} has not been generated'.format(artifact_path))
- return
- elif artifact.kind in self.ignore_artifact_types:
- self.logger.debug('Ignoring {} artifact {}'.format(artifact.kind, artifact_path))
- return
- else:
- self.logger.debug('Uploading artifact {}'.format(artifact_path))
- entry = artifact.to_dict()
- path = os.path.join(os.path.abspath(context.output_directory), entry['path'])
- if istextfile(open(path)):
- entry['encoding'] = "text"
- entry['content'] = open(path).read()
- else:
- entry['encoding'] = "base64"
- entry['content'] = b64encode(open(path).read())
-
- del entry['path']
- del entry['level']
- del entry['mandatory']
- return entry
diff --git a/wlauto/result_processors/mongodb.py b/wlauto/result_processors/mongodb.py
deleted file mode 100644
index 054ccab6..00000000
--- a/wlauto/result_processors/mongodb.py
+++ /dev/null
@@ -1,235 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-#pylint: disable=E1101,W0201
-import os
-import re
-import string
-import tarfile
-
-try:
- import pymongo
- from bson.objectid import ObjectId
- from gridfs import GridFS
-except ImportError:
- pymongo = None
-
-from wlauto import ResultProcessor, Parameter, Artifact
-from wlauto.exceptions import ResultProcessorError
-from wlauto.utils.misc import as_relative
-
-
-__bad_chars = '$.'
-KEY_TRANS_TABLE = string.maketrans(__bad_chars, '_' * len(__bad_chars))
-BUNDLE_NAME = 'files.tar.gz'
-
-
-class MongodbUploader(ResultProcessor):
-
- name = 'mongodb'
- description = """
- Uploads run results to a MongoDB instance.
-
- MongoDB is a popular document-based data store (NoSQL database).
-
- """
-
- parameters = [
- Parameter('uri', kind=str, default=None,
- description="""Connection URI. If specified, this will be used for connecting
- to the backend, and host/port parameters will be ignored."""),
- Parameter('host', kind=str, default='localhost', mandatory=True,
- description='IP address/name of the machinge hosting the MongoDB server.'),
- Parameter('port', kind=int, default=27017, mandatory=True,
- description='Port on which the MongoDB server is listening.'),
- Parameter('db', kind=str, default='wa', mandatory=True,
- description='Database on the server used to store WA results.'),
- Parameter('extra_params', kind=dict, default={},
- description='''Additional connection parameters may be specfied using this (see
- pymongo documentation.'''),
- Parameter('authentication', kind=dict, default={},
- description='''If specified, this will be passed to db.authenticate() upon connection;
- please pymongo documentaion authentication examples for detail.'''),
- ]
-
- def initialize(self, context):
- if pymongo is None:
- raise ResultProcessorError('mongodb result processor requres pymongo package to be installed.')
- try:
- self.client = pymongo.MongoClient(self.host, self.port, **self.extra_params)
- except pymongo.errors.PyMongoError, e:
- raise ResultProcessorError('Error connecting to mongod: {}'.fromat(e))
- self.dbc = self.client[self.db]
- self.fs = GridFS(self.dbc)
- if self.authentication:
- if not self.dbc.authenticate(**self.authentication):
- raise ResultProcessorError('Authentication to database {} failed.'.format(self.db))
-
- self.run_result_dbid = ObjectId()
- run_doc = context.run_info.to_dict()
-
- wa_adapter = run_doc['device']
- devprops = dict((k.translate(KEY_TRANS_TABLE), v)
- for k, v in run_doc['device_properties'].iteritems())
- run_doc['device'] = devprops
- run_doc['device']['wa_adapter'] = wa_adapter
- del run_doc['device_properties']
-
- run_doc['output_directory'] = os.path.abspath(context.output_directory)
- run_doc['artifacts'] = []
- run_doc['workloads'] = context.config.to_dict()['workload_specs']
- for workload in run_doc['workloads']:
- workload['name'] = workload['workload_name']
- del workload['workload_name']
- workload['results'] = []
- self.run_dbid = self.dbc.runs.insert(run_doc)
-
- prefix = context.run_info.project if context.run_info.project else '[NOPROJECT]'
- run_part = context.run_info.run_name or context.run_info.uuid.hex
- self.gridfs_dir = os.path.join(prefix, run_part)
- i = 0
- while self.gridfs_directory_exists(self.gridfs_dir):
- if self.gridfs_dir.endswith('-{}'.format(i)):
- self.gridfs_dir = self.gridfs_dir[:-2]
- i += 1
- self.gridfs_dir += '-{}'.format(i)
-
- # Keep track of all generated artefacts, so that we know what to
- # include in the tarball. The tarball will contains raw artificats
- # (other kinds would have been uploaded directly or do not contain
- # new data) and all files in the results dir that have not been marked
- # as artificats.
- self.artifacts = []
-
- def export_iteration_result(self, result, context):
- r = {}
- r['iteration'] = context.current_iteration
- r['status'] = result.status
- r['events'] = [e.to_dict() for e in result.events]
- r['metrics'] = []
- for m in result.metrics:
- md = m.to_dict()
- md['is_summary'] = m.name in context.workload.summary_metrics
- r['metrics'].append(md)
- iteration_artefacts = [self.upload_artifact(context, a) for a in context.iteration_artifacts]
- r['artifacts'] = [e for e in iteration_artefacts if e is not None]
- self.dbc.runs.update({'_id': self.run_dbid, 'workloads.id': context.spec.id},
- {'$push': {'workloads.$.results': r}})
-
- def export_run_result(self, result, context):
- run_artifacts = [self.upload_artifact(context, a) for a in context.run_artifacts]
- self.logger.debug('Generating results bundle...')
- bundle = self.generate_bundle(context)
- if bundle:
- run_artifacts.append(self.upload_artifact(context, bundle))
- else:
- self.logger.debug('No untracked files found.')
- run_stats = {
- 'status': result.status,
- 'events': [e.to_dict() for e in result.events],
- 'end_time': context.run_info.end_time,
- 'duration': context.run_info.duration.total_seconds(),
- 'artifacts': [e for e in run_artifacts if e is not None],
- }
- self.dbc.runs.update({'_id': self.run_dbid}, {'$set': run_stats})
-
- def finalize(self, context):
- self.client.close()
-
- def validate(self):
- if self.uri:
- has_warned = False
- if self.host != self.parameters['host'].default:
- self.logger.warning('both uri and host specified; host will be ignored')
- has_warned = True
- if self.port != self.parameters['port'].default:
- self.logger.warning('both uri and port specified; port will be ignored')
- has_warned = True
- if has_warned:
- self.logger.warning('To supress this warning, please remove either uri or '
- 'host/port from your config.')
-
- def upload_artifact(self, context, artifact):
- artifact_path = os.path.join(context.output_directory, artifact.path)
- self.artifacts.append((artifact_path, artifact))
- if not os.path.exists(artifact_path):
- self.logger.debug('Artifact {} has not been generated'.format(artifact_path))
- return
- elif artifact.kind in ['raw', 'export']:
- self.logger.debug('Ignoring {} artifact {}'.format(artifact.kind, artifact_path))
- return
- else:
- self.logger.debug('Uploading artifact {}'.format(artifact_path))
- entry = artifact.to_dict()
- path = entry['path']
- del entry['path']
- del entry['name']
- del entry['level']
- del entry['mandatory']
-
- if context.workload is None:
- entry['filename'] = os.path.join(self.gridfs_dir, as_relative(path))
- else:
- entry['filename'] = os.path.join(self.gridfs_dir,
- '{}-{}-{}'.format(context.spec.id,
- context.spec.label,
- context.current_iteration),
- as_relative(path))
- with open(artifact_path, 'rb') as fh:
- fsid = self.fs.put(fh, **entry)
- entry['gridfs_id'] = fsid
-
- return entry
-
- def gridfs_directory_exists(self, path):
- regex = re.compile('^{}'.format(path))
- return self.fs.exists({'filename': regex})
-
- def generate_bundle(self, context): # pylint: disable=R0914
- """
- The bundle will contain files generated during the run that have not
- already been processed. This includes all files for which there isn't an
- explicit artifact as well as "raw" artifacts that aren't uploaded individually.
- Basically, this ensures that everything that is not explicilty marked as an
- "export" (which means it's guarnteed not to contain information not accessible
- from other artifacts/scores) is avialable in the DB. The bundle is compressed,
- so it shouldn't take up too much space, however it also means that it's not
- easy to query for or get individual file (a trade off between space and convinience).
-
- """
- to_upload = []
- artpaths = []
- outdir = context.output_directory
- for artpath, artifact in self.artifacts:
- artpaths.append(os.path.relpath(artpath, outdir))
- if artifact.kind == 'raw':
- to_upload.append((artpath, os.path.relpath(artpath, outdir)))
- for root, _, files in os.walk(outdir):
- for f in files:
- path = os.path.relpath(os.path.join(root, f), outdir)
- if path not in artpaths:
- to_upload.append((os.path.join(outdir, path), path))
-
- if not to_upload:
- # Nothing unexpected/unprocessed has been generated during the run.
- return None
- else:
- archive_path = os.path.join(outdir, BUNDLE_NAME)
- with tarfile.open(archive_path, 'w:gz') as tf:
- for fpath, arcpath in to_upload:
- tf.add(fpath, arcpath)
- return Artifact('mongo_bundle', BUNDLE_NAME, 'data',
- description='bundle to be uploaded to mongodb.')
diff --git a/wlauto/result_processors/notify.py b/wlauto/result_processors/notify.py
deleted file mode 100644
index a77d7ccb..00000000
--- a/wlauto/result_processors/notify.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import collections
-import sys
-
-
-try:
- import notify2
-except ImportError:
- notify2 = None
-
-
-from wlauto import ResultProcessor
-from wlauto.core.result import IterationResult
-from wlauto.exceptions import ResultProcessorError
-
-
-class NotifyProcessor(ResultProcessor):
-
- name = 'notify'
- description = '''Display a desktop notification when the run finishes
-
- Notifications only work in linux systems. It uses the generic
- freedesktop notification specification. For this results processor
- to work, you need to have python-notify installed in your system.
-
- '''
-
- def initialize(self, context):
- if sys.platform != 'linux2':
- raise ResultProcessorError('Notifications are only supported in linux')
-
- if not notify2:
- raise ResultProcessorError('notify2 not installed. Please install the notify2 package')
-
- notify2.init("Workload Automation")
-
- def process_run_result(self, result, context):
- num_iterations = sum(context.job_iteration_counts.values())
-
- counter = collections.Counter()
- for result in result.iteration_results:
- counter[result.status] += 1
-
- score_board = []
- for status in IterationResult.values:
- if status in counter:
- score_board.append('{} {}'.format(counter[status], status))
-
- summary = 'Workload Automation run finised'
- body = 'Ran a total of {} iterations: '.format(num_iterations)
- body += ', '.join(score_board)
- notification = notify2.Notification(summary, body)
-
- if not notification.show():
- self.logger.warning('Notification failed to show')
diff --git a/wlauto/result_processors/sqlite.py b/wlauto/result_processors/sqlite.py
deleted file mode 100644
index 71ead7dc..00000000
--- a/wlauto/result_processors/sqlite.py
+++ /dev/null
@@ -1,184 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=attribute-defined-outside-init
-
-import os
-import sqlite3
-import json
-import uuid
-from datetime import datetime, timedelta
-from contextlib import contextmanager
-
-from wlauto import ResultProcessor, settings, Parameter
-from wlauto.exceptions import ResultProcessorError
-from wlauto.utils.types import boolean
-
-
-# IMPORTANT: when updating this schema, make sure to bump the version!
-SCHEMA_VERSION = '0.0.2'
-SCHEMA = [
- '''CREATE TABLE runs (
- uuid text,
- start_time datetime,
- end_time datetime,
- duration integer
- )''',
- '''CREATE TABLE workload_specs (
- id text,
- run_oid text,
- number_of_iterations integer,
- label text,
- workload_name text,
- boot_parameters text,
- runtime_parameters text,
- workload_parameters text
- )''',
- '''CREATE TABLE metrics (
- spec_oid int,
- iteration integer,
- metric text,
- value text,
- units text,
- lower_is_better integer
- )''',
- '''CREATE VIEW results AS
- SELECT uuid as run_uuid, spec_id, label as workload, iteration, metric, value, units, lower_is_better
- FROM metrics AS m INNER JOIN (
- SELECT ws.OID as spec_oid, ws.id as spec_id, uuid, label
- FROM workload_specs AS ws INNER JOIN runs AS r ON ws.run_oid = r.OID
- ) AS wsr ON wsr.spec_oid = m.spec_oid
- ''',
- '''CREATE TABLE __meta (
- schema_version text
- )''',
- '''INSERT INTO __meta VALUES ("{}")'''.format(SCHEMA_VERSION),
-]
-
-
-sqlite3.register_adapter(datetime, lambda x: x.isoformat())
-sqlite3.register_adapter(timedelta, lambda x: x.total_seconds())
-sqlite3.register_adapter(uuid.UUID, str)
-
-
-class SqliteResultProcessor(ResultProcessor):
-
- name = 'sqlite'
- description = """
- Stores results in an sqlite database.
-
- This may be used accumulate results of multiple runs in a single file.
-
- """
-
- name = 'sqlite'
- parameters = [
- Parameter('database', default=None,
- global_alias='sqlite_database',
- description=""" Full path to the sqlite database to be used. If this is not specified then
- a new database file will be created in the output directory. This setting can be
- used to accumulate results from multiple runs in a single database. If the
- specified file does not exist, it will be created, however the directory of the
- file must exist.
-
- .. note:: The value must resolve to an absolute path,
- relative paths are not allowed; however the
- value may contain environment variables and/or
- the home reference ~.
- """),
- Parameter('overwrite', kind=boolean, default=False,
- global_alias='sqlite_overwrite',
- description="""If ``True``, this will overwrite the database file
- if it already exists. If ``False`` (the default) data
- will be added to the existing file (provided schema
- versions match -- otherwise an error will be raised).
- """),
-
- ]
-
- def initialize(self, context):
- self._last_spec = None
- self._run_oid = None
- self._spec_oid = None
- if not os.path.exists(self.database):
- self._initdb()
- elif self.overwrite: # pylint: disable=no-member
- os.remove(self.database)
- self._initdb()
- else:
- self._validate_schema_version()
- self._update_run(context.run_info.uuid)
-
- def process_iteration_result(self, result, context):
- if self._last_spec != context.spec:
- self._update_spec(context.spec)
- metrics = [(self._spec_oid, context.current_iteration, m.name, str(m.value), m.units, int(m.lower_is_better))
- for m in result.metrics]
- with self._open_connecton() as conn:
- conn.executemany('INSERT INTO metrics VALUES (?,?,?,?,?,?)', metrics)
-
- def process_run_result(self, result, context):
- info = context.run_info
- with self._open_connecton() as conn:
- conn.execute('''UPDATE runs SET start_time=?, end_time=?, duration=?
- WHERE OID=?''', (info.start_time, info.end_time, info.duration, self._run_oid))
-
- def validate(self):
- if not self.database: # pylint: disable=access-member-before-definition
- self.database = os.path.join(settings.output_directory, 'results.sqlite')
- self.database = os.path.expandvars(os.path.expanduser(self.database))
-
- def _initdb(self):
- with self._open_connecton() as conn:
- for command in SCHEMA:
- conn.execute(command)
-
- def _validate_schema_version(self):
- with self._open_connecton() as conn:
- try:
- c = conn.execute('SELECT schema_version FROM __meta')
- found_version = c.fetchone()[0]
- except sqlite3.OperationalError:
- message = '{} does not appear to be a valid WA results database.'.format(self.database)
- raise ResultProcessorError(message)
- if found_version != SCHEMA_VERSION:
- message = 'Schema version in {} ({}) does not match current version ({}).'
- raise ResultProcessorError(message.format(self.database, found_version, SCHEMA_VERSION))
-
- def _update_run(self, run_uuid):
- with self._open_connecton() as conn:
- conn.execute('INSERT INTO runs (uuid) VALUES (?)', (run_uuid,))
- conn.commit()
- c = conn.execute('SELECT OID FROM runs WHERE uuid=?', (run_uuid,))
- self._run_oid = c.fetchone()[0]
-
- def _update_spec(self, spec):
- self._last_spec = spec
- spec_tuple = (spec.id, self._run_oid, spec.number_of_iterations, spec.label, spec.workload_name,
- json.dumps(spec.boot_parameters), json.dumps(spec.runtime_parameters),
- json.dumps(spec.workload_parameters))
- with self._open_connecton() as conn:
- conn.execute('INSERT INTO workload_specs VALUES (?,?,?,?,?,?,?,?)', spec_tuple)
- conn.commit()
- c = conn.execute('SELECT OID FROM workload_specs WHERE run_oid=? AND id=?', (self._run_oid, spec.id))
- self._spec_oid = c.fetchone()[0]
-
- @contextmanager
- def _open_connecton(self):
- conn = sqlite3.connect(self.database)
- try:
- yield conn
- finally:
- conn.commit()
diff --git a/wlauto/result_processors/standard.py b/wlauto/result_processors/standard.py
deleted file mode 100644
index 8bd2f254..00000000
--- a/wlauto/result_processors/standard.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=R0201
-"""
-This module contains a few "standard" result processors that write results to
-text files in various formats.
-
-"""
-import os
-import csv
-
-from wlauto import ResultProcessor, Parameter
-from wlauto.exceptions import ConfigError
-from wlauto.utils.types import list_of_strings
-
-
-class StandardProcessor(ResultProcessor):
-
- name = 'standard'
- description = """
- Creates a ``result.txt`` file for every iteration that contains metrics
- for that iteration.
-
- The metrics are written in ::
-
- metric = value [units]
-
- format.
-
- """
-
- def process_iteration_result(self, result, context):
- outfile = os.path.join(context.output_directory, 'result.txt')
- with open(outfile, 'w') as wfh:
- for metric in result.metrics:
- line = '{} = {}'.format(metric.name, metric.value)
- if metric.units:
- line = ' '.join([line, metric.units])
- line += '\n'
- wfh.write(line)
- context.add_artifact('iteration_result', 'result.txt', 'export')
-
-
-class CsvReportProcessor(ResultProcessor):
-
- name = 'csv'
- description = """
- Creates a ``results.csv`` in the output directory containing results for
- all iterations in CSV format, each line containing a single metric.
-
- """
-
- parameters = [
- Parameter('use_all_classifiers', kind=bool, default=False,
- global_alias='use_all_classifiers',
- description="""
- If set to ``True``, this will add a column for every classifier
- that features in at least one collected metric.
-
- .. note:: This cannot be ``True`` if ``extra_columns`` is set.
-
- """),
- Parameter('extra_columns', kind=list_of_strings,
- description="""
- List of classifiers to use as columns.
-
- .. note:: This cannot be set if ``use_all_classifiers`` is ``True``.
-
- """),
- ]
-
- def validate(self):
- if self.use_all_classifiers and self.extra_columns:
- raise ConfigError('extra_columns cannot be specified when use_all_classifiers is True')
-
- def initialize(self, context):
- self.results_so_far = [] # pylint: disable=attribute-defined-outside-init
-
- def process_iteration_result(self, result, context):
- self.results_so_far.append(result)
- self._write_results(self.results_so_far, context)
-
- def process_run_result(self, result, context):
- self._write_results(result.iteration_results, context)
- context.add_artifact('run_result_csv', 'results.csv', 'export')
-
- def _write_results(self, results, context):
- if self.use_all_classifiers:
- classifiers = set([])
- for ir in results:
- for metric in ir.metrics:
- classifiers.update(metric.classifiers.keys())
- extra_columns = list(classifiers)
- elif self.extra_columns:
- extra_columns = self.extra_columns
- else:
- extra_columns = []
-
- outfile = os.path.join(context.run_output_directory, 'results.csv')
- with open(outfile, 'wb') as wfh:
- writer = csv.writer(wfh)
- writer.writerow(['id', 'workload', 'iteration', 'metric', ] +
- extra_columns + ['value', 'units'])
- for ir in results:
- for metric in ir.metrics:
- row = ([ir.id, ir.spec.label, ir.iteration, metric.name] +
- [str(metric.classifiers.get(c, '')) for c in extra_columns] +
- [str(metric.value), metric.units or ''])
- writer.writerow(row)
-
-
-class SummaryCsvProcessor(ResultProcessor):
- """
- Similar to csv result processor, but only contains workloads' summary metrics.
-
- """
-
- name = 'summary_csv'
-
- def process_run_result(self, result, context):
- outfile = os.path.join(context.run_output_directory, 'summary.csv')
- with open(outfile, 'wb') as wfh:
- writer = csv.writer(wfh)
- writer.writerow(['id', 'workload', 'iteration', 'metric', 'value', 'units'])
- for result in result.iteration_results:
- for metric in result.metrics:
- if metric.name in result.workload.summary_metrics:
- row = [result.id, result.workload.name, result.iteration,
- metric.name, str(metric.value), metric.units or '']
- writer.writerow(row)
- context.add_artifact('run_result_summary', 'summary.csv', 'export')
diff --git a/wlauto/result_processors/status.py b/wlauto/result_processors/status.py
deleted file mode 100644
index 37819175..00000000
--- a/wlauto/result_processors/status.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=R0201
-import os
-import time
-from collections import Counter
-from wlauto import ResultProcessor
-from wlauto.utils.misc import write_table
-
-
-class StatusTxtReporter(ResultProcessor):
- name = 'status'
- description = """
- Outputs a txt file containing general status information about which runs
- failed and which were successful
-
- """
-
- def process_run_result(self, result, context):
- counter = Counter()
- for ir in result.iteration_results:
- counter[ir.status] += 1
-
- outfile = os.path.join(context.run_output_directory, 'status.txt')
- self.logger.info('Status available in {}'.format(outfile))
- with open(outfile, 'w') as wfh:
- wfh.write('Run name: {}\n'.format(context.run_info.run_name))
- wfh.write('Run status: {}\n'.format(context.run_result.status))
- wfh.write('Date: {}\n'.format(time.strftime("%c")))
- wfh.write('{}/{} iterations completed without error\n'.format(counter['OK'], len(result.iteration_results)))
- wfh.write('\n')
- status_lines = [map(str, [ir.id, ir.spec.label, ir.iteration, ir.status,
- ir.events and ir.events[0].message.split('\n')[0] or ''])
- for ir in result.iteration_results]
- write_table(status_lines, wfh, align='<<>><')
- context.add_artifact('run_status_summary', 'status.txt', 'export')
-
diff --git a/wlauto/result_processors/syeg.py b/wlauto/result_processors/syeg.py
deleted file mode 100644
index e6e9fdb4..00000000
--- a/wlauto/result_processors/syeg.py
+++ /dev/null
@@ -1,150 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-#pylint: disable=E1101,W0201
-import os
-import csv
-import math
-import re
-
-from wlauto import ResultProcessor, Parameter, File
-from wlauto.utils.misc import get_meansd
-
-
-class SyegResultProcessor(ResultProcessor):
-
- name = 'syeg_csv'
- description = """
- Generates a CSV results file in the format expected by SYEG toolchain.
-
- Multiple iterations get parsed into columns, adds additional columns for mean
- and standard deviation, append number of threads to metric names (where
- applicable) and add some metadata based on external mapping files.
-
- """
-
- parameters = [
- Parameter('outfile', kind=str, default='syeg_out.csv',
- description='The name of the output CSV file.'),
- ]
-
- def initialize(self, context):
- self.levelmap = self._read_map(context, 'final_sub.csv',
- 'Could not find metrics level mapping.')
- self.typemap = self._read_map(context, 'types.csv',
- 'Could not find benchmark suite types mapping.')
-
- def process_run_result(self, result, context):
- syeg_results = {}
- max_iterations = max(ir.iteration for ir in result.iteration_results)
- for ir in result.iteration_results:
- for metric in ir.metrics:
- key = ir.spec.label + metric.name
- if key not in syeg_results:
- syeg_result = SyegResult(max_iterations)
- syeg_result.suite = ir.spec.label
- syeg_result.version = getattr(ir.workload, 'apk_version', None)
- syeg_result.test = metric.name
- if hasattr(ir.workload, 'number_of_threads'):
- syeg_result.test += ' NT {} (Iterations/sec)'.format(ir.workload.number_of_threads)
- syeg_result.final_sub = self.levelmap.get(metric.name)
- syeg_result.lower_is_better = metric.lower_is_better
- syeg_result.device = context.device.name
- syeg_result.type = self._get_type(ir.workload.name, metric.name)
- syeg_results[key] = syeg_result
- syeg_results[key].runs[ir.iteration - 1] = metric.value
-
- columns = ['device', 'suite', 'test', 'version', 'final_sub', 'best', 'average', 'deviation']
- columns += ['run{}'.format(i + 1) for i in xrange(max_iterations)]
- columns += ['type', 'suite_version']
-
- outfile = os.path.join(context.output_directory, self.outfile)
- with open(outfile, 'wb') as wfh:
- writer = csv.writer(wfh)
- writer.writerow(columns)
- for syeg_result in syeg_results.values():
- writer.writerow([getattr(syeg_result, c) for c in columns])
- context.add_artifact('syeg_csv', outfile, 'export')
-
- def _get_type(self, workload, metric):
- metric = metric.lower()
- type_ = self.typemap.get(workload)
- if type_ == 'mixed':
- if 'native' in metric:
- type_ = 'native'
- if ('java' in metric) or ('dalvik' in metric):
- type_ = 'dalvik'
- return type_
-
- def _read_map(self, context, filename, errormsg):
- mapfile = context.resolver.get(File(self, filename))
- if mapfile:
- with open(mapfile) as fh:
- reader = csv.reader(fh)
- return dict([c.strip() for c in r] for r in reader)
- else:
- self.logger.warning(errormsg)
- return {}
-
-
-class SyegResult(object):
-
- @property
- def average(self):
- if not self._mean:
- self._mean, self._sd = get_meansd(self.run_values)
- return self._mean
-
- @property
- def deviation(self):
- if not self._sd:
- self._mean, self._sd = get_meansd(self.run_values)
- return self._sd
-
- @property
- def run_values(self):
- return [r for r in self.runs if not math.isnan(r)]
-
- @property
- def best(self):
- if self.lower_is_better:
- return min(self.run_values)
- else:
- return max(self.run_values)
-
- @property
- def suite_version(self):
- return ' '.join(map(str, [self.suite, self.version]))
-
- def __init__(self, max_iter):
- self.runs = [float('nan') for _ in xrange(max_iter)]
- self.device = None
- self.suite = None
- self.test = None
- self.version = None
- self.final_sub = None
- self.lower_is_better = None
- self.type = None
- self._mean = None
- self._sd = None
-
- def __getattr__(self, name):
- match = re.search(r'run(\d+)', name)
- if not match:
- raise AttributeError(name)
- return self.runs[int(match.group(1)) - 1]
-
-
diff --git a/wlauto/tests/README b/wlauto/tests/README
deleted file mode 100644
index d5168289..00000000
--- a/wlauto/tests/README
+++ /dev/null
@@ -1,12 +0,0 @@
-To run these tests you need to have nose package installed. You can get it from PyPI by using pip:
-
- pip install nose
-
-Or, if you're on Ubuntu, you can get it from distribution repositories:
-
- sudo apt-get install python-nose
-
-Once you have it, you can the tests by executing the follwing the project's top-level directory (the
-one with setup.py):
-
- nosetests
diff --git a/wlauto/tests/__init__.py b/wlauto/tests/__init__.py
deleted file mode 100644
index cd5d64d6..00000000
--- a/wlauto/tests/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/tests/data/extensions/devices/test_device.py b/wlauto/tests/data/extensions/devices/test_device.py
deleted file mode 100644
index 75eeb1f3..00000000
--- a/wlauto/tests/data/extensions/devices/test_device.py
+++ /dev/null
@@ -1,49 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import Device
-
-
-class TestDevice(Device):
-
- name = 'test-device'
-
- def __init__(self, *args, **kwargs):
- self.modules = []
- self.boot_called = 0
- self.push_file_called = 0
- self.pull_file_called = 0
- self.execute_called = 0
- self.set_sysfile_int_called = 0
- self.close_called = 0
-
- def boot(self):
- self.boot_called += 1
-
- def push_file(self, source, dest):
- self.push_file_called += 1
-
- def pull_file(self, source, dest):
- self.pull_file_called += 1
-
- def execute(self, command):
- self.execute_called += 1
-
- def set_sysfile_int(self, file, value):
- self.set_sysfile_int_called += 1
-
- def close(self, command):
- self.close_called += 1
diff --git a/wlauto/tests/data/interrupts/after b/wlauto/tests/data/interrupts/after
deleted file mode 100755
index 93145098..00000000
--- a/wlauto/tests/data/interrupts/after
+++ /dev/null
@@ -1,98 +0,0 @@
- CPU0 CPU1 CPU2 CPU3 CPU4 CPU5 CPU6 CPU7
- 65: 0 0 0 0 0 0 0 0 GIC dma-pl330.2
- 66: 0 0 0 0 0 0 0 0 GIC dma-pl330.0
- 67: 0 0 0 0 0 0 0 0 GIC dma-pl330.1
- 74: 0 0 0 0 0 0 0 0 GIC s3c2410-wdt
- 85: 2 0 0 0 0 0 0 0 GIC exynos4210-uart
- 89: 368 0 0 0 0 0 0 0 GIC s3c2440-i2c.1
- 90: 0 0 0 0 0 0 0 0 GIC s3c2440-i2c.2
- 92: 1294 0 0 0 0 0 0 0 GIC exynos5-hs-i2c.0
- 95: 831 0 0 0 0 0 0 0 GIC exynos5-hs-i2c.3
-103: 1 0 0 0 0 0 0 0 GIC ehci_hcd:usb1, ohci_hcd:usb2
-104: 7304 0 0 0 0 0 0 0 GIC xhci_hcd:usb3, exynos-ss-udc.0
-105: 0 0 0 0 0 0 0 0 GIC xhci_hcd:usb5
-106: 0 0 0 0 0 0 0 0 GIC mali.0
-107: 16429 0 0 0 0 0 0 0 GIC dw-mci
-108: 1 0 0 0 0 0 0 0 GIC dw-mci
-109: 0 0 0 0 0 0 0 0 GIC dw-mci
-114: 28074 0 0 0 0 0 0 0 GIC mipi-dsi
-117: 0 0 0 0 0 0 0 0 GIC exynos-gsc
-118: 0 0 0 0 0 0 0 0 GIC exynos-gsc
-121: 0 0 0 0 0 0 0 0 GIC exynos5-jpeg-hx
-123: 7 0 0 0 0 0 0 0 GIC s5p-fimg2d
-126: 0 0 0 0 0 0 0 0 GIC s5p-mixer
-127: 0 0 0 0 0 0 0 0 GIC hdmi-int
-128: 0 0 0 0 0 0 0 0 GIC s5p-mfc-v6
-142: 0 0 0 0 0 0 0 0 GIC dma-pl330.3
-146: 0 0 0 0 0 0 0 0 GIC s5p-tvout-cec
-149: 1035 0 0 0 0 0 0 0 GIC mali.0
-152: 26439 0 0 0 0 0 0 0 GIC mct_tick0
-153: 0 2891 0 0 0 0 0 0 GIC mct_tick1
-154: 0 0 3969 0 0 0 0 0 GIC mct_tick2
-155: 0 0 0 2385 0 0 0 0 GIC mct_tick3
-160: 0 0 0 0 8038 0 0 0 GIC mct_tick4
-161: 0 0 0 0 0 8474 0 0 GIC mct_tick5
-162: 0 0 0 0 0 0 7842 0 GIC mct_tick6
-163: 0 0 0 0 0 0 0 7827 GIC mct_tick7
-200: 0 0 0 0 0 0 0 0 GIC exynos5-jpeg-hx
-201: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.29
-218: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.25
-220: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.27
-224: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.19
-251: 320 0 0 0 0 0 0 0 GIC mali.0
-252: 0 0 0 0 0 0 0 0 GIC exynos5-scaler
-253: 0 0 0 0 0 0 0 0 GIC exynos5-scaler
-254: 0 0 0 0 0 0 0 0 GIC exynos5-scaler
-272: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.5
-274: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.6
-280: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.11
-282: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.30
-284: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.12
-286: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.17
-288: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.4
-290: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.20
-294: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-296: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-298: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-300: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-302: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
-306: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.0
-316: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.2
-325: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.0
-332: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
-340: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
-342: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-344: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
-405: 327 0 0 0 0 0 0 0 combiner s3c_fb
-409: 0 0 0 0 0 0 0 0 combiner mcuctl
-414: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.28
-434: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.22
-436: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.23
-438: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.26
-443: 12 0 0 0 0 0 0 0 combiner mct_comp_irq
-446: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.21
-449: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.13
-453: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.15
-474: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.24
-512: 0 0 0 0 0 0 0 0 exynos-eint gpio-keys: KEY_POWER
-518: 0 0 0 0 0 0 0 0 exynos-eint drd_switch_vbus
-524: 0 0 0 0 0 0 0 0 exynos-eint gpio-keys: KEY_HOMEPAGE
-526: 1 0 0 0 0 0 0 0 exynos-eint HOST_DETECT
-527: 1 0 0 0 0 0 0 0 exynos-eint drd_switch_id
-531: 1 0 0 0 0 0 0 0 exynos-eint drd_switch_vbus
-532: 1 0 0 0 0 0 0 0 exynos-eint drd_switch_id
-537: 3 0 0 0 0 0 0 0 exynos-eint mxt540e_ts
-538: 0 0 0 0 0 0 0 0 exynos-eint sec-pmic-irq
-543: 1 0 0 0 0 0 0 0 exynos-eint hdmi-ext
-544: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_VOLUMEDOWN
-545: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_VOLUMEUP
-546: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_MENU
-547: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_BACK
-655: 0 0 0 0 0 0 0 0 sec-pmic rtc-alarm0
-IPI0: 0 0 0 0 0 0 0 0 Timer broadcast interrupts
-IPI1: 8823 7185 4642 5652 2370 2069 1452 1351 Rescheduling interrupts
-IPI2: 4 7 8 6 8 7 8 8 Function call interrupts
-IPI3: 1 0 0 0 0 0 0 0 Single function call interrupts
-IPI4: 0 0 0 0 0 0 0 0 CPU stop interrupts
-IPI5: 0 0 0 0 0 0 0 0 CPU backtrace
-Err: 0
diff --git a/wlauto/tests/data/interrupts/before b/wlauto/tests/data/interrupts/before
deleted file mode 100755
index a332b8e9..00000000
--- a/wlauto/tests/data/interrupts/before
+++ /dev/null
@@ -1,97 +0,0 @@
- CPU0 CPU1 CPU2 CPU3 CPU4 CPU5 CPU6 CPU7
- 65: 0 0 0 0 0 0 0 0 GIC dma-pl330.2
- 66: 0 0 0 0 0 0 0 0 GIC dma-pl330.0
- 67: 0 0 0 0 0 0 0 0 GIC dma-pl330.1
- 74: 0 0 0 0 0 0 0 0 GIC s3c2410-wdt
- 85: 2 0 0 0 0 0 0 0 GIC exynos4210-uart
- 89: 368 0 0 0 0 0 0 0 GIC s3c2440-i2c.1
- 90: 0 0 0 0 0 0 0 0 GIC s3c2440-i2c.2
- 92: 1204 0 0 0 0 0 0 0 GIC exynos5-hs-i2c.0
- 95: 831 0 0 0 0 0 0 0 GIC exynos5-hs-i2c.3
-103: 1 0 0 0 0 0 0 0 GIC ehci_hcd:usb1, ohci_hcd:usb2
-104: 7199 0 0 0 0 0 0 0 GIC xhci_hcd:usb3, exynos-ss-udc.0
-105: 0 0 0 0 0 0 0 0 GIC xhci_hcd:usb5
-106: 0 0 0 0 0 0 0 0 GIC mali.0
-107: 16429 0 0 0 0 0 0 0 GIC dw-mci
-108: 1 0 0 0 0 0 0 0 GIC dw-mci
-109: 0 0 0 0 0 0 0 0 GIC dw-mci
-114: 26209 0 0 0 0 0 0 0 GIC mipi-dsi
-117: 0 0 0 0 0 0 0 0 GIC exynos-gsc
-118: 0 0 0 0 0 0 0 0 GIC exynos-gsc
-121: 0 0 0 0 0 0 0 0 GIC exynos5-jpeg-hx
-123: 7 0 0 0 0 0 0 0 GIC s5p-fimg2d
-126: 0 0 0 0 0 0 0 0 GIC s5p-mixer
-127: 0 0 0 0 0 0 0 0 GIC hdmi-int
-128: 0 0 0 0 0 0 0 0 GIC s5p-mfc-v6
-142: 0 0 0 0 0 0 0 0 GIC dma-pl330.3
-146: 0 0 0 0 0 0 0 0 GIC s5p-tvout-cec
-149: 1004 0 0 0 0 0 0 0 GIC mali.0
-152: 26235 0 0 0 0 0 0 0 GIC mct_tick0
-153: 0 2579 0 0 0 0 0 0 GIC mct_tick1
-154: 0 0 3726 0 0 0 0 0 GIC mct_tick2
-155: 0 0 0 2262 0 0 0 0 GIC mct_tick3
-161: 0 0 0 0 0 2554 0 0 GIC mct_tick5
-162: 0 0 0 0 0 0 1911 0 GIC mct_tick6
-163: 0 0 0 0 0 0 0 1928 GIC mct_tick7
-200: 0 0 0 0 0 0 0 0 GIC exynos5-jpeg-hx
-201: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.29
-218: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.25
-220: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.27
-224: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.19
-251: 312 0 0 0 0 0 0 0 GIC mali.0
-252: 0 0 0 0 0 0 0 0 GIC exynos5-scaler
-253: 0 0 0 0 0 0 0 0 GIC exynos5-scaler
-254: 0 0 0 0 0 0 0 0 GIC exynos5-scaler
-272: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.5
-274: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.6
-280: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.11
-282: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.30
-284: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.12
-286: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.17
-288: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.4
-290: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.20
-294: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-296: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-298: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-300: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-302: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
-306: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.0
-316: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.2
-325: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.0
-332: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
-340: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
-342: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
-344: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
-405: 322 0 0 0 0 0 0 0 combiner s3c_fb
-409: 0 0 0 0 0 0 0 0 combiner mcuctl
-414: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.28
-434: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.22
-436: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.23
-438: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.26
-443: 12 0 0 0 0 0 0 0 combiner mct_comp_irq
-446: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.21
-449: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.13
-453: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.15
-474: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.24
-512: 0 0 0 0 0 0 0 0 exynos-eint gpio-keys: KEY_POWER
-518: 0 0 0 0 0 0 0 0 exynos-eint drd_switch_vbus
-524: 0 0 0 0 0 0 0 0 exynos-eint gpio-keys: KEY_HOMEPAGE
-526: 1 0 0 0 0 0 0 0 exynos-eint HOST_DETECT
-527: 1 0 0 0 0 0 0 0 exynos-eint drd_switch_id
-531: 1 0 0 0 0 0 0 0 exynos-eint drd_switch_vbus
-532: 1 0 0 0 0 0 0 0 exynos-eint drd_switch_id
-537: 3 0 0 0 0 0 0 0 exynos-eint mxt540e_ts
-538: 0 0 0 0 0 0 0 0 exynos-eint sec-pmic-irq
-543: 1 0 0 0 0 0 0 0 exynos-eint hdmi-ext
-544: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_VOLUMEDOWN
-545: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_VOLUMEUP
-546: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_MENU
-547: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_BACK
-655: 0 0 0 0 0 0 0 0 sec-pmic rtc-alarm0
-IPI0: 0 0 0 0 0 0 0 0 Timer broadcast interrupts
-IPI1: 8751 7147 4615 5623 2334 2066 1449 1348 Rescheduling interrupts
-IPI2: 3 6 7 6 7 6 7 7 Function call interrupts
-IPI3: 1 0 0 0 0 0 0 0 Single function call interrupts
-IPI4: 0 0 0 0 0 0 0 0 CPU stop interrupts
-IPI5: 0 0 0 0 0 0 0 0 CPU backtrace
-Err: 0
diff --git a/wlauto/tests/data/interrupts/result b/wlauto/tests/data/interrupts/result
deleted file mode 100755
index b9ec2dd1..00000000
--- a/wlauto/tests/data/interrupts/result
+++ /dev/null
@@ -1,98 +0,0 @@
- CPU0 CPU1 CPU2 CPU3 CPU4 CPU5 CPU6 CPU7
- 65: 0 0 0 0 0 0 0 0 GIC dma-pl330.2
- 66: 0 0 0 0 0 0 0 0 GIC dma-pl330.0
- 67: 0 0 0 0 0 0 0 0 GIC dma-pl330.1
- 74: 0 0 0 0 0 0 0 0 GIC s3c2410-wdt
- 85: 0 0 0 0 0 0 0 0 GIC exynos4210-uart
- 89: 0 0 0 0 0 0 0 0 GIC s3c2440-i2c.1
- 90: 0 0 0 0 0 0 0 0 GIC s3c2440-i2c.2
- 92: 90 0 0 0 0 0 0 0 GIC exynos5-hs-i2c.0
- 95: 0 0 0 0 0 0 0 0 GIC exynos5-hs-i2c.3
- 103: 0 0 0 0 0 0 0 0 GIC ehci_hcd:usb1, ohci_hcd:usb2
- 104: 105 0 0 0 0 0 0 0 GIC xhci_hcd:usb3, exynos-ss-udc.0
- 105: 0 0 0 0 0 0 0 0 GIC xhci_hcd:usb5
- 106: 0 0 0 0 0 0 0 0 GIC mali.0
- 107: 0 0 0 0 0 0 0 0 GIC dw-mci
- 108: 0 0 0 0 0 0 0 0 GIC dw-mci
- 109: 0 0 0 0 0 0 0 0 GIC dw-mci
- 114: 1865 0 0 0 0 0 0 0 GIC mipi-dsi
- 117: 0 0 0 0 0 0 0 0 GIC exynos-gsc
- 118: 0 0 0 0 0 0 0 0 GIC exynos-gsc
- 121: 0 0 0 0 0 0 0 0 GIC exynos5-jpeg-hx
- 123: 0 0 0 0 0 0 0 0 GIC s5p-fimg2d
- 126: 0 0 0 0 0 0 0 0 GIC s5p-mixer
- 127: 0 0 0 0 0 0 0 0 GIC hdmi-int
- 128: 0 0 0 0 0 0 0 0 GIC s5p-mfc-v6
- 142: 0 0 0 0 0 0 0 0 GIC dma-pl330.3
- 146: 0 0 0 0 0 0 0 0 GIC s5p-tvout-cec
- 149: 31 0 0 0 0 0 0 0 GIC mali.0
- 152: 204 0 0 0 0 0 0 0 GIC mct_tick0
- 153: 0 312 0 0 0 0 0 0 GIC mct_tick1
- 154: 0 0 243 0 0 0 0 0 GIC mct_tick2
- 155: 0 0 0 123 0 0 0 0 GIC mct_tick3
-> 160: 0 0 0 0 8038 0 0 0 GIC mct_tick4
- 161: 0 0 0 0 0 5920 0 0 GIC mct_tick5
- 162: 0 0 0 0 0 0 5931 0 GIC mct_tick6
- 163: 0 0 0 0 0 0 0 5899 GIC mct_tick7
- 200: 0 0 0 0 0 0 0 0 GIC exynos5-jpeg-hx
- 201: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.29
- 218: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.25
- 220: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.27
- 224: 0 0 0 0 0 0 0 0 GIC exynos-sysmmu.19
- 251: 8 0 0 0 0 0 0 0 GIC mali.0
- 252: 0 0 0 0 0 0 0 0 GIC exynos5-scaler
- 253: 0 0 0 0 0 0 0 0 GIC exynos5-scaler
- 254: 0 0 0 0 0 0 0 0 GIC exynos5-scaler
- 272: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.5
- 274: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.6
- 280: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.11
- 282: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.30
- 284: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.12
- 286: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.17
- 288: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.4
- 290: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.20
- 294: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
- 296: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
- 298: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
- 300: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
- 302: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
- 306: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.0
- 316: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.2
- 325: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.0
- 332: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
- 340: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
- 342: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.9
- 344: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.16
- 405: 5 0 0 0 0 0 0 0 combiner s3c_fb
- 409: 0 0 0 0 0 0 0 0 combiner mcuctl
- 414: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.28
- 434: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.22
- 436: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.23
- 438: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.26
- 443: 0 0 0 0 0 0 0 0 combiner mct_comp_irq
- 446: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.21
- 449: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.13
- 453: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.15
- 474: 0 0 0 0 0 0 0 0 combiner exynos-sysmmu.24
- 512: 0 0 0 0 0 0 0 0 exynos-eint gpio-keys: KEY_POWER
- 518: 0 0 0 0 0 0 0 0 exynos-eint drd_switch_vbus
- 524: 0 0 0 0 0 0 0 0 exynos-eint gpio-keys: KEY_HOMEPAGE
- 526: 0 0 0 0 0 0 0 0 exynos-eint HOST_DETECT
- 527: 0 0 0 0 0 0 0 0 exynos-eint drd_switch_id
- 531: 0 0 0 0 0 0 0 0 exynos-eint drd_switch_vbus
- 532: 0 0 0 0 0 0 0 0 exynos-eint drd_switch_id
- 537: 0 0 0 0 0 0 0 0 exynos-eint mxt540e_ts
- 538: 0 0 0 0 0 0 0 0 exynos-eint sec-pmic-irq
- 543: 0 0 0 0 0 0 0 0 exynos-eint hdmi-ext
- 544: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_VOLUMEDOWN
- 545: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_VOLUMEUP
- 546: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_MENU
- 547: 0 0 0 0 0 0 0 0 s5p_gpioint gpio-keys: KEY_BACK
- 655: 0 0 0 0 0 0 0 0 sec-pmic rtc-alarm0
- IPI0: 0 0 0 0 0 0 0 0 Timer broadcast interrupts
- IPI1: 72 38 27 29 36 3 3 3 Rescheduling interrupts
- IPI2: 1 1 1 0 1 1 1 1 Function call interrupts
- IPI3: 0 0 0 0 0 0 0 0 Single function call interrupts
- IPI4: 0 0 0 0 0 0 0 0 CPU stop interrupts
- IPI5: 0 0 0 0 0 0 0 0 CPU backtrace
- Err: 0
diff --git a/wlauto/tests/data/logcat.2.log b/wlauto/tests/data/logcat.2.log
deleted file mode 100644
index eafed2b8..00000000
--- a/wlauto/tests/data/logcat.2.log
+++ /dev/null
@@ -1,14 +0,0 @@
---------- beginning of /dev/log/main
-D/TextView( 2468): 7:07
-D/TextView( 2468): 7:07
-D/TextView( 2468): Thu, June 27
---------- beginning of /dev/log/system
-D/TextView( 3099): CaffeineMark results
-D/TextView( 3099): Overall score:
-D/TextView( 3099): Rating
-D/TextView( 3099): Rank
-D/TextView( 3099): 0
-D/TextView( 3099): Details
-D/TextView( 3099): Publish
-D/TextView( 3099): Top 10
-D/TextView( 3099): 3672
diff --git a/wlauto/tests/data/logcat.log b/wlauto/tests/data/logcat.log
deleted file mode 100644
index 48703402..00000000
--- a/wlauto/tests/data/logcat.log
+++ /dev/null
@@ -1,10 +0,0 @@
---------- beginning of /dev/log/main
---------- beginning of /dev/log/system
-D/TextView( 2462): 5:05
-D/TextView( 2462): 5:05
-D/TextView( 2462): Mon, June 24
-D/TextView( 3072): Stop Test
-D/TextView( 3072): Testing CPU and memory…
-D/TextView( 3072): 0%
-D/TextView( 3072): Testing CPU and memory…
-
diff --git a/wlauto/tests/data/test-agenda-bad-syntax.yaml b/wlauto/tests/data/test-agenda-bad-syntax.yaml
deleted file mode 100644
index 11d0e2ef..00000000
--- a/wlauto/tests/data/test-agenda-bad-syntax.yaml
+++ /dev/null
@@ -1 +0,0 @@
-[ewqh
diff --git a/wlauto/tests/data/test-agenda-not-dict.yaml b/wlauto/tests/data/test-agenda-not-dict.yaml
deleted file mode 100644
index 345e6aef..00000000
--- a/wlauto/tests/data/test-agenda-not-dict.yaml
+++ /dev/null
@@ -1 +0,0 @@
-Test
diff --git a/wlauto/tests/data/test-agenda.yaml b/wlauto/tests/data/test-agenda.yaml
deleted file mode 100644
index 85163a40..00000000
--- a/wlauto/tests/data/test-agenda.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-global:
- iterations: 8
- boot_parameters:
- os_mode: mp_a15_bootcluster
- runtime_parameters:
- a7_governor: Interactive
- a15_governor: Interactive2
- a7_cores: 3
- a15_cores: 2
-workloads:
- - id: 1c
- workload_name: bbench_with_audio
- - id: 1d
- workload_name: Bbench_with_audio
- runtime_parameters:
- os_mode: mp_a7_only
- a7_cores: 0
- iterations: 4
- - id: 1e
- workload_name: audio
- - id: 1f
- workload_name: antutu
- runtime_parameters:
- a7_cores: 1
- a15_cores: 1
diff --git a/wlauto/tests/data/test-config.py b/wlauto/tests/data/test-config.py
deleted file mode 100644
index 56c3288b..00000000
--- a/wlauto/tests/data/test-config.py
+++ /dev/null
@@ -1,17 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-device = 'TEST'
diff --git a/wlauto/tests/test_agenda.py b/wlauto/tests/test_agenda.py
deleted file mode 100644
index 7b05d03d..00000000
--- a/wlauto/tests/test_agenda.py
+++ /dev/null
@@ -1,172 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E0611
-# pylint: disable=R0201
-import os
-from StringIO import StringIO
-from unittest import TestCase
-
-from nose.tools import assert_equal, assert_in, raises
-
-from wlauto.core.agenda import Agenda
-from wlauto.exceptions import ConfigError
-from wlauto.utils.serializer import SerializerSyntaxError
-
-
-YAML_TEST_FILE = os.path.join(os.path.dirname(__file__), 'data', 'test-agenda.yaml')
-
-invalid_agenda_text = """
-workloads:
- - id: 1
- workload_parameters:
- test: 1
-"""
-invalid_agenda = StringIO(invalid_agenda_text)
-invalid_agenda.name = 'invalid1.yaml'
-
-duplicate_agenda_text = """
-global:
- iterations: 1
-workloads:
- - id: 1
- workload_name: antutu
- workload_parameters:
- test: 1
- - id: 1
- workload_name: andebench
-"""
-duplicate_agenda = StringIO(duplicate_agenda_text)
-duplicate_agenda.name = 'invalid2.yaml'
-
-short_agenda_text = """
-workloads: [antutu, linpack, andebench]
-"""
-short_agenda = StringIO(short_agenda_text)
-short_agenda.name = 'short.yaml'
-
-default_ids_agenda_text = """
-workloads:
- - antutu
- - id: 1
- name: linpack
- - id: test
- name: andebench
- params:
- number_of_threads: 1
- - vellamo
-"""
-default_ids_agenda = StringIO(default_ids_agenda_text)
-default_ids_agenda.name = 'default_ids.yaml'
-
-sectioned_agenda_text = """
-sections:
- - id: sec1
- runtime_params:
- dp: one
- workloads:
- - antutu
- - andebench
- - name: linpack
- runtime_params:
- dp: two
- - id: sec2
- runtime_params:
- dp: three
- workloads:
- - antutu
-workloads:
- - nenamark
-"""
-sectioned_agenda = StringIO(sectioned_agenda_text)
-sectioned_agenda.name = 'sectioned.yaml'
-
-dup_sectioned_agenda_text = """
-sections:
- - id: sec1
- workloads:
- - antutu
- - id: sec1
- workloads:
- - andebench
-workloads:
- - nenamark
-"""
-dup_sectioned_agenda = StringIO(dup_sectioned_agenda_text)
-dup_sectioned_agenda.name = 'dup-sectioned.yaml'
-
-caps_agenda_text = """
-config:
- device: TC2
-global:
- runtime_parameters:
- sysfile_values:
- /sys/test/MyFile: 1
- /sys/test/other file: 2
-workloads:
- - id: 1
- name: linpack
-"""
-caps_agenda = StringIO(caps_agenda_text)
-caps_agenda.name = 'caps.yaml'
-
-bad_syntax_agenda_text = """
-config:
- # tab on the following line
- reboot_policy: never
-workloads:
- - antutu
-"""
-bad_syntax_agenda = StringIO(bad_syntax_agenda_text)
-bad_syntax_agenda.name = 'bad_syntax.yaml'
-
-section_ids_test_text = """
-config:
- device: TC2
- reboot_policy: never
-workloads:
- - name: bbench
- id: bbench
- - name: audio
-sections:
- - id: foo
- - id: bar
-"""
-section_ids_agenda = StringIO(section_ids_test_text)
-section_ids_agenda.name = 'section_ids.yaml'
-
-
-class AgendaTest(TestCase):
-
- def test_yaml_load(self):
- agenda = Agenda(YAML_TEST_FILE)
- assert_equal(len(agenda.workloads), 4)
-
- def test_yaml_missing_field(self):
- try:
- Agenda(invalid_agenda)
- except ConfigError, e:
- assert_in('workload name', e.message)
- else:
- raise Exception('ConfigError was not raised for an invalid agenda.')
-
- @raises(ConfigError)
- def test_dup_sections(self):
- Agenda(dup_sectioned_agenda)
-
- @raises(SerializerSyntaxError)
- def test_bad_syntax(self):
- Agenda(bad_syntax_agenda)
diff --git a/wlauto/tests/test_config.py b/wlauto/tests/test_config.py
deleted file mode 100644
index 584d9dd9..00000000
--- a/wlauto/tests/test_config.py
+++ /dev/null
@@ -1,199 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E0611,R0201
-import os
-import tempfile
-from unittest import TestCase
-
-from nose.tools import assert_equal, assert_in, raises
-
-from wlauto.core.bootstrap import ConfigLoader
-from wlauto.core.agenda import AgendaWorkloadEntry, AgendaGlobalEntry, Agenda
-from wlauto.core.configuration import RunConfiguration
-from wlauto.exceptions import ConfigError
-
-
-DATA_DIR = os.path.join(os.path.dirname(__file__), 'data')
-
-BAD_CONFIG_TEXT = """device = 'TEST
-device_config = 'TEST-CONFIG'"""
-
-
-LIST_PARAMS_AGENDA_TEXT = """
-config:
- instrumentation: [list_params]
- list_params:
- param: [0.1, 0.1, 0.1]
-workloads:
- - dhrystone
-"""
-
-INSTRUMENTATION_AGENDA_TEXT = """
-config:
- instrumentation: [execution_time]
-workloads:
- - dhrystone
- - name: angrybirds
- instrumentation: [fsp]
-"""
-
-
-class MockPluginLoader(object):
-
- def __init__(self):
- self.aliases = {}
- self.global_param_aliases = {}
- self.plugins = {
- 'defaults_workload': DefaultsWorkload(),
- 'list_params': ListParamstrument(),
- }
-
- def get_plugin_class(self, name, kind=None): # pylint: disable=unused-argument
- return self.plugins.get(name, NamedMock(name))
-
- def resolve_alias(self, name):
- return name, {}
-
- def get_default_config(self, name): # pylint: disable=unused-argument
- ec = self.get_plugin_class(name)
- return {p.name: p.default for p in ec.parameters}
-
- def has_plugin(self, name):
- return name in self.aliases or name in self.plugins
-
-
-class MockAgenda(object):
-
- def __init__(self, *args):
- self.config = {}
- self.global_ = AgendaGlobalEntry()
- self.sections = []
- self.workloads = args
-
-
-class NamedMock(object):
-
- def __init__(self, name):
- self.__attrs = {
- 'global_alias': None
- }
- self.name = name
- self.parameters = []
-
- def __getattr__(self, name):
- if name not in self.__attrs:
- self.__attrs[name] = NamedMock(name)
- return self.__attrs[name]
-
-
-class DefaultsWorkload(object):
-
- def __init__(self):
- self.name = 'defaults_workload'
- self.parameters = [NamedMock('param')]
- self.parameters[0].default = [1, 2]
-
-
-class ListParamstrument(object):
-
- def __init__(self):
- self.name = 'list_params'
- self.parameters = [NamedMock('param')]
- self.parameters[0].default = []
-
-
-class ConfigLoaderTest(TestCase):
-
- def setUp(self):
- self.filepath = tempfile.mktemp()
- with open(self.filepath, 'w') as wfh:
- wfh.write(BAD_CONFIG_TEXT)
-
- def test_load(self):
- test_cfg_file = os.path.join(DATA_DIR, 'test-config.py')
- config = ConfigLoader()
- config.update(test_cfg_file)
- assert_equal(config.device, 'TEST')
-
- @raises(ConfigError)
- def test_load_bad(self):
- config_loader = ConfigLoader()
- config_loader.update(self.filepath)
-
- def test_load_duplicate(self):
- config_loader = ConfigLoader()
- config_loader.update(dict(instrumentation=['test']))
- config_loader.update(dict(instrumentation=['test']))
- assert_equal(config_loader.instrumentation, ['test'])
-
- def tearDown(self):
- os.unlink(self.filepath)
-
-
-class ConfigTest(TestCase):
-
- def setUp(self):
- self.config = RunConfiguration(MockPluginLoader())
- self.config.load_config({'device': 'MockDevice'})
-
- def test_case(self):
- devparams = {
- 'sysfile_values': {
- '/sys/test/MyFile': 1,
- '/sys/test/other file': 2,
- }
- }
- ws = AgendaWorkloadEntry(id='a', iterations=1, name='linpack', runtime_parameters=devparams)
- self.config.set_agenda(MockAgenda(ws))
- spec = self.config.workload_specs[0]
- assert_in('/sys/test/MyFile', spec.runtime_parameters['sysfile_values'])
- assert_in('/sys/test/other file', spec.runtime_parameters['sysfile_values'])
-
- def test_list_defaults_params(self):
- ws = AgendaWorkloadEntry(id='a', iterations=1,
- name='defaults_workload', workload_parameters={'param': [3]})
- self.config.set_agenda(MockAgenda(ws))
- spec = self.config.workload_specs[0]
- assert_equal(spec.workload_parameters, {'param': [3]})
-
- def test_exetension_params_lists(self):
- a = Agenda(LIST_PARAMS_AGENDA_TEXT)
- self.config.set_agenda(a)
- self.config.finalize()
- assert_equal(self.config.instrumentation['list_params']['param'], [0.1, 0.1, 0.1])
-
- def test_instrumentation_specification(self):
- a = Agenda(INSTRUMENTATION_AGENDA_TEXT)
- self.config.set_agenda(a)
- self.config.finalize()
- assert_equal(self.config.workload_specs[0].instrumentation, ['execution_time'])
- assert_equal(self.config.workload_specs[1].instrumentation, ['fsp', 'execution_time'])
-
- def test_remove_instrument(self):
- self.config.load_config({'instrumentation': ['list_params']})
- a = Agenda('{config: {instrumentation: [~list_params] }}')
- self.config.set_agenda(a)
- self.config.finalize()
- assert_equal(self.config.instrumentation, {})
-
- def test_global_instrumentation(self):
- self.config.load_config({'instrumentation': ['global_instrument']})
- ws = AgendaWorkloadEntry(id='a', iterations=1, name='linpack', instrumentation=['local_instrument'])
- self.config.set_agenda(MockAgenda(ws))
- self.config.finalize()
- assert_equal(self.config.workload_specs[0].instrumentation,
- ['local_instrument', 'global_instrument'])
diff --git a/wlauto/tests/test_configuration.py b/wlauto/tests/test_configuration.py
deleted file mode 100644
index 3884763c..00000000
--- a/wlauto/tests/test_configuration.py
+++ /dev/null
@@ -1,621 +0,0 @@
-# pylint: disable=R0201
-from copy import deepcopy, copy
-
-from unittest import TestCase
-
-from nose.tools import assert_equal, assert_is
-from mock.mock import Mock
-
-from wlauto.exceptions import ConfigError
-from wlauto.core.configuration.tree import SectionNode
-from wlauto.core.configuration.configuration import (ConfigurationPoint,
- Configuration,
- RunConfiguration,
- merge_using_priority_specificity,
- get_type_name)
-from wlauto.core.configuration.plugin_cache import PluginCache, GENERIC_CONFIGS
-from wlauto.utils.types import obj_dict
-# A1
-# / \
-# B1 B2
-# / \ / \
-# C1 C2 C3 C4
-# \
-# D1
-a1 = SectionNode({"id": "A1"})
-b1 = a1.add_section({"id": "B1"})
-b2 = a1.add_section({"id": "B2"})
-c1 = b1.add_section({"id": "C1"})
-c2 = b1.add_section({"id": "C2"})
-c3 = b2.add_section({"id": "C3"})
-c4 = b2.add_section({"id": "C4"})
-d1 = c2.add_section({"id": "D1"})
-
-DEFAULT_PLUGIN_CONFIG = {
- "device_config": {
- "a": {
- "test3": ["there"],
- "test5": [5, 4, 3],
- },
- "b": {
- "test4": 1234,
- },
- },
- "some_device": {
- "a": {
- "test3": ["how are"],
- "test2": "MANDATORY",
- },
- "b": {
- "test3": ["you?"],
- "test5": [1, 2, 3],
- }
- }
-}
-
-
-def _construct_mock_plugin_cache(values=None):
- if values is None:
- values = deepcopy(DEFAULT_PLUGIN_CONFIG)
-
- plugin_cache = Mock(spec=PluginCache)
- plugin_cache.sources = ["a", "b", "c", "d", "e"]
-
- def get_plugin_config(plugin_name):
- return values[plugin_name]
- plugin_cache.get_plugin_config.side_effect = get_plugin_config
-
- def get_plugin_parameters(_):
- return TestConfiguration.configuration
- plugin_cache.get_plugin_parameters.side_effect = get_plugin_parameters
-
- return plugin_cache
-
-
-class TreeTest(TestCase):
-
- def test_node(self):
- node = SectionNode(1)
- assert_equal(node.config, 1)
- assert_is(node.parent, None)
- assert_equal(node.workload_entries, [])
- assert_equal(node.children, [])
-
- def test_add_workload(self):
- node = SectionNode(1)
- node.add_workload(2)
- assert_equal(len(node.workload_entries), 1)
- wk = node.workload_entries[0]
- assert_equal(wk.config, 2)
- assert_is(wk.parent, node)
-
- def test_add_section(self):
- node = SectionNode(1)
- new_node = node.add_section(2)
- assert_equal(len(node.children), 1)
- assert_is(node.children[0], new_node)
- assert_is(new_node.parent, node)
- assert_equal(node.is_leaf, False)
- assert_equal(new_node.is_leaf, True)
-
- def test_descendants(self):
- for got, expected in zip(b1.descendants(), [c1, d1, c2]):
- assert_equal(got.config, expected.config)
- for got, expected in zip(a1.descendants(), [c1, d1, c2, b1, c3, c4, b2]):
- assert_equal(got.config, expected.config)
-
- def test_ancestors(self):
- for got, expected in zip(d1.ancestors(), [c2, b1, a1]):
- assert_equal(got.config, expected.config)
- for _ in a1.ancestors():
- raise Exception("A1 is the root, it shouldn't have ancestors")
-
- def test_leaves(self):
- for got, expected in zip(a1.leaves(), [c1, d1, c3, c4]):
- assert_equal(got.config, expected.config)
- for got, expected in zip(d1.leaves(), [d1]):
- assert_equal(got.config, expected.config)
-
- def test_source_name(self):
- assert_equal(a1.name, 'section "A1"')
- global_section = SectionNode({"id": "global"})
- assert_equal(global_section.name, "globally specified configuration")
-
- a1.add_workload({'id': 'wk1'})
- assert_equal(a1.workload_entries[0].name, 'workload "wk1" from section "A1"')
- global_section.add_workload({'id': 'wk2'})
- assert_equal(global_section.workload_entries[0].name, 'workload "wk2"')
-
-
-class ConfigurationPointTest(TestCase):
-
- def test_match(self):
- cp1 = ConfigurationPoint("test1", aliases=["foo", "bar"])
- cp2 = ConfigurationPoint("test2", aliases=["fizz", "buzz"])
-
- assert_equal(cp1.match("test1"), True)
- assert_equal(cp1.match("foo"), True)
- assert_equal(cp1.match("bar"), True)
- assert_equal(cp1.match("fizz"), False)
- assert_equal(cp1.match("NOT VALID"), False)
-
- assert_equal(cp2.match("test2"), True)
- assert_equal(cp2.match("fizz"), True)
- assert_equal(cp2.match("buzz"), True)
- assert_equal(cp2.match("foo"), False)
- assert_equal(cp2.match("NOT VALID"), False)
-
- def test_set_value(self):
- cp1 = ConfigurationPoint("test", default="hello")
- cp2 = ConfigurationPoint("test", mandatory=True)
- cp3 = ConfigurationPoint("test", mandatory=True, default="Hello")
- cp4 = ConfigurationPoint("test", default=["hello"], merge=True, kind=list)
- cp5 = ConfigurationPoint("test", kind=int)
- cp6 = ConfigurationPoint("test5", kind=list, allowed_values=[1, 2, 3, 4, 5])
-
- mock = Mock()
- mock.name = "ConfigurationPoint Unit Test"
-
- # Testing defaults and basic functionality
- cp1.set_value(mock)
- assert_equal(mock.test, "hello")
- cp1.set_value(mock, value="there")
- assert_equal(mock.test, "there")
-
- # Testing mandatory flag
- err_msg = 'No values specified for mandatory parameter "test" in ' \
- 'ConfigurationPoint Unit Test'
- with self.assertRaisesRegexp(ConfigError, err_msg):
- cp2.set_value(mock)
- cp3.set_value(mock) # Should ignore mandatory
- assert_equal(mock.test, "Hello")
-
- # Testing Merging - not in depth that is done in the unit test for merge_config
- cp4.set_value(mock, value=["there"])
- assert_equal(mock.test, ["Hello", "there"])
-
- # Testing type conversion
- cp5.set_value(mock, value="100")
- assert_equal(isinstance(mock.test, int), True)
- msg = 'Bad value "abc" for test; must be an integer'
- with self.assertRaisesRegexp(ConfigError, msg):
- cp5.set_value(mock, value="abc")
-
- # Testing that validation is not called when no value is set
- # if it is it will error because it cannot iterate over None
- cp6.set_value(mock)
-
- def test_validation(self):
- #Test invalid default
- with self.assertRaises(ValueError):
- # pylint: disable=W0612
- bad_cp = ConfigurationPoint("test", allowed_values=[1], default=100)
-
- def is_even(value):
- if value % 2:
- return False
- return True
-
- cp1 = ConfigurationPoint("test", kind=int, allowed_values=[1, 2, 3, 4, 5])
- cp2 = ConfigurationPoint("test", kind=list, allowed_values=[1, 2, 3, 4, 5])
- cp3 = ConfigurationPoint("test", kind=int, constraint=is_even)
- cp4 = ConfigurationPoint("test", kind=list, mandatory=True, allowed_values=[1, 99])
- mock = obj_dict()
- mock.name = "ConfigurationPoint Validation Unit Test"
-
- # Test allowed values
- cp1.validate_value(mock.name, 1)
- with self.assertRaises(ConfigError):
- cp1.validate_value(mock.name, 100)
- with self.assertRaises(ConfigError):
- cp1.validate_value(mock.name, [1, 2, 3])
-
- # Test allowed values for lists
- cp2.validate_value(mock.name, [1, 2, 3])
- with self.assertRaises(ConfigError):
- cp2.validate_value(mock.name, [1, 2, 100])
-
- # Test constraints
- cp3.validate_value(mock.name, 2)
- cp3.validate_value(mock.name, 4)
- cp3.validate_value(mock.name, 6)
- msg = '"3" failed constraint validation for "test" in "ConfigurationPoint' \
- ' Validation Unit Test".'
- with self.assertRaisesRegexp(ConfigError, msg):
- cp3.validate_value(mock.name, 3)
-
- with self.assertRaises(ValueError):
- ConfigurationPoint("test", constraint=100)
-
- # Test "validate" methods
- mock.test = None
- # Mandatory config point not set
- with self.assertRaises(ConfigError):
- cp4.validate(mock)
- cp1.validate(mock) # cp1 doesnt have mandatory set
- cp4.set_value(mock, value=[99])
- cp4.validate(mock)
-
- def test_get_type_name(self):
- def dummy():
- pass
- types = [str, list, int, dummy]
- names = ["str", "list", "integer", "dummy"]
- for kind, name in zip(types, names):
- cp = ConfigurationPoint("test", kind=kind)
- assert_equal(get_type_name(cp.kind), name)
-
-
-# Subclass to add some config points for use in testing
-class TestConfiguration(Configuration):
- name = "Test Config"
- __configuration = [
- ConfigurationPoint("test1", default="hello"),
- ConfigurationPoint("test2", mandatory=True),
- ConfigurationPoint("test3", default=["hello"], merge=True, kind=list),
- ConfigurationPoint("test4", kind=int, default=123),
- ConfigurationPoint("test5", kind=list, allowed_values=[1, 2, 3, 4, 5]),
- ]
- configuration = {cp.name: cp for cp in __configuration}
-
-
-class ConfigurationTest(TestCase):
-
- def test_merge_using_priority_specificity(self):
- # Test good configs
- plugin_cache = _construct_mock_plugin_cache()
- expected_result = {
- "test1": "hello",
- "test2": "MANDATORY",
- "test3": ["hello", "there", "how are", "you?"],
- "test4": 1234,
- "test5": [1, 2, 3],
- }
- result = merge_using_priority_specificity("device_config", "some_device", plugin_cache)
- assert_equal(result, expected_result)
-
- # Test missing mandatory parameter
- plugin_cache = _construct_mock_plugin_cache(values={
- "device_config": {
- "a": {
- "test1": "abc",
- },
- },
- "some_device": {
- "b": {
- "test5": [1, 2, 3],
- }
- }
- })
- msg = 'No value specified for mandatory parameter "test2" in some_device.'
- with self.assertRaisesRegexp(ConfigError, msg):
- merge_using_priority_specificity("device_config", "some_device", plugin_cache)
-
- # Test conflict
- plugin_cache = _construct_mock_plugin_cache(values={
- "device_config": {
- "e": {
- 'test2': "NOT_CONFLICTING"
- }
- },
- "some_device": {
- 'a': {
- 'test2': "CONFLICT1"
- },
- 'b': {
- 'test2': "CONFLICT2"
- },
- 'c': {
- 'test2': "CONFLICT3"
- },
- },
- })
- msg = ('Error in "e":\n'
- '\t"device_config" configuration "test2" has already been specified more specifically for some_device in:\n'
- '\t\ta\n'
- '\t\tb\n'
- '\t\tc')
- with self.assertRaisesRegexp(ConfigError, msg):
- merge_using_priority_specificity("device_config", "some_device", plugin_cache)
-
- # Test invalid entries
- plugin_cache = _construct_mock_plugin_cache(values={
- "device_config": {
- "a": {
- "NOT_A_CFG_POINT": "nope"
- }
- },
- "some_device": {}
- })
- msg = ('Error in "a":\n\t'
- 'Invalid entry\(ies\) for "some_device" in "device_config": "NOT_A_CFG_POINT"')
- with self.assertRaisesRegexp(ConfigError, msg):
- merge_using_priority_specificity("device_config", "some_device", plugin_cache)
-
- plugin_cache = _construct_mock_plugin_cache(values={
- "some_device": {
- "a": {
- "NOT_A_CFG_POINT": "nope"
- }
- },
- "device_config": {}
- })
- msg = ('Error in "a":\n\t'
- 'Invalid entry\(ies\) for "some_device": "NOT_A_CFG_POINT"')
- with self.assertRaisesRegexp(ConfigError, msg):
- merge_using_priority_specificity("device_config", "some_device", plugin_cache)
-
- # pylint: disable=no-member
- def test_configuration(self):
- # Test loading defaults
- cfg = TestConfiguration()
- expected = {
- "test1": "hello",
- "test3": ["hello"],
- "test4": 123,
- }
- assert_equal(cfg.to_pod(), expected)
- # If a cfg point is not set an attribute with value None should still be created
- assert_is(cfg.test2, None)
- assert_is(cfg.test5, None)
-
- # Testing set
- # Good value
- cfg.set("test1", "there")
- assert_equal(cfg.test1, "there") # pylint: disable=E1101
- # Unknown value
- with self.assertRaisesRegexp(ConfigError, 'Unknown Test Config configuration "nope"'):
- cfg.set("nope", 123)
- # check_mandatory
- with self.assertRaises(ConfigError):
- cfg.set("test2", value=None)
- cfg.set("test2", value=None, check_mandatory=False)
- # parameter constraints are tested in the ConfigurationPoint unit test
- # since this just calls through to `ConfigurationPoint.set_value`
-
- # Test validation
- msg = 'No value specified for mandatory parameter "test2" in Test Config'
- with self.assertRaisesRegexp(ConfigError, msg):
- cfg.validate()
- cfg.set("test2", 1)
- cfg.validate()
-
- # Testing setting values from a dict
- new_values = {
- "test1": "This",
- "test2": "is",
- "test3": ["a"],
- "test4": 7357,
- "test5": [5],
- }
- cfg.update_config(new_values)
- new_values["test3"] = ["hello", "a"] # This cfg point has merge == True
- for k, v in new_values.iteritems():
- assert_equal(getattr(cfg, k), v)
-
- #Testing podding
- pod = cfg.to_pod()
- new_pod = TestConfiguration.from_pod(copy(pod), None).to_pod()
- assert_equal(pod, new_pod)
-
- #invalid pod entry
- pod = {'invalid_entry': "nope"}
- msg = 'Invalid entry\(ies\) for "Test Config": "invalid_entry"'
- with self.assertRaisesRegexp(ConfigError, msg):
- TestConfiguration.from_pod(pod, None)
-
- #failed pod validation
- pod = {"test1": "testing"}
- msg = 'No value specified for mandatory parameter "test2" in Test Config.'
- with self.assertRaisesRegexp(ConfigError, msg):
- TestConfiguration.from_pod(pod, None)
-
- def test_run_configuration(self):
- plugin_cache = _construct_mock_plugin_cache()
-
- # Test `merge_device_config``
- run_config = RunConfiguration()
- run_config.set("device", "some_device")
- run_config.merge_device_config(plugin_cache)
-
- # Test `to_pod`
- expected_pod = {
- "device": "some_device",
- "device_config": {
- "test1": "hello",
- "test2": "MANDATORY",
- "test3": ["hello", "there", "how are", "you?"],
- "test4": 1234,
- "test5": [1, 2, 3],
- },
- "execution_order": "by_iteration",
- "reboot_policy": "as_needed",
- "retry_on_status": ['FAILED', 'PARTIAL'],
- "max_retries": 3,
- }
- pod = run_config.to_pod()
- assert_equal(pod, expected_pod)
-
- # Test to_pod > from_pod
- new_pod = RunConfiguration.from_pod(copy(pod), plugin_cache).to_pod()
- assert_equal(pod, new_pod)
-
- # from_pod with invalid device_config
- pod['device_config']['invalid_entry'] = "nope"
- msg = 'Invalid entry "invalid_entry" for device "some_device".'
- with self.assertRaisesRegexp(ConfigError, msg):
- RunConfiguration.from_pod(copy(pod), plugin_cache)
-
- # from_pod with no device_config
- pod.pop("device_config")
- msg = 'No value specified for mandatory parameter "device_config".'
- with self.assertRaisesRegexp(ConfigError, msg):
- RunConfiguration.from_pod(copy(pod), plugin_cache)
-
- def test_generate_job_spec(self):
- pass
-
-
-class PluginCacheTest(TestCase):
-
- param1 = ConfigurationPoint("param1", aliases="test_global_alias")
- param2 = ConfigurationPoint("param2", aliases="some_other_alias")
- param3 = ConfigurationPoint("param3")
-
- plugin1 = obj_dict(values={
- "name": "plugin 1",
- "parameters": [
- param1,
- param2,
- ]
- })
- plugin2 = obj_dict(values={
- "name": "plugin 2",
- "parameters": [
- param1,
- param3,
- ]
- })
-
- def get_plugin(self, name):
- if name == "plugin 1":
- return self.plugin1
- if name == "plugin 2":
- return self.plugin2
-
- def has_plugin(self, name):
- return name in ["plugin 1", "plugin 2"]
-
- def make_mock_cache(self):
- mock_loader = Mock()
- mock_loader.get_plugin_class.side_effect = self.get_plugin
- mock_loader.list_plugins = Mock(return_value=[self.plugin1, self.plugin2])
- mock_loader.has_plugin.side_effect = self.has_plugin
- return PluginCache(loader=mock_loader)
-
- def test_get_params(self):
- plugin_cache = self.make_mock_cache()
-
- expected_params = {
- self.param1.name: self.param1,
- self.param2.name: self.param2,
- }
-
- assert_equal(expected_params, plugin_cache.get_plugin_parameters("plugin 1"))
-
- def test_global_aliases(self):
- plugin_cache = self.make_mock_cache()
-
- # Check the alias map
- expected_map = {
- "plugin 1": {
- self.param1.aliases: self.param1,
- self.param2.aliases: self.param2,
- },
- "plugin 2": {
- self.param1.aliases: self.param1,
- }
- }
- expected_set = set(["test_global_alias", "some_other_alias"])
-
- assert_equal(expected_map, plugin_cache._global_alias_map)
- assert_equal(expected_set, plugin_cache._list_of_global_aliases)
- assert_equal(True, plugin_cache.is_global_alias("test_global_alias"))
- assert_equal(False, plugin_cache.is_global_alias("not_a_global_alias"))
-
- # Error when adding to unknown source
- with self.assertRaises(RuntimeError):
- plugin_cache.add_global_alias("adding", "too", "early")
-
- # Test adding sources
- for x in xrange(5):
- plugin_cache.add_source(x)
- assert_equal([0, 1, 2, 3, 4], plugin_cache.sources)
-
- # Error when adding non plugin/global alias/generic
- with self.assertRaises(RuntimeError):
- plugin_cache.add_global_alias("unknow_alias", "some_value", 0)
-
- # Test adding global alias values
- plugin_cache.add_global_alias("test_global_alias", "some_value", 0)
- expected_aliases = {"test_global_alias": {0: "some_value"}}
- assert_equal(expected_aliases, plugin_cache.global_alias_values)
-
- def test_add_config(self):
- plugin_cache = self.make_mock_cache()
-
- # Test adding sources
- for x in xrange(5):
- plugin_cache.add_source(x)
- assert_equal([0, 1, 2, 3, 4], plugin_cache.sources)
-
- # Test adding plugin config
- plugin_cache.add_config("plugin 1", "param1", "some_other_value", 0)
- expected_plugin_config = {"plugin 1": {0: {"param1": "some_other_value"}}}
- assert_equal(expected_plugin_config, plugin_cache.plugin_configs)
-
- # Test adding generic config
- for name in GENERIC_CONFIGS:
- plugin_cache.add_config(name, "param1", "some_value", 0)
- expected_plugin_config[name] = {}
- expected_plugin_config[name][0] = {"param1": "some_value"}
- assert_equal(expected_plugin_config, plugin_cache.plugin_configs)
-
- def test_get_plugin_config(self):
- plugin_cache = self.make_mock_cache()
- for x in xrange(5):
- plugin_cache.add_source(x)
-
- # Add some global aliases
- plugin_cache.add_global_alias("test_global_alias", "1", 0)
- plugin_cache.add_global_alias("test_global_alias", "2", 4)
- plugin_cache.add_global_alias("test_global_alias", "3", 3)
-
- # Test if they are being merged in source order
- expected_config = {
- "param1": "2",
- "param2": None,
- }
- assert_equal(expected_config, plugin_cache.get_plugin_config("plugin 1"))
-
- # Add some plugin specific config
- plugin_cache.add_config("plugin 1", "param1", "3", 0)
- plugin_cache.add_config("plugin 1", "param1", "4", 2)
- plugin_cache.add_config("plugin 1", "param1", "5", 1)
-
- # Test if they are being merged in source order on top of the global aliases
- expected_config = {
- "param1": "4",
- "param2": None,
- }
- assert_equal(expected_config, plugin_cache.get_plugin_config("plugin 1"))
-
- def test_merge_using_priority_specificity(self):
- plugin_cache = self.make_mock_cache()
- for x in xrange(5):
- plugin_cache.add_source(x)
-
- # Add generic configs
- plugin_cache.add_config("device_config", "param1", '1', 1)
- plugin_cache.add_config("device_config", "param1", '2', 2)
- assert_equal(plugin_cache.get_plugin_config("plugin 1", generic_name="device_config"),
- {"param1": '2', "param2": None})
-
- # Add specific configs at same level as generic config
- plugin_cache.add_config("plugin 1", "param1", '3', 2)
- assert_equal(plugin_cache.get_plugin_config("plugin 1", generic_name="device_config"),
- {"param1": '3', "param2": None})
-
- # Add specific config at higher level
- plugin_cache.add_config("plugin 1", "param1", '4', 3)
- assert_equal(plugin_cache.get_plugin_config("plugin 1", generic_name="device_config"),
- {"param1": '4', "param2": None})
-
- # Add generic config at higher level - should be an error
- plugin_cache.add_config("device_config", "param1", '5', 4)
- msg = 'Error in "4":\n' \
- '\t"device_config" configuration "param1" has already been specified' \
- ' more specifically for plugin 1 in:\n' \
- '\t\t2, 3'
- with self.assertRaisesRegexp(ConfigError, msg):
- plugin_cache.get_plugin_config("plugin 1", generic_name="device_config")
diff --git a/wlauto/tests/test_device.py b/wlauto/tests/test_device.py
deleted file mode 100644
index 28c47e9b..00000000
--- a/wlauto/tests/test_device.py
+++ /dev/null
@@ -1,99 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=abstract-method,no-self-use,no-name-in-module
-from collections import defaultdict, OrderedDict
-from unittest import TestCase
-
-from nose.tools import raises, assert_equal
-
-from wlauto import Device, Parameter, RuntimeParameter, CoreParameter
-from wlauto.exceptions import ConfigError
-
-
-class TestDevice(Device):
-
- name = 'test-device'
- path_module = 'posixpath'
-
- parameters = [
- Parameter('core_names', default=['a7', 'a7', 'a15'], override=True),
- Parameter('core_clusters', default=[0, 0, 1], override=True),
- ]
-
- runtime_parameters = [
- RuntimeParameter('test_param', 'getter', 'setter'),
- RuntimeParameter('test_param2', 'getter', 'setter'),
- CoreParameter('${core}_param', 'core_getter', 'core_setter'),
- ]
-
- def __init__(self, *args, **kwargs):
- super(TestDevice, self).__init__(*args, **kwargs)
- self.value = None
- self.core_values = defaultdict()
-
- def getter(self):
- return self.value
-
- def setter(self, value):
- if self.value is None:
- self.value = value
-
- def core_getter(self, core):
- return self.core_values.get(core)
-
- def core_setter(self, core, value):
- self.core_values[core] = value
-
-
-class RuntimeParametersTest(TestCase):
-
- def test_runtime_param(self):
- device = _instantiate(TestDevice)
- device.set_runtime_parameters(dict(test_param=5))
- assert_equal(device.value, 5)
- assert_equal(device.get_runtime_parameters().get('test_param'), 5)
-
- def test_core_param(self):
- device = _instantiate(TestDevice)
- device.set_runtime_parameters(dict(a15_param=1, a7_param=2))
- assert_equal(device.core_values, {'a15': 1, 'a7': 2})
- assert_equal(device.get_runtime_parameters().get('a15_param'), 1)
- assert_equal(device.get_runtime_parameters().get('a7_param'), 2)
-
- @raises(ConfigError)
- def test_bad_runtime_param(self):
- device = _instantiate(TestDevice)
- device.set_runtime_parameters(dict(bad_param=1))
-
- def test_get_unset_runtime_params(self):
- device = _instantiate(TestDevice)
- expected = {'test_param': None, 'test_param2': None, 'a15_param': None, 'a7_param': None}
- assert_equal(device.get_runtime_parameters(), expected)
-
- def test_param_set_order(self):
- device = _instantiate(TestDevice)
- device.set_runtime_parameters(OrderedDict([('test_param2', 1), ('test_param', 5)]))
- assert_equal(device.value, 1)
- device.value = None
- device.set_runtime_parameters(OrderedDict([('test_param', 5), ('test_param2', 1)]))
- assert_equal(device.value, 5)
-
-
-def _instantiate(cls, *args, **kwargs):
- # Needed to get around Plugin's __init__ checks
- return cls(*args, **kwargs)
-
diff --git a/wlauto/tests/test_diff.py b/wlauto/tests/test_diff.py
deleted file mode 100644
index cc1683cc..00000000
--- a/wlauto/tests/test_diff.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E0611
-# pylint: disable=R0201
-import os
-import tempfile
-from unittest import TestCase
-
-from nose.tools import assert_equal
-
-from wlauto.instrumentation.misc import _diff_interrupt_files
-
-
-class InterruptDiffTest(TestCase):
-
- def test_interrupt_diff(self):
- file_dir = os.path.join(os.path.dirname(__file__), 'data', 'interrupts')
- before_file = os.path.join(file_dir, 'before')
- after_file = os.path.join(file_dir, 'after')
- expected_result_file = os.path.join(file_dir, 'result')
- output_file = tempfile.mktemp()
-
- _diff_interrupt_files(before_file, after_file, output_file)
- with open(output_file) as fh:
- output_diff = fh.read()
- with open(expected_result_file) as fh:
- expected_diff = fh.read()
- assert_equal(output_diff, expected_diff)
-
-
diff --git a/wlauto/tests/test_exec_control.py b/wlauto/tests/test_exec_control.py
deleted file mode 100644
index 490239d2..00000000
--- a/wlauto/tests/test_exec_control.py
+++ /dev/null
@@ -1,269 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0231,W0613,E0611,W0603,R0201
-from unittest import TestCase
-
-from nose.tools import assert_equal, assert_raises
-
-from wlauto.utils.exec_control import (init_environment, reset_environment,
- activate_environment, once,
- once_per_class, once_per_instance)
-
-class TestClass(object):
-
- def __init__(self):
- self.count = 0
-
- @once
- def initilize_once(self):
- self.count += 1
-
- @once_per_class
- def initilize_once_per_class(self):
- self.count += 1
-
- @once_per_instance
- def initilize_once_per_instance(self):
- self.count += 1
-
-
-class SubClass(TestClass):
-
- def __init__(self):
- super(SubClass, self).__init__()
-
-
-class SubSubClass(SubClass):
-
- def __init__(self):
- super(SubSubClass, self).__init__()
-
-
-class AnotherClass(object):
-
- def __init__(self):
- self.count = 0
-
- @once
- def initilize_once(self):
- self.count += 1
-
- @once_per_class
- def initilize_once_per_class(self):
- self.count += 1
-
- @once_per_instance
- def initilize_once_per_instance(self):
- self.count += 1
-
-
-class EnvironmentManagementTest(TestCase):
-
- def test_duplicate_environment(self):
- init_environment('ENVIRONMENT')
- assert_raises(ValueError, init_environment, 'ENVIRONMENT')
-
- def test_reset_missing_environment(self):
- assert_raises(ValueError, reset_environment, 'MISSING')
-
- def test_reset_current_environment(self):
- activate_environment('CURRENT_ENVIRONMENT')
- t1 = TestClass()
- t1.initilize_once()
- assert_equal(t1.count, 1)
-
- reset_environment()
- t1.initilize_once()
- assert_equal(t1.count, 2)
-
- def test_switch_environment(self):
- activate_environment('ENVIRONMENT1')
- t1 = TestClass()
- t1.initilize_once()
- assert_equal(t1.count, 1)
-
- activate_environment('ENVIRONMENT2')
- t1.initilize_once()
- assert_equal(t1.count, 2)
-
- activate_environment('ENVIRONMENT1')
- t1.initilize_once()
- assert_equal(t1.count, 2)
-
- def test_reset_environment_name(self):
- activate_environment('ENVIRONMENT')
- t1 = TestClass()
- t1.initilize_once()
- assert_equal(t1.count, 1)
-
- reset_environment('ENVIRONMENT')
- t1.initilize_once()
- assert_equal(t1.count, 2)
-
-
-class OnlyOnceEnvironmentTest(TestCase):
-
- def setUp(self):
- activate_environment('TEST_ENVIRONMENT')
-
- def tearDown(self):
- reset_environment('TEST_ENVIRONMENT')
-
- def test_single_instance(self):
- t1 = TestClass()
- ac = AnotherClass()
-
- t1.initilize_once()
- assert_equal(t1.count, 1)
-
- t1.initilize_once()
- assert_equal(t1.count, 1)
-
- ac.initilize_once()
- assert_equal(ac.count, 1)
-
-
- def test_mulitple_instances(self):
- t1 = TestClass()
- t2 = TestClass()
-
- t1.initilize_once()
- assert_equal(t1.count, 1)
-
- t2.initilize_once()
- assert_equal(t2.count, 0)
-
-
- def test_sub_classes(self):
- t1 = TestClass()
- sc = SubClass()
- ss = SubSubClass()
-
- t1.initilize_once()
- assert_equal(t1.count, 1)
-
- sc.initilize_once()
- sc.initilize_once()
- assert_equal(sc.count, 0)
-
- ss.initilize_once()
- ss.initilize_once()
- assert_equal(ss.count, 0)
-
-
-class OncePerClassEnvironmentTest(TestCase):
-
- def setUp(self):
- activate_environment('TEST_ENVIRONMENT')
-
- def tearDown(self):
- reset_environment('TEST_ENVIRONMENT')
-
- def test_single_instance(self):
- t1 = TestClass()
- ac = AnotherClass()
-
- t1.initilize_once_per_class()
- assert_equal(t1.count, 1)
-
- t1.initilize_once_per_class()
- assert_equal(t1.count, 1)
-
- ac.initilize_once_per_class()
- assert_equal(ac.count, 1)
-
-
- def test_mulitple_instances(self):
- t1 = TestClass()
- t2 = TestClass()
-
- t1.initilize_once_per_class()
- assert_equal(t1.count, 1)
-
- t2.initilize_once_per_class()
- assert_equal(t2.count, 0)
-
-
- def test_sub_classes(self):
- t1 = TestClass()
- sc1 = SubClass()
- sc2 = SubClass()
- ss1 = SubSubClass()
- ss2 = SubSubClass()
-
- t1.initilize_once_per_class()
- assert_equal(t1.count, 1)
-
- sc1.initilize_once_per_class()
- sc2.initilize_once_per_class()
- assert_equal(sc1.count, 1)
- assert_equal(sc2.count, 0)
-
- ss1.initilize_once_per_class()
- ss2.initilize_once_per_class()
- assert_equal(ss1.count, 1)
- assert_equal(ss2.count, 0)
-
-
-class OncePerInstanceEnvironmentTest(TestCase):
-
- def setUp(self):
- activate_environment('TEST_ENVIRONMENT')
-
- def tearDown(self):
- reset_environment('TEST_ENVIRONMENT')
-
- def test_single_instance(self):
- t1 = TestClass()
- ac = AnotherClass()
-
- t1.initilize_once_per_instance()
- assert_equal(t1.count, 1)
-
- t1.initilize_once_per_instance()
- assert_equal(t1.count, 1)
-
- ac.initilize_once_per_instance()
- assert_equal(ac.count, 1)
-
-
- def test_mulitple_instances(self):
- t1 = TestClass()
- t2 = TestClass()
-
- t1.initilize_once_per_instance()
- assert_equal(t1.count, 1)
-
- t2.initilize_once_per_instance()
- assert_equal(t2.count, 1)
-
-
- def test_sub_classes(self):
- t1 = TestClass()
- sc = SubClass()
- ss = SubSubClass()
-
- t1.initilize_once_per_instance()
- assert_equal(t1.count, 1)
-
- sc.initilize_once_per_instance()
- sc.initilize_once_per_instance()
- assert_equal(sc.count, 1)
-
- ss.initilize_once_per_instance()
- ss.initilize_once_per_instance()
- assert_equal(ss.count, 1)
diff --git a/wlauto/tests/test_execution.py b/wlauto/tests/test_execution.py
deleted file mode 100644
index 583af009..00000000
--- a/wlauto/tests/test_execution.py
+++ /dev/null
@@ -1,1069 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E0611
-# pylint: disable=R0201
-# pylint: disable=protected-access
-# pylint: disable=abstract-method
-# pylint: disable=attribute-defined-outside-init
-# pylint: disable=no-member
-from unittest import TestCase
-from nose.tools import assert_equal, assert_raises, raises
-
-from wlauto.core.execution import BySpecRunner, ByIterationRunner
-from wlauto.exceptions import DeviceError
-from wlauto.core.configuration import WorkloadRunSpec, RebootPolicy
-from wlauto.core.instrumentation import Instrument
-from wlauto.core.device import Device, DeviceMeta
-from wlauto.core import instrumentation, signal
-from wlauto.core.workload import Workload
-from wlauto.core.result import IterationResult
-from wlauto.core.signal import Signal
-
-
-class SignalCatcher(Instrument):
- name = 'Signal Catcher'
-
- def __init__(self):
- Instrument.__init__(self, None)
- self.signals_received = []
- for sig in signal.__dict__.values():
- if isinstance(sig, Signal):
- signal.connect(self.handler, sig)
-
- def handler(self, *_, **kwargs):
- self.signals_received.append(kwargs.pop('signal').name)
-
-
-class Mock(object):
- def __init__(self):
- self.__members = {}
-
- def __getattr__(self, name):
- if name not in self.__members:
- self.__members[name] = Mock()
- return self.__members[name]
-
- def __call__(self, *args, **kwargs):
- pass
-
- def __iter__(self):
- return iter([])
-
-
-class BadDeviceMeta(DeviceMeta):
-
- @classmethod
- def _implement_virtual(mcs, cls, bases):
- """
- This version of _implement_virtual does not inforce "call global virutals only once"
- policy, so that intialize() and finalize() my be invoked multiple times to test that
- the errors they generated are handled correctly.
-
- """
- # pylint: disable=cell-var-from-loop,unused-argument
- methods = {}
- for vmname in mcs.virtual_methods:
- clsmethod = getattr(cls, vmname, None)
- if clsmethod:
- basemethods = [getattr(b, vmname) for b in bases if hasattr(b, vmname)]
- methods[vmname] = [bm for bm in basemethods if bm != clsmethod]
- methods[vmname].append(clsmethod)
-
- def generate_method_wrapper(vname):
- name__ = vmname
-
- def wrapper(self, *args, **kwargs):
- for dm in methods[name__]:
- dm(self, *args, **kwargs)
- return wrapper
- setattr(cls, vmname, generate_method_wrapper(vmname))
-
-
-class BadDevice(Device):
-
- __metaclass__ = BadDeviceMeta
-
- def __init__(self, when_to_fail, exception=DeviceError):
- #pylint: disable=super-init-not-called
- self.when_to_fail = when_to_fail
- self.exception = exception
-
- def connect(self):
- if self.when_to_fail == 'connect':
- raise self.exception("Connection failure")
-
- def initialize(self, _):
- if self.when_to_fail == 'initialize':
- raise self.exception("Initialisation failure")
-
- def get_properties(self, _):
- if self.when_to_fail == 'get_properties':
- raise self.exception("Failure getting propeties")
-
- def start(self):
- if self.when_to_fail == 'start':
- raise self.exception("Start failure")
-
- def set_device_parameters(self, **_):
- if self.when_to_fail == 'set_device_parameters':
- raise self.exception("Failure setting parameter")
-
- def stop(self):
- if self.when_to_fail == 'stop':
- raise self.exception("Stop failure")
-
- def disconnect(self):
- if self.when_to_fail == 'disconnect':
- raise self.exception("Disconnection failure")
-
- def ping(self):
- return True
-
-
-class BadWorkload(Workload):
-
- def __init__(self, exception, when_to_fail):
- #pylint: disable=super-init-not-called
- self.exception = exception
- self.when_to_fail = when_to_fail
-
- def setup(self, _):
- if "setup" in self.when_to_fail:
- raise self.exception("Setup failed")
-
- def run(self, _):
- if "run" in self.when_to_fail:
- raise self.exception("Run failed")
-
- def update_result(self, _):
- if "update_result" in self.when_to_fail:
- raise self.exception("Result update failed")
-
- def teardown(self, _):
- if "teardown" in self.when_to_fail:
- raise self.exception("Teardown failed")
-
-
-class RunnerTest(TestCase):
-
- errors = 0
-
- def signal_check(self, expected_signals, workloads, reboot_policy="never", runner_class=BySpecRunner):
- context = Mock()
- context.reboot_policy = RebootPolicy(reboot_policy)
- context.config.workload_specs = workloads
- context.config.retry_on_status = []
-
- instrument = _instantiate(SignalCatcher)
- instrumentation.install(instrument)
-
- runner = runner_class(Mock(), context, Mock())
- runner.init_queue(context.config.workload_specs)
-
- try:
- runner.run()
- finally:
- instrumentation.uninstall(instrument)
-
- assert_equal(instrument.signals_received, expected_signals)
-
- def test_single_run(self):
- expected_signals = [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
- workloads = [WorkloadRunSpec(id='1', number_of_iterations=1, instrumentation=['Signal Catcher'])]
- workloads[0]._workload = Mock()
-
- self.signal_check(expected_signals, workloads)
-
- def test_multiple_run_byspec(self):
- expected_signals = [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
- workloads = [
- WorkloadRunSpec(id='1', number_of_iterations=1, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='2', number_of_iterations=2, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='3', number_of_iterations=3, instrumentation=['Signal Catcher'])
- ]
- workloads[0]._workload = Mock()
- workloads[1]._workload = Mock()
- workloads[2]._workload = Mock()
-
- self.signal_check(expected_signals, workloads)
-
- def test_multiple_run_byiteration(self):
- expected_signals = [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
- workloads = [
- WorkloadRunSpec(id='1', number_of_iterations=1, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='2', number_of_iterations=2, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='3', number_of_iterations=3, instrumentation=['Signal Catcher']),
- ]
- workloads[0]._workload = Mock()
- workloads[1]._workload = Mock()
- workloads[2]._workload = Mock()
-
- self.signal_check(expected_signals, workloads, runner_class=ByIterationRunner)
-
- def test_reboot_policies(self):
- expected_never = [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
-
- expected_initial = [
- signal.RUN_START.name,
- signal.BEFORE_INITIAL_BOOT.name,
- signal.BEFORE_BOOT.name,
- signal.SUCCESSFUL_BOOT.name,
- signal.AFTER_BOOT.name,
- signal.SUCCESSFUL_INITIAL_BOOT.name,
- signal.AFTER_INITIAL_BOOT.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
-
- expected_each_spec = [
- signal.RUN_START.name,
- signal.BEFORE_INITIAL_BOOT.name,
- signal.BEFORE_BOOT.name,
- signal.SUCCESSFUL_BOOT.name,
- signal.AFTER_BOOT.name,
- signal.SUCCESSFUL_INITIAL_BOOT.name,
- signal.AFTER_INITIAL_BOOT.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.BEFORE_BOOT.name,
- signal.SUCCESSFUL_BOOT.name,
- signal.AFTER_BOOT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
-
- expected_each_iteration = [
- signal.RUN_START.name,
- signal.BEFORE_INITIAL_BOOT.name,
- signal.BEFORE_BOOT.name,
- signal.SUCCESSFUL_BOOT.name,
- signal.AFTER_BOOT.name,
- signal.SUCCESSFUL_INITIAL_BOOT.name,
- signal.AFTER_INITIAL_BOOT.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.BEFORE_BOOT.name,
- signal.SUCCESSFUL_BOOT.name,
- signal.AFTER_BOOT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.BEFORE_BOOT.name,
- signal.SUCCESSFUL_BOOT.name,
- signal.AFTER_BOOT.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
-
- workloads = [
- WorkloadRunSpec(id='1', number_of_iterations=1, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='2', number_of_iterations=1, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='3', number_of_iterations=2, instrumentation=['Signal Catcher'])
- ]
- workloads[0]._workload = Mock()
- workloads[1]._workload = Mock()
- workloads[2]._workload = Mock()
-
- self.signal_check(expected_never, workloads[0:1], reboot_policy="never")
- self.signal_check(expected_initial, workloads[0:1], reboot_policy="initial")
- self.signal_check(expected_each_spec, workloads[0:2], reboot_policy="each_spec")
- self.signal_check(expected_each_iteration, workloads[1:3], reboot_policy="each_iteration")
-
- def test_spec_skipping(self):
- expected_signals = [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
-
- workloads = [
- WorkloadRunSpec(id='1', number_of_iterations=5, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='2', number_of_iterations=1, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='3', number_of_iterations=4, instrumentation=['Signal Catcher'])
- ]
-
- workloads[0]._workload = Mock()
- workloads[1]._workload = Mock()
- workloads[2]._workload = Mock()
- workloads[0].enabled = False
- workloads[2].enabled = False
-
- self.signal_check(expected_signals, workloads)
-
- def test_bad_workload_status(self):
- workloads = [
- WorkloadRunSpec(id='1', number_of_iterations=2, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='2', number_of_iterations=2, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='3', number_of_iterations=2, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='4', number_of_iterations=2, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='5', number_of_iterations=2, instrumentation=['Signal Catcher'])
- ]
-
- workloads[0]._workload = BadWorkload(Exception, ["setup"])
- workloads[1]._workload = BadWorkload(Exception, ["run"])
- workloads[2]._workload = BadWorkload(Exception, ["update_result"])
- workloads[3]._workload = BadWorkload(Exception, ["teardown"])
- workloads[4]._workload = Mock()
-
- context = Mock()
- context.reboot_policy = RebootPolicy("never")
- context.config.workload_specs = workloads
-
- runner = BySpecRunner(Mock(), context, Mock())
- runner.init_queue(context.config.workload_specs)
-
- instrument = _instantiate(SignalCatcher)
- instrumentation.install(instrument)
-
- try:
- runner.run()
- finally:
- instrumentation.uninstall(instrument)
-
- #Check queue was handled correctly
- assert_equal(len(runner.completed_jobs), 10)
- assert_equal(len(runner.job_queue), 0)
-
- #Check job status'
- expected_status = [
- IterationResult.FAILED, IterationResult.SKIPPED,
- IterationResult.FAILED, IterationResult.FAILED,
- IterationResult.PARTIAL, IterationResult.PARTIAL,
- IterationResult.NONCRITICAL, IterationResult.NONCRITICAL,
- IterationResult.OK, IterationResult.OK
- ]
- for i in range(0, len(runner.completed_jobs)):
- assert_equal(runner.completed_jobs[i].result.status, expected_status[i])
-
- #Check signals were sent correctly
- expected_signals = [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name, # Fail Setup
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.ITERATION_END.name,
- #Skipped iteration
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name, # Fail Run
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- #signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name, - not sent because run failed
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- #signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name, - not sent because run failed
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name, # Fail Result Update
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name, # Fail Teardown
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.WORKLOAD_SPEC_START.name, # OK
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
-
- assert_equal(expected_signals, instrument.signals_received)
-
- def test_CTRL_C(self):
- workloads = [
- WorkloadRunSpec(id='1', number_of_iterations=2, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='2', number_of_iterations=2, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='3', number_of_iterations=2, instrumentation=['Signal Catcher']),
- WorkloadRunSpec(id='4', number_of_iterations=2, instrumentation=['Signal Catcher']),
- ]
-
- workloads[0]._workload = BadWorkload(KeyboardInterrupt, ["setup"])
- workloads[1]._workload = BadWorkload(KeyboardInterrupt, ["run"])
- workloads[2]._workload = BadWorkload(KeyboardInterrupt, ["update_result"])
- workloads[3]._workload = BadWorkload(KeyboardInterrupt, ["teardown"])
-
- expected_status = [IterationResult.ABORTED, IterationResult.ABORTED]
-
- expected_signals = [
- [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ],
- [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ],
- [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ],
- [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.SUCCESSFUL_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.SUCCESSFUL_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ],
- ]
-
- for i in xrange(0, len(workloads)):
- context = Mock()
- context.reboot_policy = RebootPolicy("never")
- context.config.workload_specs = [workloads[i]]
-
- runner = BySpecRunner(Mock(), context, Mock())
- runner.init_queue(context.config.workload_specs)
-
- instrument = _instantiate(SignalCatcher)
- instrumentation.install(instrument)
-
- try:
- runner.run()
- finally:
- instrumentation.uninstall(instrument)
-
- #Check queue was handled correctly
- assert_equal(len(runner.completed_jobs), 2)
- assert_equal(len(runner.job_queue), 0)
-
- #check correct signals were sent
- assert_equal(expected_signals[i], instrument.signals_received)
-
- #Check job status'
- for j in range(0, len(runner.completed_jobs)):
- assert_equal(runner.completed_jobs[j].result.status, expected_status[j])
-
- def test_no_teardown_after_setup_fail(self):
- expected_signals = [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
-
- workloads = [WorkloadRunSpec(id='1', number_of_iterations=1, instrumentation=['Signal Catcher'])]
- workloads[0]._workload = BadWorkload(Exception, ["setup"])
-
- self.signal_check(expected_signals, workloads)
-
- def test_teardown_on_run_and_result_update_fail(self):
- expected_signals = [
- signal.RUN_START.name,
- signal.RUN_INIT.name,
- signal.WORKLOAD_SPEC_START.name,
- signal.ITERATION_START.name,
- signal.BEFORE_WORKLOAD_SETUP.name,
- signal.SUCCESSFUL_WORKLOAD_SETUP.name,
- signal.AFTER_WORKLOAD_SETUP.name,
- signal.BEFORE_WORKLOAD_EXECUTION.name,
- signal.AFTER_WORKLOAD_EXECUTION.name,
- signal.BEFORE_WORKLOAD_RESULT_UPDATE.name,
- signal.AFTER_WORKLOAD_RESULT_UPDATE.name,
- signal.BEFORE_WORKLOAD_TEARDOWN.name,
- signal.SUCCESSFUL_WORKLOAD_TEARDOWN.name,
- signal.AFTER_WORKLOAD_TEARDOWN.name,
- signal.ITERATION_END.name,
- signal.WORKLOAD_SPEC_END.name,
- signal.RUN_FIN.name,
- signal.BEFORE_OVERALL_RESULTS_PROCESSING.name,
- signal.SUCCESSFUL_OVERALL_RESULTS_PROCESSING.name,
- signal.AFTER_OVERALL_RESULTS_PROCESSING.name,
- signal.RUN_END.name
- ]
- workloads = [WorkloadRunSpec(id='1', number_of_iterations=1, instrumentation=['Signal Catcher'])]
- workloads[0]._workload = BadWorkload(Exception, ["run", "update_result"])
-
- self.signal_check(expected_signals, workloads)
-
- def bad_device(self, method):
- workloads = [WorkloadRunSpec(id='1', number_of_iterations=1, instrumentation=[])]
- workloads[0]._workload = Mock()
-
- context = Mock()
- context.reboot_policy = RebootPolicy("never")
- context.config.workload_specs = workloads
-
- runner = BySpecRunner(BadDevice(method), context, Mock())
- runner.init_queue(context.config.workload_specs)
- runner.run()
-
- @raises(DeviceError)
- def test_bad_connect(self):
- assert_raises(DeviceError, self.bad_device('connect'))
-
- @raises(DeviceError)
- def test_bad_initialize(self):
- assert_raises(DeviceError, self.bad_device('initialize'))
-
- def test_bad_start(self):
- self.bad_device('start') # error must not propagate
-
- def test_bad_stop(self):
- self.bad_device('stop') # error must not propagate
-
- def test_bad_disconnect(self):
- self.bad_device('disconnect') # error must not propagate
-
- @raises(DeviceError)
- def test_bad_get_properties(self):
- assert_raises(DeviceError, self.bad_device('get_properties'))
-
-
-def _instantiate(cls, *args, **kwargs):
- # Needed to get around Plugin's __init__ checks
- return cls(*args, **kwargs)
diff --git a/wlauto/tests/test_extension.py b/wlauto/tests/test_extension.py
deleted file mode 100644
index a38750cb..00000000
--- a/wlauto/tests/test_extension.py
+++ /dev/null
@@ -1,349 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E0611,R0201,E1101
-from unittest import TestCase
-
-from nose.tools import assert_equal, raises, assert_true
-
-from wlauto.core.plugin import Plugin, Parameter, Param, PluginMeta, Module
-from wlauto.utils.types import list_of_ints
-from wlauto.exceptions import ConfigError
-
-
-class MyMeta(PluginMeta):
-
- virtual_methods = ['validate', 'virtual1', 'virtual2']
-
-
-class MyBasePlugin(Plugin):
-
- __metaclass__ = MyMeta
-
- name = 'base'
-
- parameters = [
- Parameter('base'),
- ]
-
- def __init__(self, **kwargs):
- super(MyBasePlugin, self).__init__(**kwargs)
- self.v1 = 0
- self.v2 = 0
- self.v3 = ''
-
- def virtual1(self):
- self.v1 += 1
- self.v3 = 'base'
-
- def virtual2(self):
- self.v2 += 1
-
-
-class MyAcidPlugin(MyBasePlugin):
-
- name = 'acid'
-
- parameters = [
- Parameter('hydrochloric', kind=list_of_ints, default=[1, 2]),
- 'citric',
- ('carbonic', int),
- ]
-
- def __init__(self, **kwargs):
- super(MyAcidPlugin, self).__init__(**kwargs)
- self.vv1 = 0
- self.vv2 = 0
-
- def virtual1(self):
- self.vv1 += 1
- self.v3 = 'acid'
-
- def virtual2(self):
- self.vv2 += 1
-
-
-class MyOtherPlugin(MyBasePlugin):
-
- name = 'other'
-
- parameters = [
- Param('mandatory', mandatory=True),
- Param('optional', allowed_values=['test', 'check']),
- ]
-
-class MyOtherOtherPlugin(MyOtherPlugin):
-
- name = 'otherother'
-
- parameters = [
- Param('mandatory', override=True),
- ]
-
-
-class MyOverridingPlugin(MyAcidPlugin):
-
- name = 'overriding'
-
- parameters = [
- Parameter('hydrochloric', override=True, default=[3, 4]),
- ]
-
-
-class MyThirdTeerPlugin(MyOverridingPlugin):
-
- name = 'thirdteer'
-
-
-class MultiValueParamExt(Plugin):
-
- name = 'multivalue'
-
- parameters = [
- Parameter('test', kind=list_of_ints, allowed_values=[42, 7, 73]),
- ]
-
-
-class MyCoolModule(Module):
-
- name = 'cool_module'
-
- capabilities = ['fizzle']
-
- def initialize(self, context):
- self.fizzle_factor = 0 # pylint: disable=attribute-defined-outside-init
-
- def fizzle(self):
- self.fizzle_factor += 1
-
-
-class MyEvenCoolerModule(Module):
-
- name = 'even_cooler_module'
-
- capabilities = ['fizzle']
-
- def fizzle(self):
- self.owner.self_fizzle_factor += 2
-
-
-class MyModularPlugin(Plugin):
-
- name = 'modular'
-
- parameters = [
- Parameter('modules', override=True, default=['cool_module']),
- ]
-
-
-class MyOtherModularPlugin(Plugin):
-
- name = 'other_modular'
-
- parameters = [
- Parameter('modules', override=True, default=[
- 'cool_module',
- 'even_cooler_module',
- ]),
- ]
-
- def __init__(self, **kwargs):
- super(MyOtherModularPlugin, self).__init__(**kwargs)
- self.self_fizzle_factor = 0
-
-
-class FakeLoader(object):
-
- modules = [
- MyCoolModule,
- MyEvenCoolerModule,
- ]
-
- def get_module(self, name, owner, **kwargs): # pylint: disable=unused-argument
- for module in self.modules:
- if module.name == name:
- return _instantiate(module, owner)
-
-
-class PluginMetaTest(TestCase):
-
- def test_propagation(self):
- acid_params = [p.name for p in MyAcidPlugin.parameters]
- assert_equal(acid_params, ['modules', 'base', 'hydrochloric', 'citric', 'carbonic'])
-
- @raises(ValueError)
- def test_duplicate_param_spec(self):
- class BadPlugin(MyBasePlugin): # pylint: disable=W0612
- parameters = [
- Parameter('base'),
- ]
-
- def test_param_override(self):
- class OverridingPlugin(MyBasePlugin): # pylint: disable=W0612
- parameters = [
- Parameter('base', override=True, default='cheese'),
- ]
- assert_equal(OverridingPlugin.parameters['base'].default, 'cheese')
-
- @raises(ValueError)
- def test_invalid_param_spec(self):
- class BadPlugin(MyBasePlugin): # pylint: disable=W0612
- parameters = [
- 7,
- ]
-
- def test_virtual_methods(self):
- acid = _instantiate(MyAcidPlugin)
- acid.virtual1()
- assert_equal(acid.v1, 1)
- assert_equal(acid.vv1, 1)
- assert_equal(acid.v2, 0)
- assert_equal(acid.vv2, 0)
- assert_equal(acid.v3, 'acid')
- acid.virtual2()
- acid.virtual2()
- assert_equal(acid.v1, 1)
- assert_equal(acid.vv1, 1)
- assert_equal(acid.v2, 2)
- assert_equal(acid.vv2, 2)
-
- def test_initialization(self):
- class MyExt(Plugin):
- name = 'myext'
- values = {'a': 0}
- def __init__(self, *args, **kwargs):
- super(MyExt, self).__init__(*args, **kwargs)
- self.instance_init = 0
- def initialize(self, context):
- self.values['a'] += 1
-
- class MyChildExt(MyExt):
- name = 'mychildext'
- def initialize(self, context):
- self.instance_init += 1
-
- ext = _instantiate(MyChildExt)
- ext.initialize(None)
-
- assert_equal(MyExt.values['a'], 1)
- assert_equal(ext.instance_init, 1)
-
- def test_initialization_happens_once(self):
- class MyExt(Plugin):
- name = 'myext'
- values = {'a': 0}
- def __init__(self, *args, **kwargs):
- super(MyExt, self).__init__(*args, **kwargs)
- self.instance_init = 0
- self.instance_validate = 0
- def initialize(self, context):
- self.values['a'] += 1
- def validate(self):
- self.instance_validate += 1
-
- class MyChildExt(MyExt):
- name = 'mychildext'
- def initialize(self, context):
- self.instance_init += 1
- def validate(self):
- self.instance_validate += 1
-
- ext1 = _instantiate(MyExt)
- ext2 = _instantiate(MyExt)
- ext3 = _instantiate(MyChildExt)
- ext1.initialize(None)
- ext2.initialize(None)
- ext3.initialize(None)
- ext1.validate()
- ext2.validate()
- ext3.validate()
-
- assert_equal(MyExt.values['a'], 1)
- assert_equal(ext1.instance_init, 0)
- assert_equal(ext3.instance_init, 1)
- assert_equal(ext1.instance_validate, 1)
- assert_equal(ext3.instance_validate, 2)
-
-
-class ParametersTest(TestCase):
-
- def test_setting(self):
- myext = _instantiate(MyAcidPlugin, hydrochloric=[5, 6], citric=5, carbonic=42)
- assert_equal(myext.hydrochloric, [5, 6])
- assert_equal(myext.citric, '5')
- assert_equal(myext.carbonic, 42)
-
- def test_validation_ok(self):
- myext = _instantiate(MyOtherPlugin, mandatory='check', optional='check')
- myext.validate()
-
- def test_default_override(self):
- myext = _instantiate(MyOverridingPlugin)
- assert_equal(myext.hydrochloric, [3, 4])
- myotherext = _instantiate(MyThirdTeerPlugin)
- assert_equal(myotherext.hydrochloric, [3, 4])
-
- def test_multivalue_param(self):
- myext = _instantiate(MultiValueParamExt, test=[7, 42])
- myext.validate()
- assert_equal(myext.test, [7, 42])
-
- @raises(ConfigError)
- def test_bad_multivalue_param(self):
- myext = _instantiate(MultiValueParamExt, test=[5])
- myext.validate()
-
- @raises(ConfigError)
- def test_validation_no_mandatory(self):
- myext = _instantiate(MyOtherPlugin, optional='check')
- myext.validate()
-
- @raises(ConfigError)
- def test_validation_no_mandatory_in_derived(self):
- _instantiate(MyOtherOtherPlugin)
-
- @raises(ConfigError)
- def test_validation_bad_value(self):
- myext = _instantiate(MyOtherPlugin, mandatory=1, optional='invalid')
- myext.validate()
-
- @raises(ValueError)
- def test_duplicate_param_override(self):
- class DuplicateParamPlugin(MyBasePlugin): # pylint: disable=W0612
- parameters = [
- Parameter('food', override=True, default='cheese'),
- ]
-
-
-class ModuleTest(TestCase):
-
- def test_fizzle(self):
- myext = _instantiate(MyModularPlugin)
- myext.load_modules(FakeLoader())
- assert_true(myext.can('fizzle'))
- myext.fizzle()
- assert_equal(myext.fizzle_factor, 1)
-
- def test_self_fizzle(self):
- myext = _instantiate(MyOtherModularPlugin)
- myext.load_modules(FakeLoader())
- myext.fizzle()
- assert_equal(myext.self_fizzle_factor, 2)
-
-
-def _instantiate(cls, *args, **kwargs):
- # Needed to get around Plugin's __init__ checks
- return cls(*args, **kwargs)
diff --git a/wlauto/tests/test_extension_loader.py b/wlauto/tests/test_extension_loader.py
deleted file mode 100644
index 97fea938..00000000
--- a/wlauto/tests/test_extension_loader.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E0611,R0201
-import os
-from unittest import TestCase
-
-from nose.tools import assert_equal, assert_greater
-
-from wlauto.core.pluginloader import PluginLoader
-
-
-EXTDIR = os.path.join(os.path.dirname(__file__), 'data', 'plugins')
-
-
-class PluginLoaderTest(TestCase):
-
- def test_load_device(self):
- loader = PluginLoader(paths=[EXTDIR, ], load_defaults=False)
- device = loader.get_device('test-device')
- assert_equal(device.name, 'test-device')
-
- def test_list_by_kind(self):
- loader = PluginLoader(paths=[EXTDIR, ], load_defaults=False)
- exts = loader.list_devices()
- assert_equal(len(exts), 1)
- assert_equal(exts[0].name, 'test-device')
-
- def test_clear_and_reload(self):
- loader = PluginLoader()
- assert_greater(len(loader.list_devices()), 1)
- loader.clear()
- loader.update(paths=[EXTDIR, ])
- devices = loader.list_devices()
- assert_equal(len(devices), 1)
- assert_equal(devices[0].name, 'test-device')
- assert_equal(len(loader.list_plugins()), 1)
-
diff --git a/wlauto/tests/test_instrumentation.py b/wlauto/tests/test_instrumentation.py
deleted file mode 100644
index da58fbe7..00000000
--- a/wlauto/tests/test_instrumentation.py
+++ /dev/null
@@ -1,236 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0231,W0613,E0611,W0603,R0201
-from unittest import TestCase
-
-from nose.tools import assert_equal, raises, assert_true, assert_false
-
-from wlauto import Instrument
-from wlauto.core import signal, instrumentation
-from wlauto.instrumentation import instrument_is_installed, instrument_is_enabled, clear_instrumentation
-
-
-class MockInstrument(Instrument):
-
- name = 'mock'
-
- def __init__(self):
- Instrument.__init__(self, None)
- self.before = 0
- self.after = 0
-
- def before_workload_execution(self, context):
- self.before += 1
-
- def after_workload_execution(self, context):
- self.after += 1
-
-
-class MockInstrument2(Instrument):
-
- name = 'mock_2'
-
- def __init__(self):
- Instrument.__init__(self, None)
- self.before = 0
- self.after = 0
- self.result = 0
-
- def before_workload_execution(self, context):
- self.before += 1
-
- def after_workload_execution(self, context):
- self.after += 1
-
- def after_workload_result_update(self, context):
- self.result += 1
-
-
-class MockInstrument3(Instrument):
-
- name = 'mock_3'
-
- def __init__(self):
- Instrument.__init__(self, None)
-
- def slow_before_workload_execution(self, context):
- global counter
- counter += 1
-
-
-class MockInstrument4(Instrument):
-
- name = 'mock_4'
-
- def __init__(self):
- Instrument.__init__(self, None)
-
- def slow_before_first_iteration_boot(self, context):
- global counter
- counter = 4
-
-
-class MockInstrument5(Instrument):
-
- name = 'mock_5'
-
- def __init__(self):
- Instrument.__init__(self, None)
-
- def fast_before_first_iteration_boot(self, context):
- global counter
- counter += 2
-
-
-class MockInstrument6(Instrument):
-
- name = 'mock_6'
-
- def __init__(self):
- Instrument.__init__(self, None)
-
- def before_first_iteration_boot(self, context):
- global counter
- counter *= 10
-
-
-class BadInstrument(Instrument):
-
- name = 'bad'
-
- def __init__(self):
- pass
-
- # Not specifying the context argument.
- def teardown(self):
- pass
-
-
-counter = 0
-
-
-class InstrumentationTest(TestCase):
-
- def tearDown(self):
- clear_instrumentation()
-
- def test_install(self):
- instrument = _instantiate(MockInstrument)
- instrument2 = _instantiate(MockInstrument2)
- instrumentation.install(instrument)
- instrumentation.install(instrument2)
- signal.send(signal.BEFORE_WORKLOAD_EXECUTION, self, context=None)
- signal.send(signal.AFTER_WORKLOAD_EXECUTION, self, context=None)
- signal.send(signal.AFTER_WORKLOAD_RESULT_UPDATE, self, context=None)
- assert_equal(instrument.before, 1)
- assert_equal(instrument.after, 1)
- assert_equal(instrument2.before, 1)
- assert_equal(instrument2.after, 1)
- assert_equal(instrument2.result, 1)
-
- def test_enable_disable(self):
- instrument = _instantiate(MockInstrument)
- instrument2 = _instantiate(MockInstrument2)
- instrumentation.install(instrument)
- instrumentation.install(instrument2)
-
- instrumentation.disable_all()
- signal.send(signal.BEFORE_WORKLOAD_EXECUTION, self, context=None)
- signal.send(signal.AFTER_WORKLOAD_EXECUTION, self, context=None)
- signal.send(signal.AFTER_WORKLOAD_RESULT_UPDATE, self, context=None)
- assert_equal(instrument.before, 0)
- assert_equal(instrument.after, 0)
- assert_equal(instrument2.before, 0)
- assert_equal(instrument2.after, 0)
- assert_equal(instrument2.result, 0)
-
- instrumentation.enable(instrument)
- signal.send(signal.BEFORE_WORKLOAD_EXECUTION, self, context=None)
- signal.send(signal.AFTER_WORKLOAD_EXECUTION, self, context=None)
- signal.send(signal.AFTER_WORKLOAD_RESULT_UPDATE, self, context=None)
- assert_equal(instrument.before, 1)
- assert_equal(instrument.after, 1)
- assert_equal(instrument2.before, 0)
- assert_equal(instrument2.after, 0)
- assert_equal(instrument2.result, 0)
-
- instrumentation.enable_all()
- signal.send(signal.BEFORE_WORKLOAD_EXECUTION, self, context=None)
- signal.send(signal.AFTER_WORKLOAD_EXECUTION, self, context=None)
- signal.send(signal.AFTER_WORKLOAD_RESULT_UPDATE, self, context=None)
- assert_equal(instrument.before, 2)
- assert_equal(instrument.after, 2)
- assert_equal(instrument2.before, 1)
- assert_equal(instrument2.after, 1)
- assert_equal(instrument2.result, 1)
-
- def test_check_enabled(self):
- instrument = _instantiate(MockInstrument)
- instrumentation.install(instrument)
- instrumentation.enable(instrument)
- assert_true(instrument_is_enabled(instrument))
- assert_true(instrument_is_enabled(instrument.name))
- instrumentation.disable(instrument)
- assert_false(instrument_is_enabled(instrument))
- assert_false(instrument_is_enabled(instrument.name))
-
- def test_local_instrument(self):
- global counter
- counter = 0
- self.install_local_instrument()
- signal.send(signal.BEFORE_WORKLOAD_EXECUTION, self, context=None)
- assert_equal(counter, 1)
-
- def test_priority_prefix_instrument(self):
- global counter
- counter = 0
- instrument1 = _instantiate(MockInstrument4)
- instrument2 = _instantiate(MockInstrument5)
- instrument3 = _instantiate(MockInstrument6)
- instrumentation.install(instrument1)
- instrumentation.install(instrument2)
- instrumentation.install(instrument3)
- signal.send(signal.BEFORE_FIRST_ITERATION_BOOT, self, context=None)
- assert_equal(counter, 42)
-
- @raises(ValueError)
- def test_bad_argspec(self):
- instrument = _instantiate(BadInstrument)
- instrumentation.install(instrument)
-
- def test_check_installed(self):
- instrumentation.install(_instantiate(MockInstrument))
- assert_true(instrument_is_installed('mock'))
- assert_true(instrument_is_installed(MockInstrument))
- assert_false(instrument_is_installed(MockInstrument2))
-
- def install_local_instrument(self):
- instrument = _instantiate(MockInstrument3)
- instrumentation.install(instrument)
-
- @raises(ValueError)
- def test_duplicate_install(self):
- instrument = _instantiate(MockInstrument)
- instrument2 = _instantiate(MockInstrument)
- instrumentation.install(instrument)
- instrumentation.install(instrument2)
-
-
-def _instantiate(cls):
- # Needed to get around Plugin's __init__ checks
- return cls()
-
diff --git a/wlauto/tests/test_parsers.py b/wlauto/tests/test_parsers.py
deleted file mode 100644
index 77d8bba4..00000000
--- a/wlauto/tests/test_parsers.py
+++ /dev/null
@@ -1,381 +0,0 @@
-import os
-from unittest import TestCase
-from copy import copy
-
-from nose.tools import assert_equal # pylint: disable=E0611
-from mock.mock import Mock, MagicMock, call
-
-from wlauto.exceptions import ConfigError
-from wlauto.core.configuration.parsers import * # pylint: disable=wildcard-import
-from wlauto.core.configuration.parsers import _load_file, _collect_valid_id, _resolve_params_alias
-from wlauto.core.configuration import RunConfiguration, JobGenerator, PluginCache, ConfigurationPoint
-from wlauto.core.configuration.configuration import MetaConfiguration
-from wlauto.utils.types import toggle_set, reset_counter
-
-
-class TestFunctions(TestCase):
-
- def test_load_file(self):
- # This does not test read_pod
-
- # Non-existant file
- with self.assertRaises(ValueError):
- _load_file("THIS-IS-NOT-A-FILE", "test file")
- base_path = os.path.dirname(os.path.realpath(__file__))
-
- # Top level entry not a dict
- with self.assertRaisesRegexp(ConfigError, r".+ does not contain a valid test file structure; top level must be a dict\."):
- _load_file(os.path.join(base_path, "data", "test-agenda-not-dict.yaml"), "test file")
-
- # Yaml syntax error
- with self.assertRaisesRegexp(ConfigError, r"Error parsing test file .+: Syntax Error on line 1"):
- _load_file(os.path.join(base_path, "data", "test-agenda-bad-syntax.yaml"), "test file")
-
- # Ideal case
- _load_file(os.path.join(base_path, "data", "test-agenda.yaml"), "test file")
-
- def test_get_aliased_param(self):
- # Ideal case
- cp1 = ConfigurationPoint("test", aliases=[
- 'workload_parameters',
- 'workload_params',
- 'params'
- ])
-
- d_correct = {"workload_parameters": [1, 2, 3],
- "instruments": [2, 3, 4],
- "some_other_param": 1234}
- assert_equal(get_aliased_param(cp1, d_correct, default=[], pop=False), [1, 2, 3])
-
- # Two aliases for the same parameter given
- d_duplicate = {"workload_parameters": [1, 2, 3],
- "workload_params": [2, 3, 4]}
- with self.assertRaises(ConfigError):
- get_aliased_param(cp1, d_duplicate, default=[])
-
- # Empty dict
- d_none = {}
- assert_equal(get_aliased_param(cp1, d_none, default=[]), [])
-
- # Aliased parameter not present in dict
- d_not_present = {"instruments": [2, 3, 4],
- "some_other_param": 1234}
- assert_equal(get_aliased_param(cp1, d_not_present, default=1), 1)
-
- # Testing pop functionality
- assert_equal("workload_parameters" in d_correct, True)
- get_aliased_param(cp1, d_correct, default=[])
- assert_equal("workload_parameters" in d_correct, False)
-
- def test_merge_result_processor_instruments(self):
- non_merge = {
- "instrumentation": toggle_set(["one", "two"]),
- }
- expected_non_merge = copy(non_merge)
- merge_result_processors_instruments(non_merge)
- assert_equal(non_merge, expected_non_merge)
-
- no_overlap = {
- "instrumentation": ["one", "two"],
- "result_processors": ["three", "~four"],
- }
- expected_no_overlap = {"instrumentation": toggle_set(["one", "two", "three", "~four"])}
- merge_result_processors_instruments(no_overlap)
- assert_equal(no_overlap, expected_no_overlap)
-
- non_conflicting = {
- "instrumentation": ["one", "two"],
- "result_processors": ["two", "three"],
- }
- expected_non_conflicting = {"instrumentation": toggle_set(["one", "two", "three"])}
- merge_result_processors_instruments(non_conflicting)
- assert_equal(non_conflicting, expected_non_conflicting)
-
- conflict = {
- "instrumentation": ["one", "two"],
- "result_processors": ["~two", "three"],
- }
- with self.assertRaises(ConfigError):
- merge_result_processors_instruments(conflict)
-
- def test_collect_valid_id(self):
-
- msg = 'Invalid unit_test ID "uses-a-dash"; IDs cannot contain a "-"'
- with self.assertRaisesRegexp(ConfigError, msg):
- _collect_valid_id("uses-a-dash", set(), "unit_test")
-
- msg = 'Invalid unit_test ID "global"; is a reserved ID'
- with self.assertRaisesRegexp(ConfigError, msg):
- _collect_valid_id("global", set(), "unit_test")
-
- msg = 'Duplicate unit_test ID "duplicate"'
- with self.assertRaisesRegexp(ConfigError, msg):
- _collect_valid_id("duplicate", set(["duplicate"]), "unit_test")
-
- def test_resolve_params_alias(self):
- test = {"params": "some_value"}
- _resolve_params_alias(test, "new_name")
- assert_equal(test, {"new_name_parameters": "some_value"})
-
- # Test it only affects "params"
- _resolve_params_alias(test, "new_name")
- assert_equal(test, {"new_name_parameters": "some_value"})
-
- test["params"] = "some_other_value"
- with self.assertRaises(ConfigError):
- _resolve_params_alias(test, "new_name")
-
-
-class TestConfigParser(TestCase):
-
- def test_error_cases(self):
- wa_config = Mock(spec=MetaConfiguration)
- wa_config.configuration = MetaConfiguration.configuration
- run_config = Mock(spec=RunConfiguration)
- run_config.configuration = RunConfiguration.configuration
- config_parser = ConfigParser(wa_config,
- run_config,
- Mock(spec=JobGenerator),
- Mock(spec=PluginCache))
-
- # "run_name" can only be in agenda config sections
- #' and is handled by AgendaParser
- err = 'Error in "Unit test":\n' \
- '"run_name" can only be specified in the config section of an agenda'
- with self.assertRaisesRegexp(ConfigError, err):
- config_parser.load({"run_name": "test"}, "Unit test")
-
- # Instrument and result_processor lists in the same config cannot
- # have conflicting entries.
- err = 'Error in "Unit test":\n' \
- '"instrumentation" and "result_processors" have conflicting entries:'
- with self.assertRaisesRegexp(ConfigError, err):
- config_parser.load({"instruments": ["one", "two", "three"],
- "result_processors": ["~one", "~two", "~three"]},
- "Unit test")
-
- def test_config_points(self):
- wa_config = Mock(spec=MetaConfiguration)
- wa_config.configuration = MetaConfiguration.configuration
-
- run_config = Mock(spec=RunConfiguration)
- run_config.configuration = RunConfiguration.configuration
-
- jobs_config = Mock(spec=JobGenerator)
- plugin_cache = Mock(spec=PluginCache)
- config_parser = ConfigParser(wa_config, run_config, jobs_config, plugin_cache)
-
- cfg = {
- "assets_repository": "/somewhere/",
- "logging": "verbose",
- "project": "some project",
- "project_stage": "stage 1",
- "iterations": 9001,
- "workload_name": "name"
- }
- config_parser.load(cfg, "Unit test")
- wa_config.set.assert_has_calls([
- call("assets_repository", "/somewhere/"),
- call("logging", "verbose")
- ], any_order=True)
- run_config.set.assert_has_calls([
- call("project", "some project"),
- call("project_stage", "stage 1")
- ], any_order=True)
- jobs_config.set_global_value.assert_has_calls([
- call("iterations", 9001),
- call("workload_name", "name"),
- call("instrumentation", toggle_set())
- ], any_order=True)
-
- # Test setting global instruments including a non-conflicting duplicate ("two")
- jobs_config.reset_mock()
- instruments_and_result_processors = {
- "instruments": ["one", "two"],
- "result_processors": ["two", "three"]
- }
- config_parser.load(instruments_and_result_processors, "Unit test")
- jobs_config.set_global_value.assert_has_calls([
- call("instrumentation", toggle_set(["one", "two", "three"]))
- ], any_order=True)
-
- # Testing a empty config
- jobs_config.reset_mock()
- config_parser.load({}, "Unit test")
- jobs_config.set_global_value.assert_has_calls([], any_order=True)
- wa_config.set.assert_has_calls([], any_order=True)
- run_config.set.assert_has_calls([], any_order=True)
-
-
-class TestAgendaParser(TestCase):
-
- # Tests Phase 1 & 2
- def test_valid_structures(self):
- wa_config = Mock(spec=MetaConfiguration)
- wa_config.configuration = MetaConfiguration.configuration
- run_config = Mock(spec=RunConfiguration)
- run_config.configuration = RunConfiguration.configuration
- jobs_config = Mock(spec=JobGenerator)
- plugin_cache = Mock(spec=PluginCache)
- agenda_parser = AgendaParser(wa_config, run_config, jobs_config, plugin_cache)
-
- msg = 'Error in "Unit Test":\n\tInvalid agenda, top level entry must be a dict'
- with self.assertRaisesRegexp(ConfigError, msg):
- agenda_parser.load(123, "Unit Test")
-
- def _test_bad_type(name, source, msg):
- error_msg = msg.format(source=source, name=name)
- with self.assertRaisesRegexp(ConfigError, error_msg):
- agenda_parser.load({name: 123}, source)
-
- msg = 'Error in "{source}":\n\tInvalid entry "{name}" - must be a dict'
- _test_bad_type("config", "Unit Test", msg)
- _test_bad_type("global", "Unit Test", msg)
-
- msg = 'Error in "Unit Test":\n\tInvalid entry "{name}" - must be a list'
- _test_bad_type("sections", "Unit Test", msg)
- _test_bad_type("workloads", "Unit Test", msg)
-
- msg = 'Error in "Unit Test":\n\tInvalid top level agenda entry\(ies\): "{name}"'
- _test_bad_type("not_a_valid_entry", "Unit Test", msg)
-
- # Test Phase 3
- def test_id_collection(self):
- wa_config = Mock(spec=MetaConfiguration)
- wa_config.configuration = MetaConfiguration.configuration
- run_config = Mock(spec=RunConfiguration)
- run_config.configuration = RunConfiguration.configuration
- jobs_config = Mock(spec=JobGenerator)
- plugin_cache = Mock(spec=PluginCache)
- agenda_parser = AgendaParser(wa_config, run_config, jobs_config, plugin_cache)
-
- agenda = {
- "workloads": [
- {"id": "test1"},
- {"id": "test2"},
- ],
- "sections": [
- {"id": "section1",
- "workloads": [
- {"id": "section1_workload"}
- ]}
- ]
- }
- workloads, sections = agenda_parser.load(agenda, "Unit Test")
- assert_equal(sections, set(["section1"]))
- assert_equal(workloads, set(["test1", "test2", "section1_workload"]))
-
- # Test Phase 4
- def test_id_assignment(self):
- wa_config = Mock(spec=MetaConfiguration)
- wa_config.configuration = MetaConfiguration.configuration
- run_config = Mock(spec=RunConfiguration)
- run_config.configuration = RunConfiguration.configuration
- jobs_config = Mock(spec=JobGenerator)
- plugin_cache = Mock(spec=PluginCache)
- agenda_parser = AgendaParser(wa_config, run_config, jobs_config, plugin_cache)
-
- # Helper function
- def _assert_ids(ids, expected):
- ids_set = set(ids)
- assert_equal(len(ids), len(ids_set))
- assert_equal(ids_set, set(expected))
-
- def _assert_workloads_sections(jobs_config, expected_sect, expected_wk):
- wk_ids = [wk[0][0]['id'] for wk in jobs_config.add_workload.call_args_list]
- # section workloads
- for s in jobs_config.add_section.call_args_list:
- wk_ids += [wk['id'] for wk in s[0][1]]
- #sections
- sec_ids = set([s[0][0]['id'] for s in jobs_config.add_section.call_args_list])
- _assert_ids(wk_ids, set(expected_wk))
- _assert_ids(sec_ids, set(expected_sect))
- _reset_jobs_config(jobs_config)
-
- def _reset_jobs_config(jobs_config):
- jobs_config.reset_mock()
- reset_counter("wk")
- reset_counter("s")
-
- # Test auto id assignment
- auto_id = {
- "workloads": [
- {"name": 1},
- {"name": 2},
- {"name": 3},
- ],
- "sections": [
- {"name": 4,
- "workloads": [
- {"name": 7},
- {"name": 8},
- {"name": 9},
- ]},
- {"name": 5},
- {"name": 6},
- ]
- }
- agenda_parser.load(auto_id, "Unit Test")
- _assert_workloads_sections(jobs_config, ["s1", "s2", "s3"],
- ["wk1", "wk2", "wk3", "wk4", "wk5", "wk6"])
-
- # Test user defined IDs
- user_ids = {
- "workloads": [
- {"id": "user1"},
- {"name": "autoid1"},
- ],
- "sections": [
- {"id": "user_section1",
- "workloads": [
- {"name": "autoid2"}
- ]}
- ]
- }
- agenda_parser.load(user_ids, "Unit Test")
- _assert_workloads_sections(jobs_config, ["user_section1"],
- ["user1", "wk1", "wk2"])
-
- # Test auto asigned ID already present
- used_auto_id = {
- "workloads": [
- {"id": "wk2"},
- {"name": 2},
- {"name": 3},
- ],
- }
- agenda_parser.load(used_auto_id, "Unit Test")
- _assert_workloads_sections(jobs_config, [], ["wk1", "wk2", "wk3"])
-
- # Test string workload
- string = {
- "workloads": [
- "test"
- ]
- }
- agenda_parser.load(string, "Unit Test")
- workload = jobs_config.add_workload.call_args_list[0][0][0]
- assert_equal(isinstance(workload, dict), True)
- assert_equal(workload['workload_name'], "test")
-
-
-
-
-class TestCommandLineArgsParser(TestCase):
- wa_config = Mock(spec=MetaConfiguration)
- run_config = Mock(spec=RunConfiguration)
- jobs_config = Mock(spec=JobGenerator)
-
- cmd_args = MagicMock(
- verbosity=1,
- output_directory="my_results",
- instruments_to_disable=["abc", "def", "ghi"],
- only_run_ids=["wk1", "s1_wk4"],
- some_other_setting="value123"
- )
- CommandLineArgsParser(cmd_args, wa_config, jobs_config)
- wa_config.set.assert_has_calls([call("verbosity", 1)], any_order=True)
- jobs_config.disable_instruments.assert_has_calls([
- call(toggle_set(["~abc", "~def", "~ghi"]))
- ], any_order=True)
- jobs_config.only_run_ids.assert_has_calls([call(["wk1", "s1_wk4"])], any_order=True)
diff --git a/wlauto/tests/test_results_manager.py b/wlauto/tests/test_results_manager.py
deleted file mode 100644
index edf7e508..00000000
--- a/wlauto/tests/test_results_manager.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=W0231,W0613,E0611,W0603,R0201
-from unittest import TestCase
-
-from nose.tools import assert_equal, assert_true, assert_false, assert_raises
-
-from wlauto.core.result import ResultProcessor, ResultManager
-from wlauto.exceptions import WAError
-
-
-class MockResultProcessor1(ResultProcessor):
-
- name = 'result_processor_with_exception'
-
- def process_iteration_result(self, result, context):
- raise Exception()
-
- def process_run_result(self, result, context):
- raise Exception()
-
-
-class MockResultProcessor2(ResultProcessor):
-
- name = 'result_processor_with_wa_error'
-
- def process_iteration_result(self, result, context):
- raise WAError()
-
- def process_run_result(self, result, context):
- raise WAError()
-
-
-class MockResultProcessor3(ResultProcessor):
-
- name = 'result_processor_with_keybaord_interrupt'
-
- def process_iteration_result(self, result, context):
- raise KeyboardInterrupt()
-
- def process_run_result(self, result, context):
- raise KeyboardInterrupt()
-
-
-class MockResultProcessor4(ResultProcessor):
-
- name = 'result_processor'
-
- def __init__(self):
- super(MockResultProcessor4, self).__init__()
- self.is_invoked = False
-
- def process_iteration_result(self, result, context):
- self.is_invoked = True
-
- def process_run_result(self, result, context):
- self.is_invoked = True
-
-
-class ResultManagerTest(TestCase):
-
- def test_keyboard_interrupt(self):
- processor_keyboard_interrupt = _instantiate(MockResultProcessor3)
-
- # adding the results processor to the result manager
- manager = ResultManager()
- assert_false(manager.processors)
-
- # adding the results processor to the result manager
- manager.install(processor_keyboard_interrupt)
-
- assert_equal(len(manager.processors), 1)
- assert_raises(KeyboardInterrupt, manager.add_result, None, None)
-
- def test_add_result(self):
- processor_generic_exception = _instantiate(MockResultProcessor1)
- processor_wa_error = _instantiate(MockResultProcessor2)
- processor = _instantiate(MockResultProcessor4)
-
- # adding the results processor to the result manager
- manager = ResultManager()
- assert_false(manager.processors)
-
- # adding the results processor to the result manager
- manager.install(processor_generic_exception)
- manager.install(processor_wa_error)
- manager.install(processor)
-
- assert_equal(len(manager.processors), 3)
- manager.add_result(None, None)
-
- assert_true(processor.is_invoked)
-
- def test_process_results(self):
- processor_generic_exception = _instantiate(MockResultProcessor1)
- processor_wa_error = _instantiate(MockResultProcessor2)
- processor = _instantiate(MockResultProcessor4)
-
- # adding the results processor to the result manager
- manager = ResultManager()
- assert_false(manager.processors)
-
- # adding the results processor to the result manager
- manager.install(processor_generic_exception)
- manager.install(processor_wa_error)
- manager.install(processor)
-
- assert_equal(len(manager.processors), 3)
- manager.process_run_result(None, None)
-
- assert_true(processor.is_invoked)
-
-
-def _instantiate(cls):
- # Needed to get around Plugin's __init__ checks
- return cls()
diff --git a/wlauto/tests/test_utils.py b/wlauto/tests/test_utils.py
deleted file mode 100644
index 63f45661..00000000
--- a/wlauto/tests/test_utils.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=R0201
-from unittest import TestCase
-
-from nose.tools import raises, assert_equal, assert_not_equal # pylint: disable=E0611
-
-from wlauto.utils.android import check_output
-from wlauto.utils.misc import merge_dicts, merge_lists, TimeoutError
-from wlauto.utils.types import list_or_integer, list_or_bool, caseless_string, arguments, toggle_set
-
-
-class TestCheckOutput(TestCase):
-
- def test_ok(self):
- check_output("python -c 'import time; time.sleep(0.1)'", timeout=0.5, shell=True)
-
- @raises(TimeoutError)
- def test_bad(self):
- check_output("python -c 'import time; time.sleep(1)'", timeout=0.5, shell=True)
-
-
-class TestMerge(TestCase):
-
- def test_dict_merge(self):
- base = {'a': 1, 'b': {'x': 9, 'z': 10}}
- other = {'b': {'x': 7, 'y': 8}, 'c': [1, 2, 3]}
- result = merge_dicts(base, other)
- assert_equal(result['a'], 1)
- assert_equal(result['b']['x'], 7)
- assert_equal(result['b']['y'], 8)
- assert_equal(result['b']['z'], 10)
- assert_equal(result['c'], [1, 2, 3])
-
- def test_merge_dict_lists(self):
- base = {'a': [1, 3, 2]}
- other = {'a': [3, 4, 5]}
- result = merge_dicts(base, other)
- assert_equal(result['a'], [1, 3, 2, 3, 4, 5])
- result = merge_dicts(base, other, list_duplicates='first')
- assert_equal(result['a'], [1, 3, 2, 4, 5])
- result = merge_dicts(base, other, list_duplicates='last')
- assert_equal(result['a'], [1, 2, 3, 4, 5])
-
- def test_merge_lists(self):
- result = merge_lists([1, 2, 3], 7)
- assert_equal(result, [1, 2, 3, 7])
- result = merge_lists([1, 2, 3], 1, duplicates='last')
- assert_equal(result, [2, 3, 1])
-
- @raises(ValueError)
- def test_type_mismatch(self):
- base = {'a': [1, 2, 3]}
- other = {'a': 'test'}
- merge_dicts(base, other, match_types=True)
-
-
-class TestTypes(TestCase):
-
- def test_list_or_conversion(self):
- assert_equal(list_or_integer([1, '2', 3]), [1, 2, 3])
- assert_equal(list_or_integer('0xF'), [15,])
- assert_equal(list_or_bool('False'), [False,])
-
- def test_caseless_string(self):
- cs1 = caseless_string('TeSt')
- assert_equal(cs1, 'TeSt')
- assert_equal('test', cs1)
- assert_equal(cs1[0], 'T')
- assert_not_equal(cs1[0], 't')
- assert_not_equal(cs1, 'test2')
-
- def test_arguments(self):
- assert_equal(arguments('--foo 7 --bar "fizz buzz"'),
- ['--foo', '7', '--bar', 'fizz buzz'])
- assert_equal(arguments(['test', 42]), ['test', '42'])
-
- def toggle_set_test():
-
- a = toggle_set(['qaz', 'qwert', 'asd', '~fgh', '~seb'])
- b = toggle_set(['qaz', 'xyz', '~asd', 'fgh', '~seb'])
-
- a_into_b = ['qaz', 'xyz', '~seb', 'qwert', 'asd', '~fgh']
- assert_equal(a.merge_into(b), a_into_b)
- assert_equal(b.merge_with(a), a_into_b)
-
- b_into_a = ['qaz', 'qwert', '~seb', 'xyz', '~asd', 'fgh']
- assert_equal(b.merge_into(a), b_into_a)
- assert_equal(a.merge_with(b), b_into_a)
-
- assert_equal(a.values(), ['qaz', 'qwert', 'asd'])
- assert_equal(b.merge_with(a).values(), ['qaz', 'xyz', 'qwert', 'asd'])
-
- assert_equal(a.values(), ['qaz', 'qwert', 'asd'])
- assert_equal(a.conflicts_with(b), ['~asd', '~fgh'])
diff --git a/wlauto/tools/__init__.py b/wlauto/tools/__init__.py
deleted file mode 100644
index cd5d64d6..00000000
--- a/wlauto/tools/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/tools/extdoc.py b/wlauto/tools/extdoc.py
deleted file mode 100644
index a2f3634d..00000000
--- a/wlauto/tools/extdoc.py
+++ /dev/null
@@ -1,134 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-This module contains utilities for generating user documentation for Workload
-Automation Plugins.
-
-"""
-import re
-import inspect
-
-
-PARAGRAPH_SEP = re.compile(r'\n\n+')
-LINE_START = re.compile(r'\n\s*')
-
-
-def get_paragraphs(text):
- """returns a list of paragraphs contained in the text"""
- return [LINE_START.sub(' ', p) for p in PARAGRAPH_SEP.split(text)]
-
-
-class PluginDocumenter(object):
-
- @property
- def name(self):
- return self.ext.name
-
- @property
- def summary(self):
- """Returns the summary description for this Plugin, which, by
- convention, is the first paragraph of the description."""
- return get_paragraphs(self.description)[0]
-
- @property
- def description(self):
- """
- The description for an plugin is specified in the ``description``
- attribute, or (legacy) as a docstring for the plugin's class. If
- neither method is used in the Plugin, an empty string is returned.
-
- Description is assumed to be formed as reStructuredText. Leading and
- trailing whitespace will be stripped away.
-
- """
- if hasattr(self.ext, 'description'):
- return self.ext.description.strip()
- elif self.ext.__class__.__doc__:
- return self.ext.__class__.__doc__.strip()
- else:
- return ''
-
- @property
- def parameters(self):
- return [PluginParameterDocumenter(p) for p in self.ext.parameters]
-
- def __init__(self, ext):
- self.ext = ext
-
-
-class PluginParameterDocumenter(object):
-
- @property
- def name(self):
- return self.param.name
-
- @property
- def kind(self):
- return self.param.get_type_name()
-
- @property
- def default(self):
- return self.param.default
-
- @property
- def description(self):
- return self.param.description
-
- @property
- def constraint(self):
- constraints = []
- if self.param.allowed_values:
- constraints.append('value must be in {}'.format(self.param.allowed_values))
- if self.param.constraint:
- constraint_text = self.param.constraint.__name__
- if constraint_text == '<lambda>':
- constraint_text = _parse_lambda(inspect.getsource(self.param.constraint))
- constraints.append(constraint_text)
- return ' and '.join(constraints)
-
- def __init__(self, param):
- self.param = param
-
-
-# Utility functions
-
-
-def _parse_lambda(text):
- """Parse the definition of a lambda function in to a readable string."""
- text = text.split('lambda')[1]
- param, rest = text.split(':')
- param = param.strip()
- # There are three things that could terminate a lambda: an (unparenthesized)
- # comma, a new line and an (unmatched) close paren.
- term_chars = [',', '\n', ')']
- func_text = ''
- inside_paren = 0 # an int rather than a bool to keep track of nesting
- for c in rest:
- if c in term_chars and not inside_paren:
- break
- elif c == ')': # must be inside paren
- inside_paren -= 1
- elif c == '(':
- inside_paren += 1
- func_text += c
-
- # Rename the lambda parameter to 'value' so that the resulting
- # "description" makes more sense.
- func_text = re.sub(r'\b{}\b'.format(param), 'value', func_text)
-
- return func_text
-
diff --git a/wlauto/utils/__init__.py b/wlauto/utils/__init__.py
deleted file mode 100644
index 3e74b613..00000000
--- a/wlauto/utils/__init__.py
+++ /dev/null
@@ -1,14 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
diff --git a/wlauto/utils/android.py b/wlauto/utils/android.py
deleted file mode 100644
index 79a4cdcd..00000000
--- a/wlauto/utils/android.py
+++ /dev/null
@@ -1,69 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-Utility functions for working with Android devices through adb.
-
-"""
-# pylint: disable=E1103
-import os
-import time
-import subprocess
-import logging
-import re
-
-from wlauto.exceptions import DeviceError, ConfigError, HostError
-from wlauto.utils.misc import check_output, escape_single_quotes, escape_double_quotes, get_null
-
-from devlib.utils.android import ANDROID_VERSION_MAP, adb_command, ApkInfo
-
-# See:
-# http://developer.android.com/guide/topics/security/normal-permissions.html
-ANDROID_NORMAL_PERMISSIONS = [
- 'ACCESS_LOCATION_EXTRA_COMMANDS',
- 'ACCESS_NETWORK_STATE',
- 'ACCESS_NOTIFICATION_POLICY',
- 'ACCESS_WIFI_STATE',
- 'BLUETOOTH',
- 'BLUETOOTH_ADMIN',
- 'BROADCAST_STICKY',
- 'CHANGE_NETWORK_STATE',
- 'CHANGE_WIFI_MULTICAST_STATE',
- 'CHANGE_WIFI_STATE',
- 'DISABLE_KEYGUARD',
- 'EXPAND_STATUS_BAR',
- 'GET_PACKAGE_SIZE',
- 'INTERNET',
- 'KILL_BACKGROUND_PROCESSES',
- 'MODIFY_AUDIO_SETTINGS',
- 'NFC',
- 'READ_SYNC_SETTINGS',
- 'READ_SYNC_STATS',
- 'RECEIVE_BOOT_COMPLETED',
- 'REORDER_TASKS',
- 'REQUEST_INSTALL_PACKAGES',
- 'SET_TIME_ZONE',
- 'SET_WALLPAPER',
- 'SET_WALLPAPER_HINTS',
- 'TRANSMIT_IR',
- 'USE_FINGERPRINT',
- 'VIBRATE',
- 'WAKE_LOCK',
- 'WRITE_SYNC_SETTINGS',
- 'SET_ALARM',
- 'INSTALL_SHORTCUT',
- 'UNINSTALL_SHORTCUT',
-]
diff --git a/wlauto/utils/cpuinfo.py b/wlauto/utils/cpuinfo.py
deleted file mode 100644
index 0bfc4863..00000000
--- a/wlauto/utils/cpuinfo.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-class Cpuinfo(object):
-
- @property
- def architecture(self):
- for section in self.sections:
- if 'CPU architecture' in section:
- return section['CPU architecture']
- if 'architecture' in section:
- return section['architecture']
-
- def __init__(self, text):
- self.sections = None
- self.text = None
- self.parse(text)
-
- def parse(self, text):
- self.sections = []
- current_section = {}
- self.text = text.strip()
- for line in self.text.split('\n'):
- line = line.strip()
- if line:
- key, value = line.split(':', 1)
- current_section[key.strip()] = value.strip()
- else: # not line
- self.sections.append(current_section)
- current_section = {}
- self.sections.append(current_section)
diff --git a/wlauto/utils/doc.py b/wlauto/utils/doc.py
deleted file mode 100644
index eef5b8e4..00000000
--- a/wlauto/utils/doc.py
+++ /dev/null
@@ -1,307 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-Utilities for working with and formatting documentation.
-
-"""
-import os
-import re
-import inspect
-from itertools import cycle
-
-USER_HOME = os.path.expanduser('~')
-
-BULLET_CHARS = '-*'
-
-
-def get_summary(aclass):
- """
- Returns the summary description for an plugin class. The summary is the
- first paragraph (separated by blank line) of the description taken either from
- the ``descripton`` attribute of the class, or if that is not present, from the
- class' docstring.
-
- """
- return get_description(aclass).split('\n\n')[0]
-
-
-def get_description(aclass):
- """
- Return the description of the specified plugin class. The description is taken
- either from ``description`` attribute of the class or its docstring.
-
- """
- if hasattr(aclass, 'description') and aclass.description:
- return inspect.cleandoc(aclass.description)
- if aclass.__doc__:
- return inspect.getdoc(aclass)
- else:
- return 'no documentation found for {}'.format(aclass.__name__)
-
-
-def get_type_name(obj):
- """Returns the name of the type object or function specified. In case of a lambda,
- the definiition is returned with the parameter replaced by "value"."""
- match = re.search(r"<(type|class|function) '?(.*?)'?>", str(obj))
- if isinstance(obj, tuple):
- name = obj[1]
- elif match.group(1) == 'function':
- text = str(obj)
- name = text.split()[1]
- if name == '<lambda>':
- source = inspect.getsource(obj).strip().replace('\n', ' ')
- match = re.search(r'lambda\s+(\w+)\s*:\s*(.*?)\s*[\n,]', source)
- if not match:
- raise ValueError('could not get name for {}'.format(obj))
- name = match.group(2).replace(match.group(1), 'value')
- else:
- name = match.group(2)
- if '.' in name:
- name = name.split('.')[-1]
- return name
-
-
-def count_leading_spaces(text):
- """
- Counts the number of leading space characters in a string.
-
- TODO: may need to update this to handle whitespace, but shouldn't
- be necessary as there should be no tabs in Python source.
-
- """
- nspaces = 0
- for c in text:
- if c == ' ':
- nspaces += 1
- else:
- break
- return nspaces
-
-
-def format_column(text, width):
- """
- Formats text into a column of specified width. If a line is too long,
- it will be broken on a word boundary. The new lines will have the same
- number of leading spaces as the original line.
-
- Note: this will not attempt to join up lines that are too short.
-
- """
- formatted = []
- for line in text.split('\n'):
- line_len = len(line)
- if line_len <= width:
- formatted.append(line)
- else:
- words = line.split(' ')
- new_line = words.pop(0)
- while words:
- next_word = words.pop(0)
- if (len(new_line) + len(next_word) + 1) < width:
- new_line += ' ' + next_word
- else:
- formatted.append(new_line)
- new_line = ' ' * count_leading_spaces(new_line) + next_word
- formatted.append(new_line)
- return '\n'.join(formatted)
-
-
-def format_bullets(text, width, char='-', shift=3, outchar=None):
- """
- Formats text into bulleted list. Assumes each line of input that starts with
- ``char`` (possibly preceeded with whitespace) is a new bullet point. Note: leading
- whitespace in the input will *not* be preserved. Instead, it will be determined by
- ``shift`` parameter.
-
- :text: the text to be formated
- :width: format width (note: must be at least ``shift`` + 4).
- :char: character that indicates a new bullet point in the input text.
- :shift: How far bulleted entries will be indented. This indicates the indentation
- level of the bullet point. Text indentation level will be ``shift`` + 3.
- :outchar: character that will be used to mark bullet points in the output. If
- left as ``None``, ``char`` will be used.
-
- """
- bullet_lines = []
- output = ''
-
- def __process_bullet(bullet_lines):
- if bullet_lines:
- bullet = format_paragraph(indent(' '.join(bullet_lines), shift + 2), width)
- bullet = bullet[:3] + outchar + bullet[4:]
- del bullet_lines[:]
- return bullet + '\n'
- else:
- return ''
-
- if outchar is None:
- outchar = char
- for line in text.split('\n'):
- line = line.strip()
- if line.startswith(char): # new bullet
- output += __process_bullet(bullet_lines)
- line = line[1:].strip()
- bullet_lines.append(line)
- output += __process_bullet(bullet_lines)
- return output
-
-
-def format_simple_table(rows, headers=None, align='>', show_borders=True, borderchar='='): # pylint: disable=R0914
- """Formats a simple table."""
- if not rows:
- return ''
- rows = [map(str, r) for r in rows]
- num_cols = len(rows[0])
-
- # cycle specified alignments until we have num_cols of them. This is
- # consitent with how such cases are handled in R, pandas, etc.
- it = cycle(align)
- align = [it.next() for _ in xrange(num_cols)]
-
- cols = zip(*rows)
- col_widths = [max(map(len, c)) for c in cols]
- if headers:
- col_widths = [max(len(h), cw) for h, cw in zip(headers, col_widths)]
- row_format = ' '.join(['{:%s%s}' % (align[i], w) for i, w in enumerate(col_widths)])
- row_format += '\n'
-
- border = row_format.format(*[borderchar * cw for cw in col_widths])
-
- result = border if show_borders else ''
- if headers:
- result += row_format.format(*headers)
- result += border
- for row in rows:
- result += row_format.format(*row)
- if show_borders:
- result += border
- return result
-
-
-def format_paragraph(text, width):
- """
- Format the specified text into a column of specified with. The text is
- assumed to be a single paragraph and existing line breaks will not be preserved.
- Leading spaces (of the initial line), on the other hand, will be preserved.
-
- """
- text = re.sub('\n\n*\\s*', ' ', text.strip('\n'))
- return format_column(text, width)
-
-
-def format_body(text, width):
- """
- Format the specified text into a column of specified width. The text is
- assumed to be a "body" of one or more paragraphs separated by one or more
- blank lines. The initial indentation of the first line of each paragraph
- will be presevered, but any other formatting may be clobbered.
-
- """
- text = re.sub('\n\\s*\n', '\n\n', text.strip('\n')) # get rid of all-whitespace lines
- paragraphs = re.split('\n\n+', text)
- formatted_paragraphs = []
- for p in paragraphs:
- if p.strip() and p.strip()[0] in BULLET_CHARS:
- formatted_paragraphs.append(format_bullets(p, width))
- else:
- formatted_paragraphs.append(format_paragraph(p, width))
- return '\n\n'.join(formatted_paragraphs)
-
-
-def strip_inlined_text(text):
- """
- This function processes multiline inlined text (e.g. form docstrings)
- to strip away leading spaces and leading and trailing new lines.
-
- """
- text = text.strip('\n')
- lines = [ln.rstrip() for ln in text.split('\n')]
-
- # first line is special as it may not have the indet that follows the
- # others, e.g. if it starts on the same as the multiline quote (""").
- nspaces = count_leading_spaces(lines[0])
-
- if len([ln for ln in lines if ln]) > 1:
- to_strip = min(count_leading_spaces(ln) for ln in lines[1:] if ln)
- if nspaces >= to_strip:
- stripped = [lines[0][to_strip:]]
- else:
- stripped = [lines[0][nspaces:]]
- stripped += [ln[to_strip:] for ln in lines[1:]]
- else:
- stripped = [lines[0][nspaces:]]
- return '\n'.join(stripped).strip('\n')
-
-
-def indent(text, spaces=4):
- """Indent the lines i the specified text by ``spaces`` spaces."""
- indented = []
- for line in text.split('\n'):
- if line:
- indented.append(' ' * spaces + line)
- else: # do not indent emtpy lines
- indented.append(line)
- return '\n'.join(indented)
-
-
-def format_literal(lit):
- if isinstance(lit, basestring):
- return '``\'{}\'``'.format(lit)
- elif hasattr(lit, 'pattern'): # regex
- return '``r\'{}\'``'.format(lit.pattern)
- else:
- return '``{}``'.format(lit)
-
-
-def get_params_rst(ext):
- text = ''
- for param in ext.parameters:
- text += '{} : {} {}\n'.format(param.name, get_type_name(param.kind),
- param.mandatory and '(mandatory)' or ' ')
- desc = strip_inlined_text(param.description or '')
- text += indent('{}\n'.format(desc))
- if param.allowed_values:
- text += indent('\nallowed values: {}\n'.format(', '.join(map(format_literal, param.allowed_values))))
- elif param.constraint:
- text += indent('\nconstraint: ``{}``\n'.format(get_type_name(param.constraint)))
- if param.default:
- value = param.default
- if isinstance(value, basestring) and value.startswith(USER_HOME):
- value = value.replace(USER_HOME, '~')
- text += indent('\ndefault: {}\n'.format(format_literal(value)))
- text += '\n'
- return text
-
-
-def underline(text, symbol='='):
- return '{}\n{}\n\n'.format(text, symbol * len(text))
-
-
-def get_rst_from_plugin(ext):
- text = underline(ext.name, '-')
- if hasattr(ext, 'description'):
- desc = strip_inlined_text(ext.description or '')
- elif ext.__doc__:
- desc = strip_inlined_text(ext.__doc__)
- else:
- desc = ''
- text += desc + '\n\n'
- params_rst = get_params_rst(ext)
- if params_rst:
- text += underline('parameters', '~') + params_rst
- return text + '\n'
-
diff --git a/wlauto/utils/exec_control.py b/wlauto/utils/exec_control.py
deleted file mode 100644
index 01025ee0..00000000
--- a/wlauto/utils/exec_control.py
+++ /dev/null
@@ -1,117 +0,0 @@
-from inspect import getmro
-
-# "environment" management:
-__environments = {}
-__active_environment = None
-
-
-def activate_environment(name):
- """
- Sets the current tracking environment to ``name``. If an
- environment with that name does not already exist, it will be
- created.
- """
- #pylint: disable=W0603
- global __active_environment
-
- if name not in __environments.keys():
- init_environment(name)
- __active_environment = name
-
-
-def init_environment(name):
- """
- Create a new environment called ``name``, but do not set it as the
- current environment.
-
- :raises: ``ValueError`` if an environment with name ``name``
- already exists.
- """
- if name in __environments.keys():
- msg = "Environment {} already exists".format(name)
- raise ValueError(msg)
- __environments[name] = []
-
-
-def reset_environment(name=None):
- """
- Reset method call tracking for environment ``name``. If ``name`` is
- not specified or is ``None``, reset the current active environment.
-
- :raises: ``ValueError`` if an environment with name ``name``
- does not exist.
- """
-
- if name is not None:
- if name not in __environments.keys():
- msg = "Environment {} does not exist".format(name)
- raise ValueError(msg)
- __environments[name] = []
- else:
- if __active_environment is None:
- raise ValueError("No Environment Active")
- __environments[__active_environment] = []
-
-# The decorators:
-
-
-def once_per_instance(method):
- """
- The specified method will be invoked only once for every bound
- instance within the environment.
- """
- def wrapper(*args, **kwargs):
- if __active_environment is None:
- raise ValueError("No Environment Active")
- func_id = repr(args[0])
- if func_id in __environments[__active_environment]:
- return
- else:
- __environments[__active_environment].append(func_id)
- return method(*args, **kwargs)
-
- return wrapper
-
-
-def once_per_class(method):
- """
- The specified method will be invoked only once for all instances
- of a class within the environment.
- """
- def wrapper(*args, **kwargs):
- if __active_environment is None:
- raise ValueError("No Environment Active")
-
- func_id = repr(method.func_name) + repr(args[0].__class__)
-
- if func_id in __environments[__active_environment]:
- return
- else:
- __environments[__active_environment].append(func_id)
- return method(*args, **kwargs)
-
- return wrapper
-
-
-def once(method):
-
- """
- The specified method will be invoked only once within the
- environment.
- """
- def wrapper(*args, **kwargs):
- if __active_environment is None:
- raise ValueError("No Environment Active")
-
- func_id = repr(method.func_name)
- # Store the least derived class, which isn't object, to account
- # for subclasses.
- func_id += repr(getmro(args[0].__class__)[-2])
-
- if func_id in __environments[__active_environment]:
- return
- else:
- __environments[__active_environment].append(func_id)
- return method(*args, **kwargs)
-
- return wrapper
diff --git a/wlauto/utils/formatter.py b/wlauto/utils/formatter.py
deleted file mode 100644
index 11b2154d..00000000
--- a/wlauto/utils/formatter.py
+++ /dev/null
@@ -1,147 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto.utils.terminalsize import get_terminal_size
-
-
-INDENTATION_FROM_TITLE = 4
-
-
-class TextFormatter(object):
-
- """
- This is a base class for text formatting. It mainly ask to implement two
- methods which are add_item and format_data. The formar will add new text to
- the formatter, whereas the latter will return a formatted text. The name
- attribute represents the name of the foramtter.
- """
-
- name = None
- data = None
-
- def __init__(self):
- pass
-
- def add_item(self, new_data, item_title):
- """
- Add new item to the text formatter.
-
- :param new_data: The data to be added
- :param item_title: A title for the added data
- """
- raise NotImplementedError()
-
- def format_data(self):
- """
- It returns a formatted text
- """
- raise NotImplementedError()
-
-
-class DescriptionListFormatter(TextFormatter):
-
- name = 'description_list_formatter'
- data = None
-
- def get_text_width(self):
- if not self._text_width:
- self._text_width, _ = get_terminal_size() # pylint: disable=unpacking-non-sequence
- return self._text_width
-
- def set_text_width(self, value):
- self._text_width = value
-
- text_width = property(get_text_width, set_text_width)
-
- def __init__(self, title=None, width=None):
- super(DescriptionListFormatter, self).__init__()
- self.data_title = title
- self._text_width = width
- self.longest_word_length = 0
- self.data = []
-
- def add_item(self, new_data, item_title):
- if len(item_title) > self.longest_word_length:
- self.longest_word_length = len(item_title)
- self.data[len(self.data):] = [(item_title, self._remove_newlines(new_data))]
-
- def format_data(self):
- parag_indentation = self.longest_word_length + INDENTATION_FROM_TITLE
- string_formatter = '{}:<{}{} {}'.format('{', parag_indentation, '}', '{}')
-
- formatted_data = ''
- if self.data_title:
- formatted_data += self.data_title
-
- line_width = self.text_width - parag_indentation
- for title, paragraph in self.data:
- if paragraph:
- formatted_data += '\n'
- title_len = self.longest_word_length - len(title)
- title += ':'
- if title_len > 0:
- title = (' ' * title_len) + title
-
- parag_lines = self._break_lines(paragraph, line_width).splitlines()
- if parag_lines:
- formatted_data += string_formatter.format(title, parag_lines[0])
- for line in parag_lines[1:]:
- formatted_data += '\n' + string_formatter.format('', line)
-
- self.text_width = None
- return formatted_data
-
- # Return text's paragraphs sperated in a list, such that each index in the
- # list is a single text paragraph with no new lines
- def _remove_newlines(self, new_data): # pylint: disable=R0201
- parag_list = ['']
- parag_num = 0
- prv_parag = None
- # For each paragraph sperated by a new line
- for paragraph in new_data.splitlines():
- if paragraph:
- parag_list[parag_num] += ' ' + paragraph
- # if the previous line is NOT empty, then add new empty index for
- # the next paragraph
- elif prv_parag:
- parag_num = 1
- parag_list.append('')
- prv_parag = paragraph
-
- # sometimes, we end up with an empty string as the last item so we reomve it
- if not parag_list[-1]:
- return parag_list[:-1]
- return parag_list
-
- def _break_lines(self, parag_list, line_width): # pylint: disable=R0201
- formatted_paragraphs = []
- for para in parag_list:
- words = para.split()
- if words:
- formatted_text = words.pop(0)
- current_width = len(formatted_text)
- # for each word in the paragraph, line width is an accumlation of
- # word length + 1 (1 is for the space after each word).
- for word in words:
- word = word.strip()
- if current_width + len(word) + 1 >= line_width:
- formatted_text += '\n' + word
- current_width = len(word)
- else:
- formatted_text += ' ' + word
- current_width += len(word) + 1
- formatted_paragraphs.append(formatted_text)
- return '\n\n'.join(formatted_paragraphs)
diff --git a/wlauto/utils/hwmon.py b/wlauto/utils/hwmon.py
deleted file mode 100644
index 9375497d..00000000
--- a/wlauto/utils/hwmon.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto.exceptions import DeviceError
-
-
-HWMON_ROOT = '/sys/class/hwmon'
-
-
-class HwmonSensor(object):
-
- def __init__(self, device, kind, label, filepath):
- self.device = device
- self.kind = kind
- self.label = label
- self.filepath = filepath
- self.readings = []
-
- def take_reading(self):
- reading = self.device.get_sysfile_value(self.filepath, int)
- self.readings.append(reading)
-
- def clear_readings(self):
- self.readings = []
-
-
-def discover_sensors(device, sensor_kinds):
- """
- Discovers HWMON sensors available on the device.
-
- :device: Device on which to discover HWMON sensors. Must be an instance
- of :class:`AndroidDevice`.
- :sensor_kinds: A list of names of sensor types to be discovered. The names
- must be as they appear prefixed to ``*_input`` files in
- sysfs. E.g. ``'energy'``.
-
- :returns: A list of :class:`HwmonSensor` instantces for each found sensor. If
- no sensors of the specified types were discovered, an empty list
- will be returned.
-
- """
- hwmon_devices = device.list_directory(HWMON_ROOT)
- path = device.path
- sensors = []
- for hwmon_device in hwmon_devices:
- try:
- device_path = path.join(HWMON_ROOT, hwmon_device, 'device')
- name = device.get_sysfile_value(path.join(device_path, 'name'))
- except DeviceError: # probably a virtual device
- device_path = path.join(HWMON_ROOT, hwmon_device)
- name = device.get_sysfile_value(path.join(device_path, 'name'))
-
- for sensor_kind in sensor_kinds:
- i = 1
- input_path = path.join(device_path, '{}{}_input'.format(sensor_kind, i))
- while device.file_exists(input_path):
- label_path = path.join(device_path, '{}{}_label'.format(sensor_kind, i))
- if device.file_exists(label_path):
- name += ' ' + device.get_sysfile_value(label_path)
- sensors.append(HwmonSensor(device, sensor_kind, name, input_path))
- i += 1
- input_path = path.join(device_path, '{}{}_input'.format(sensor_kind, i))
- return sensors
-
diff --git a/wlauto/utils/ipython.py b/wlauto/utils/ipython.py
deleted file mode 100644
index c1439f40..00000000
--- a/wlauto/utils/ipython.py
+++ /dev/null
@@ -1,188 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=no-name-in-module,import-error,no-member
-
-import os
-import subprocess
-from distutils.version import LooseVersion
-
-# pylint: disable=wrong-import-position,ungrouped-imports
-import_error_str = ''
-try:
- import IPython
-except ImportError as import_error:
- IPython = None
- # Importing IPython can fail for a variety of reasons, report the actual
- # failure unless it's just that the package is not present
- if import_error.message.startswith("No module named"): # pylint: disable=E1101
- import_error_str = 'ipynb_exporter requires ipython package to be installed'
- else:
- import_error_str = import_error.message # pylint: disable=redefined-variable-type
-
-# The current code generates notebooks version 3
-NBFORMAT_VERSION = 3
-
-
-if IPython:
- if LooseVersion('5.0.0') > LooseVersion(IPython.__version__) >= LooseVersion('4.0.0'):
- import nbformat
- from jupyter_client.manager import KernelManager
-
- def read_notebook(notebook_in): # pylint: disable=function-redefined
- return nbformat.reads(notebook_in, NBFORMAT_VERSION) # pylint: disable=E1101
-
- def write_notebook(notebook, fout): # pylint: disable=function-redefined
- nbformat.write(notebook, fout) # pylint: disable=E1101
-
- NotebookNode = nbformat.NotebookNode # pylint: disable=E1101
-
- IPYTHON_NBCONVERT_HTML = ['jupyter', 'nbconvert', '--to html']
- IPYTHON_NBCONVERT_PDF = ['jupyter', 'nbconvert', '--to pdf']
-
- elif LooseVersion('4.0.0') > LooseVersion(IPython.__version__) >= LooseVersion('3.0.0'):
- from IPython.kernel import KernelManager
- import IPython.nbformat
-
- def read_notebook(notebook_in): # pylint: disable=function-redefined
- return IPython.nbformat.reads(notebook_in, NBFORMAT_VERSION) # pylint: disable=E1101
-
- def write_notebook(notebook, fout): # pylint: disable=function-redefined
- IPython.nbformat.write(notebook, fout) # pylint: disable=E1101
-
- NotebookNode = IPython.nbformat.NotebookNode # pylint: disable=E1101
-
- IPYTHON_NBCONVERT_HTML = ['ipython', 'nbconvert', '--to=html']
- IPYTHON_NBCONVERT_PDF = ['ipython', 'nbconvert', '--to=pdf']
- elif LooseVersion('3.0.0') > LooseVersion(IPython.__version__) >= LooseVersion('2.0.0'):
- from IPython.kernel import KernelManager
- import IPython.nbformat.v3
-
- def read_notebook(notebook_in): # pylint: disable=function-redefined
- return IPython.nbformat.v3.reads_json(notebook_in) # pylint: disable=E1101
-
- def write_notebook(notebook, fout): # pylint: disable=function-redefined
- IPython.nbformat.v3.nbjson.JSONWriter().write(notebook, fout) # pylint: disable=E1101
-
- NotebookNode = IPython.nbformat.v3.NotebookNode # pylint: disable=E1101
-
- IPYTHON_NBCONVERT_HTML = ['ipython', 'nbconvert', '--to=html']
- IPYTHON_NBCONVERT_PDF = ['ipython', 'nbconvert', '--to=latex',
- '--post=PDF']
- else:
- # Unsupported IPython version
- import_error_str = 'Unsupported IPython version {}'.format(IPython.__version__)
-
-
-def parse_valid_output(msg):
- """Parse a valid result from an execution of a cell in an ipython kernel"""
- msg_type = msg["msg_type"]
- if msg_type == 'error':
- msg_type = 'pyerr'
- elif msg_type == 'execute_result':
- msg_type = 'pyout'
-
- content = msg["content"]
- out = NotebookNode(output_type=msg_type)
-
- if msg_type == "stream":
- out.stream = content["name"]
- try:
- out.text = content['data']
- except KeyError:
- out.text = content['text']
- elif msg_type in ("display_data", "pyout"):
- for mime, data in content["data"].iteritems():
- if mime == "text/plain":
- attr = "text"
- else:
- attr = mime.split("/")[-1]
- setattr(out, attr, data)
- elif msg_type == "pyerr":
- out.ename = content["ename"]
- out.evalue = content["evalue"]
- out.traceback = content["traceback"]
- else:
- raise ValueError("Unknown msg_type {}".format(msg_type))
-
- return out
-
-
-def run_cell(kernel_client, cell):
- """Run a cell of a notebook in an ipython kernel and return its output"""
- kernel_client.execute(cell.input)
-
- input_acknowledged = False
- outs = []
- while True:
- msg = kernel_client.get_iopub_msg()
-
- if msg["msg_type"] == "status":
- if msg["content"]["execution_state"] == "idle" and input_acknowledged:
- break
- elif msg["msg_type"] in ('pyin', 'execute_input'):
- input_acknowledged = True
- else:
- out = parse_valid_output(msg)
- outs.append(out)
-
- return outs
-
-
-def run_notebook(notebook):
- """Run the notebook"""
- kernel_manager = KernelManager()
- kernel_manager.start_kernel(stderr=open(os.devnull, 'w'))
- kernel_client = kernel_manager.client()
- kernel_client.start_channels()
-
- for sheet in notebook.worksheets:
- for (prompt_number, cell) in enumerate(sheet.cells, 1):
- if cell.cell_type != "code":
- continue
-
- cell.outputs = run_cell(kernel_client, cell)
-
- cell.prompt_number = prompt_number
- if cell.outputs and cell.outputs[0]['output_type'] == 'pyout':
- cell.outputs[0]["prompt_number"] = prompt_number
-
- kernel_manager.shutdown_kernel()
-
-
-def export_notebook(nbbasename, output_directory, output_format):
- """Generate a PDF or HTML from the ipython notebook
-
- output_format has to be either 'pdf' or 'html'. These are the
- only formats currently supported.
-
- ipython nbconvert claims that the CLI is not stable, so keep this
- function here to be able to cope with inconsistencies
-
- """
-
- if output_format == "html":
- ipython_command = IPYTHON_NBCONVERT_HTML
- elif output_format == "pdf":
- ipython_command = IPYTHON_NBCONVERT_PDF
- else:
- raise ValueError("Unknown output format: {}".format(output_format))
-
- prev_dir = os.getcwd()
- os.chdir(output_directory)
-
- with open(os.devnull, 'w') as devnull:
- subprocess.check_call(ipython_command + [nbbasename], stderr=devnull)
-
- os.chdir(prev_dir)
diff --git a/wlauto/utils/log.py b/wlauto/utils/log.py
deleted file mode 100644
index a4b5d51d..00000000
--- a/wlauto/utils/log.py
+++ /dev/null
@@ -1,223 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-# pylint: disable=E1101
-import logging
-import string
-import threading
-
-import colorama
-
-from wlauto.core.configuration import settings
-import wlauto.core.signal as signal
-
-
-COLOR_MAP = {
- logging.DEBUG: colorama.Fore.BLUE,
- logging.INFO: colorama.Fore.GREEN,
- logging.WARNING: colorama.Fore.YELLOW,
- logging.ERROR: colorama.Fore.RED,
- logging.CRITICAL: colorama.Style.BRIGHT + colorama.Fore.RED,
-}
-
-RESET_COLOR = colorama.Style.RESET_ALL
-
-
-def init_logging(verbosity):
- root_logger = logging.getLogger()
- root_logger.setLevel(logging.DEBUG)
-
- error_handler = ErrorSignalHandler(logging.DEBUG)
- root_logger.addHandler(error_handler)
-
- console_handler = logging.StreamHandler()
- if verbosity == 1:
- console_handler.setLevel(logging.DEBUG)
- if 'colour' in settings.logging and not settings.logging['colour']:
- console_handler.setFormatter(LineFormatter(settings.logging['verbose_format']))
- else:
- console_handler.setFormatter(ColorFormatter(settings.logging['verbose_format']))
- else:
- console_handler.setLevel(logging.INFO)
- if 'colour' in settings.logging and not settings.logging['colour']:
- console_handler.setFormatter(LineFormatter(settings.logging['regular_format']))
- else:
- console_handler.setFormatter(ColorFormatter(settings.logging['regular_format']))
- root_logger.addHandler(console_handler)
-
- logging.basicConfig(level=logging.DEBUG)
-
-
-def add_log_file(filepath, level=logging.DEBUG):
- root_logger = logging.getLogger()
- file_handler = logging.FileHandler(filepath)
- file_handler.setLevel(level)
- file_handler.setFormatter(LineFormatter(settings.logging['file_format']))
- root_logger.addHandler(file_handler)
-
-
-class ErrorSignalHandler(logging.Handler):
- """
- Emits signals for ERROR and WARNING level traces.
-
- """
-
- def emit(self, record):
- if record.levelno == logging.ERROR:
- signal.send(signal.ERROR_LOGGED, self)
- elif record.levelno == logging.WARNING:
- signal.send(signal.WARNING_LOGGED, self)
-
-
-class ColorFormatter(logging.Formatter):
- """
- Formats logging records with color and prepends record info
- to each line of the message.
-
- BLUE for DEBUG logging level
- GREEN for INFO logging level
- YELLOW for WARNING logging level
- RED for ERROR logging level
- BOLD RED for CRITICAL logging level
-
- """
-
- def __init__(self, fmt=None, datefmt=None):
- super(ColorFormatter, self).__init__(fmt, datefmt)
- template_text = self._fmt.replace('%(message)s', RESET_COLOR + '%(message)s${color}')
- template_text = '${color}' + template_text + RESET_COLOR
- self.fmt_template = string.Template(template_text)
-
- def format(self, record):
- self._set_color(COLOR_MAP[record.levelno])
-
- record.message = record.getMessage()
- if self.usesTime():
- record.asctime = self.formatTime(record, self.datefmt)
-
- d = record.__dict__
- parts = []
- for line in record.message.split('\n'):
- d.update({'message': line.strip('\r')})
- parts.append(self._fmt % d)
-
- return '\n'.join(parts)
-
- def _set_color(self, color):
- self._fmt = self.fmt_template.substitute(color=color)
-
-
-class LineFormatter(logging.Formatter):
- """
- Logs each line of the message separately.
-
- """
-
- def __init__(self, fmt=None, datefmt=None):
- super(LineFormatter, self).__init__(fmt, datefmt)
-
- def format(self, record):
- record.message = record.getMessage()
- if self.usesTime():
- record.asctime = self.formatTime(record, self.datefmt)
-
- d = record.__dict__
- parts = []
- for line in record.message.split('\n'):
- d.update({'message': line.strip('\r')})
- parts.append(self._fmt % d)
-
- return '\n'.join(parts)
-
-
-class BaseLogWriter(object):
-
- def __init__(self, name, level=logging.DEBUG):
- """
- File-like object class designed to be used for logging from streams
- Each complete line (terminated by new line character) gets logged
- at DEBUG level. In complete lines are buffered until the next new line.
-
- :param name: The name of the logger that will be used.
-
- """
- self.logger = logging.getLogger(name)
- self.buffer = ''
- if level == logging.DEBUG:
- self.do_write = self.logger.debug
- elif level == logging.INFO:
- self.do_write = self.logger.info
- elif level == logging.WARNING:
- self.do_write = self.logger.warning
- elif level == logging.ERROR:
- self.do_write = self.logger.error
- else:
- raise Exception('Unknown logging level: {}'.format(level))
-
- def flush(self):
- # Defined to match the interface expected by pexpect.
- return self
-
- def close(self):
- if self.buffer:
- self.logger.debug(self.buffer)
- self.buffer = ''
- return self
-
- def __del__(self):
- # Ensure we don't lose bufferd output
- self.close()
-
-
-class LogWriter(BaseLogWriter):
-
- def write(self, data):
- data = data.replace('\r\n', '\n').replace('\r', '\n')
- if '\n' in data:
- parts = data.split('\n')
- parts[0] = self.buffer + parts[0]
- for part in parts[:-1]:
- self.do_write(part)
- self.buffer = parts[-1]
- else:
- self.buffer += data
- return self
-
-
-class LineLogWriter(BaseLogWriter):
-
- def write(self, data):
- self.do_write(data)
-
-
-class StreamLogger(threading.Thread):
- """
- Logs output from a stream in a thread.
-
- """
-
- def __init__(self, name, stream, level=logging.DEBUG, klass=LogWriter):
- super(StreamLogger, self).__init__()
- self.writer = klass(name, level)
- self.stream = stream
- self.daemon = True
-
- def run(self):
- line = self.stream.readline()
- while line:
- self.writer.write(line.rstrip('\n'))
- line = self.stream.readline()
- self.writer.close()
diff --git a/wlauto/utils/misc.py b/wlauto/utils/misc.py
deleted file mode 100644
index 368bd30a..00000000
--- a/wlauto/utils/misc.py
+++ /dev/null
@@ -1,600 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-Miscellaneous functions that don't fit anywhere else.
-
-"""
-from __future__ import division
-import os
-import sys
-import re
-import math
-import imp
-import string
-import threading
-import signal
-import subprocess
-import pkgutil
-import traceback
-import logging
-import random
-import hashlib
-from datetime import datetime, timedelta
-from operator import mul, itemgetter
-from StringIO import StringIO
-from itertools import cycle, groupby, chain
-from functools import partial
-from distutils.spawn import find_executable
-
-import yaml
-from dateutil import tz
-
-from devlib.utils.misc import (ABI_MAP, check_output, walk_modules,
- ensure_directory_exists, ensure_file_directory_exists,
- normalize, convert_new_lines, get_cpu_mask, unique,
- escape_quotes, escape_single_quotes, escape_double_quotes,
- isiterable, getch, as_relative, ranges_to_list,
- list_to_ranges, list_to_mask, mask_to_list, which)
-
-check_output_logger = logging.getLogger('check_output')
-
-
-# Defined here rather than in wlauto.exceptions due to module load dependencies
-class TimeoutError(Exception):
- """Raised when a subprocess command times out. This is basically a ``WAError``-derived version{}
- of ``subprocess.CalledProcessError``, the thinking being that while a timeout could be due to
- programming error (e.g. not setting long enough timers), it is often due to some failure in the
- environment, and there fore should be classed as a "user error"."""
-
- def __init__(self, command, output):
- super(TimeoutError, self).__init__('Timed out: {}'.format(command))
- self.command = command
- self.output = output
-
- def __str__(self):
- return '\n'.join([self.message, 'OUTPUT:', self.output or ''])
-
-
-def diff_tokens(before_token, after_token):
- """
- Creates a diff of two tokens.
-
- If the two tokens are the same it just returns returns the token
- (whitespace tokens are considered the same irrespective of type/number
- of whitespace characters in the token).
-
- If the tokens are numeric, the difference between the two values
- is returned.
-
- Otherwise, a string in the form [before -> after] is returned.
-
- """
- if before_token.isspace() and after_token.isspace():
- return after_token
- elif before_token.isdigit() and after_token.isdigit():
- try:
- diff = int(after_token) - int(before_token)
- return str(diff)
- except ValueError:
- return "[%s -> %s]" % (before_token, after_token)
- elif before_token == after_token:
- return after_token
- else:
- return "[%s -> %s]" % (before_token, after_token)
-
-
-def prepare_table_rows(rows):
- """Given a list of lists, make sure they are prepared to be formatted into a table
- by making sure each row has the same number of columns and stringifying all values."""
- rows = [map(str, r) for r in rows]
- max_cols = max(map(len, rows))
- for row in rows:
- pad = max_cols - len(row)
- for _ in xrange(pad):
- row.append('')
- return rows
-
-
-def write_table(rows, wfh, align='>', headers=None): # pylint: disable=R0914
- """Write a column-aligned table to the specified file object."""
- if not rows:
- return
- rows = prepare_table_rows(rows)
- num_cols = len(rows[0])
-
- # cycle specified alignments until we have max_cols of them. This is
- # consitent with how such cases are handled in R, pandas, etc.
- it = cycle(align)
- align = [it.next() for _ in xrange(num_cols)]
-
- cols = zip(*rows)
- col_widths = [max(map(len, c)) for c in cols]
- row_format = ' '.join(['{:%s%s}' % (align[i], w) for i, w in enumerate(col_widths)])
- row_format += '\n'
-
- if headers:
- wfh.write(row_format.format(*headers))
- underlines = ['-' * len(h) for h in headers]
- wfh.write(row_format.format(*underlines))
-
- for row in rows:
- wfh.write(row_format.format(*row))
-
-
-def get_null():
- """Returns the correct null sink based on the OS."""
- return 'NUL' if os.name == 'nt' else '/dev/null'
-
-
-def get_traceback(exc=None):
- """
- Returns the string with the traceback for the specifiec exc
- object, or for the current exception exc is not specified.
-
- """
- if exc is None:
- exc = sys.exc_info()
- if not exc:
- return None
- tb = exc[2]
- sio = StringIO()
- traceback.print_tb(tb, file=sio)
- del tb # needs to be done explicitly see: http://docs.python.org/2/library/sys.html#sys.exc_info
- return sio.getvalue()
-
-
-def _check_remove_item(the_list, item):
- """Helper function for merge_lists that implements checking wether an items
- should be removed from the list and doing so if needed. Returns ``True`` if
- the item has been removed and ``False`` otherwise."""
- if not isinstance(item, basestring):
- return False
- if not item.startswith('~'):
- return False
- actual_item = item[1:]
- if actual_item in the_list:
- del the_list[the_list.index(actual_item)]
- return True
-
-
-VALUE_REGEX = re.compile(r'(\d+(?:\.\d+)?)\s*(\w*)')
-
-UNITS_MAP = {
- 's': 'seconds',
- 'ms': 'milliseconds',
- 'us': 'microseconds',
- 'ns': 'nanoseconds',
- 'V': 'volts',
- 'A': 'amps',
- 'mA': 'milliamps',
- 'J': 'joules',
-}
-
-
-def parse_value(value_string):
- """parses a string representing a numerical value and returns
- a tuple (value, units), where value will be either int or float,
- and units will be a string representing the units or None."""
- match = VALUE_REGEX.search(value_string)
- if match:
- vs = match.group(1)
- value = float(vs) if '.' in vs else int(vs)
- us = match.group(2)
- units = UNITS_MAP.get(us, us)
- return (value, units)
- else:
- return (value_string, None)
-
-
-def get_meansd(values):
- """Returns mean and standard deviation of the specified values."""
- if not values:
- return float('nan'), float('nan')
- mean = sum(values) / len(values)
- sd = math.sqrt(sum([(v - mean) ** 2 for v in values]) / len(values))
- return mean, sd
-
-
-def geomean(values):
- """Returns the geometric mean of the values."""
- return reduce(mul, values) ** (1.0 / len(values))
-
-
-def capitalize(text):
- """Capitalises the specified text: first letter upper case,
- all subsequent letters lower case."""
- if not text:
- return ''
- return text[0].upper() + text[1:].lower()
-
-
-def utc_to_local(dt):
- """Convert naive datetime to local time zone, assuming UTC."""
- return dt.replace(tzinfo=tz.tzutc()).astimezone(tz.tzlocal())
-
-
-def local_to_utc(dt):
- """Convert naive datetime to UTC, assuming local time zone."""
- return dt.replace(tzinfo=tz.tzlocal()).astimezone(tz.tzutc())
-
-
-def load_class(classpath):
- """Loads the specified Python class. ``classpath`` must be a fully-qualified
- class name (i.e. namspaced under module/package)."""
- modname, clsname = classpath.rsplit('.', 1)
- return getattr(__import__(modname), clsname)
-
-
-def get_pager():
- """Returns the name of the system pager program."""
- pager = os.getenv('PAGER')
- if pager is None:
- pager = find_executable('less')
- if pager is None:
- pager = find_executable('more')
- return pager
-
-
-def enum_metaclass(enum_param, return_name=False, start=0):
- """
- Returns a ``type`` subclass that may be used as a metaclass for
- an enum.
-
- Paremeters:
-
- :enum_param: the name of class attribute that defines enum values.
- The metaclass will add a class attribute for each value in
- ``enum_param``. The value of the attribute depends on the type
- of ``enum_param`` and on the values of ``return_name``. If
- ``return_name`` is ``True``, then the value of the new attribute is
- the name of that attribute; otherwise, if ``enum_param`` is a ``list``
- or a ``tuple``, the value will be the index of that param in
- ``enum_param``, optionally offset by ``start``, otherwise, it will
- be assumed that ``enum_param`` implementa a dict-like inteface and
- the value will be ``enum_param[attr_name]``.
- :return_name: If ``True``, the enum values will the names of enum attributes. If
- ``False``, the default, the values will depend on the type of
- ``enum_param`` (see above).
- :start: If ``enum_param`` is a list or a tuple, and ``return_name`` is ``False``,
- this specifies an "offset" that will be added to the index of the attribute
- within ``enum_param`` to form the value.
-
-
- """
- class __EnumMeta(type):
- def __new__(mcs, clsname, bases, attrs):
- cls = type.__new__(mcs, clsname, bases, attrs)
- values = getattr(cls, enum_param, [])
- if return_name:
- for name in values:
- setattr(cls, name, name)
- else:
- if isinstance(values, list) or isinstance(values, tuple):
- for i, name in enumerate(values):
- setattr(cls, name, i + start)
- else: # assume dict-like
- for name in values:
- setattr(cls, name, values[name])
- return cls
- return __EnumMeta
-
-
-_bash_color_regex = re.compile('\x1b\[[0-9;]+m')
-
-
-def strip_bash_colors(text):
- return _bash_color_regex.sub('', text)
-
-
-def format_duration(seconds, sep=' ', order=['day', 'hour', 'minute', 'second']): # pylint: disable=dangerous-default-value
- """
- Formats the specified number of seconds into human-readable duration.
-
- """
- if isinstance(seconds, timedelta):
- td = seconds
- else:
- td = timedelta(seconds=seconds)
- dt = datetime(1, 1, 1) + td
- result = []
- for item in order:
- value = getattr(dt, item, None)
- if item is 'day':
- value -= 1
- if not value:
- continue
- suffix = '' if value == 1 else 's'
- result.append('{} {}{}'.format(value, item, suffix))
- return sep.join(result)
-
-
-def get_article(word):
- """
- Returns the appropriate indefinite article for the word (ish).
-
- .. note:: Indefinite article assignment in English is based on
- sound rather than spelling, so this will not work correctly
- in all case; e.g. this will return ``"a hour"``.
-
- """
- return'an' if word[0] in 'aoeiu' else 'a'
-
-
-def get_random_string(length):
- """Returns a random ASCII string of the specified length)."""
- return ''.join(random.choice(string.ascii_letters + string.digits) for _ in xrange(length))
-
-
-class LoadSyntaxError(Exception):
-
- def __init__(self, message, filepath, lineno):
- super(LoadSyntaxError, self).__init__(message)
- self.filepath = filepath
- self.lineno = lineno
-
- def __str__(self):
- message = 'Syntax Error in {}, line {}:\n\t{}'
- return message.format(self.filepath, self.lineno, self.message)
-
-
-RAND_MOD_NAME_LEN = 30
-BAD_CHARS = string.punctuation + string.whitespace
-TRANS_TABLE = string.maketrans(BAD_CHARS, '_' * len(BAD_CHARS))
-
-
-def to_identifier(text):
- """Converts text to a valid Python identifier by replacing all
- whitespace and punctuation."""
- return re.sub('_+', '_', text.translate(TRANS_TABLE))
-
-
-def load_struct_from_python(filepath=None, text=None):
- """Parses a config structure from a .py file. The structure should be composed
- of basic Python types (strings, ints, lists, dicts, etc.)."""
- if not (filepath or text) or (filepath and text):
- raise ValueError('Exactly one of filepath or text must be specified.')
- try:
- if filepath:
- modname = to_identifier(filepath)
- mod = imp.load_source(modname, filepath)
- else:
- modname = get_random_string(RAND_MOD_NAME_LEN)
- while modname in sys.modules: # highly unlikely, but...
- modname = get_random_string(RAND_MOD_NAME_LEN)
- mod = imp.new_module(modname)
- exec text in mod.__dict__ # pylint: disable=exec-used
- return dict((k, v)
- for k, v in mod.__dict__.iteritems()
- if not k.startswith('_'))
- except SyntaxError as e:
- raise LoadSyntaxError(e.message, filepath, e.lineno)
-
-
-def load_struct_from_yaml(filepath=None, text=None):
- """Parses a config structure from a .yaml file. The structure should be composed
- of basic Python types (strings, ints, lists, dicts, etc.)."""
- if not (filepath or text) or (filepath and text):
- raise ValueError('Exactly one of filepath or text must be specified.')
- try:
- if filepath:
- with open(filepath) as fh:
- return yaml.load(fh)
- else:
- return yaml.load(text)
- except yaml.YAMLError as e:
- lineno = None
- if hasattr(e, 'problem_mark'):
- lineno = e.problem_mark.line # pylint: disable=no-member
- raise LoadSyntaxError(e.message, filepath=filepath, lineno=lineno)
-
-
-def load_struct_from_file(filepath):
- """
- Attempts to parse a Python structure consisting of basic types from the specified file.
- Raises a ``ValueError`` if the specified file is of unkown format; ``LoadSyntaxError`` if
- there is an issue parsing the file.
-
- """
- extn = os.path.splitext(filepath)[1].lower()
- if (extn == '.py') or (extn == '.pyc') or (extn == '.pyo'):
- return load_struct_from_python(filepath)
- elif extn == '.yaml':
- return load_struct_from_yaml(filepath)
- else:
- raise ValueError('Unknown format "{}": {}'.format(extn, filepath))
-
-
-def open_file(filepath):
- """
- Open the specified file path with the associated launcher in an OS-agnostic way.
-
- """
- if os.name == 'nt': # Windows
- return os.startfile(filepath) # pylint: disable=no-member
- elif sys.platform == 'darwin': # Mac OSX
- return subprocess.call(['open', filepath])
- else: # assume Linux or similar running a freedesktop-compliant GUI
- return subprocess.call(['xdg-open', filepath])
-
-
-def sha256(path, chunk=2048):
- """Calculates SHA256 hexdigest of the file at the specified path."""
- h = hashlib.sha256()
- with open(path, 'rb') as fh:
- buf = fh.read(chunk)
- while buf:
- h.update(buf)
- buf = fh.read(chunk)
- return h.hexdigest()
-
-
-def urljoin(*parts):
- return '/'.join(p.rstrip('/') for p in parts)
-
-
-# From: http://eli.thegreenplace.net/2011/10/19/perls-guess-if-file-is-text-or-binary-implemented-in-python/
-def istextfile(fileobj, blocksize=512):
- """ Uses heuristics to guess whether the given file is text or binary,
- by reading a single block of bytes from the file.
- If more than 30% of the chars in the block are non-text, or there
- are NUL ('\x00') bytes in the block, assume this is a binary file.
- """
- _text_characters = (b''.join(chr(i) for i in range(32, 127)) +
- b'\n\r\t\f\b')
-
- block = fileobj.read(blocksize)
- if b'\x00' in block:
- # Files with null bytes are binary
- return False
- elif not block:
- # An empty file is considered a valid text file
- return True
-
- # Use translate's 'deletechars' argument to efficiently remove all
- # occurrences of _text_characters from the block
- nontext = block.translate(None, _text_characters)
- return float(len(nontext)) / len(block) <= 0.30
-
-
-def categorize(v):
- if hasattr(v, 'merge_with') and hasattr(v, 'merge_into'):
- return 'o'
- elif hasattr(v, 'iteritems'):
- return 'm'
- elif isiterable(v):
- return 's'
- elif v is None:
- return 'n'
- else:
- return 'c'
-
-
-def merge_config_values(base, other):
- """
- This is used to merge two objects, typically when setting the value of a
- ``ConfigurationPoint``. First, both objects are categorized into
-
- c: A scalar value. Basically, most objects. These values
- are treated as atomic, and not mergeable.
- s: A sequence. Anything iterable that is not a dict or
- a string (strings are considered scalars).
- m: A key-value mapping. ``dict`` and its derivatives.
- n: ``None``.
- o: A mergeable object; this is an object that implements both
- ``merge_with`` and ``merge_into`` methods.
-
- The merge rules based on the two categories are then as follows:
-
- (c1, c2) --> c2
- (s1, s2) --> s1 . s2
- (m1, m2) --> m1 . m2
- (c, s) --> [c] . s
- (s, c) --> s . [c]
- (s, m) --> s . [m]
- (m, s) --> [m] . s
- (m, c) --> ERROR
- (c, m) --> ERROR
- (o, X) --> o.merge_with(X)
- (X, o) --> o.merge_into(X)
- (X, n) --> X
- (n, X) --> X
-
- where:
-
- '.' means concatenation (for maps, contcationation of (k, v) streams
- then converted back into a map). If the types of the two objects
- differ, the type of ``other`` is used for the result.
- 'X' means "any category"
- '[]' used to indicate a literal sequence (not necessarily a ``list``).
- when this is concatenated with an actual sequence, that sequencies
- type is used.
-
- notes:
-
- - When a mapping is combined with a sequence, that mapping is
- treated as a scalar value.
- - When combining two mergeable objects, they're combined using
- ``o1.merge_with(o2)`` (_not_ using o2.merge_into(o1)).
- - Combining anything with ``None`` yields that value, irrespective
- of the order. So a ``None`` value is eqivalent to the corresponding
- item being omitted.
- - When both values are scalars, merging is equivalent to overwriting.
- - There is no recursion (e.g. if map values are lists, they will not
- be merged; ``other`` will overwrite ``base`` values). If complicated
- merging semantics (such as recursion) are required, they should be
- implemented within custom mergeable types (i.e. those that implement
- ``merge_with`` and ``merge_into``).
-
- While this can be used as a generic "combine any two arbitry objects"
- function, the semantics have been selected specifically for merging
- configuration point values.
-
- """
- cat_base = categorize(base)
- cat_other = categorize(other)
-
- if cat_base == 'n':
- return other
- elif cat_other == 'n':
- return base
-
- if cat_base == 'o':
- return base.merge_with(other)
- elif cat_other == 'o':
- return other.merge_into(base)
-
- if cat_base == 'm':
- if cat_other == 's':
- return merge_sequencies([base], other)
- elif cat_other == 'm':
- return merge_maps(base, other)
- else:
- message = 'merge error ({}, {}): "{}" and "{}"'
- raise ValueError(message.format(cat_base, cat_other, base, other))
- elif cat_base == 's':
- if cat_other == 's':
- return merge_sequencies(base, other)
- else:
- return merge_sequencies(base, [other])
- else: # cat_base == 'c'
- if cat_other == 's':
- return merge_sequencies([base], other)
- elif cat_other == 'm':
- message = 'merge error ({}, {}): "{}" and "{}"'
- raise ValueError(message.format(cat_base, cat_other, base, other))
- else:
- return other
-
-
-def merge_sequencies(s1, s2):
- return type(s2)(unique(chain(s1, s2)))
-
-
-def merge_maps(m1, m2):
- return type(m2)(chain(m1.iteritems(), m2.iteritems()))
-
-
-def merge_dicts_simple(base, other):
- result = base.copy()
- for key, value in (base or {}).iteritems():
- result[key] = merge_config_values(result.get(key), value)
- return result
-
-
-def touch(path):
- with open(path, 'w'):
- pass
diff --git a/wlauto/utils/netio.py b/wlauto/utils/netio.py
deleted file mode 100644
index a130d8c4..00000000
--- a/wlauto/utils/netio.py
+++ /dev/null
@@ -1,97 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-This module contains utilities for implemening device hard reset
-using Netio 230 series power switches. This utilizes the KSHELL connection.
-
-"""
-
-import telnetlib
-import socket
-import re
-import time
-import logging
-
-
-logger = logging.getLogger('NetIO')
-
-
-class NetioError(Exception):
- pass
-
-
-class KshellConnection(object):
-
- response_regex = re.compile(r'^(\d+) (.*?)\r\n')
- delay = 0.5
-
- def __init__(self, host='ippowerbar', port=1234, timeout=None):
- """Parameters are passed into ``telnetlib.Telnet`` -- see Python docs."""
- self.host = host
- self.port = port
- self.conn = telnetlib.Telnet(host, port, timeout)
- time.sleep(self.delay) # give time to respond
- output = self.conn.read_very_eager()
- if 'HELLO' not in output:
- raise NetioError('Could not connect: did not see a HELLO. Got: {}'.format(output))
-
- def login(self, user, password):
- code, out = self.send_command('login {} {}\r\n'.format(user, password))
- if code != 250:
- raise NetioError('Login failed. Got: {} {}'.format(code, out))
-
- def enable_port(self, port):
- """Enable the power supply at the specified port."""
- self.set_port(port, 1)
-
- def disable_port(self, port):
- """Enable the power supply at the specified port."""
- self.set_port(port, 0)
-
- def set_port(self, port, value):
- code, out = self.send_command('port {} {}'.format(port, value))
- if code != 250:
- raise NetioError('Could not set {} on port {}. Got: {} {}'.format(value, port, code, out))
-
- def send_command(self, command):
- try:
- if command.startswith('login'):
- parts = command.split()
- parts[2] = '*' * len(parts[2])
- logger.debug(' '.join(parts))
- else:
- logger.debug(command)
- self.conn.write('{}\n'.format(command))
- time.sleep(self.delay) # give time to respond
- out = self.conn.read_very_eager()
- match = self.response_regex.search(out)
- if not match:
- raise NetioError('Invalid response: {}'.format(out.strip()))
- logger.debug('response: {} {}'.format(match.group(1), match.group(2)))
- return int(match.group(1)), match.group(2)
- except socket.error as err:
- try:
- time.sleep(self.delay) # give time to respond
- out = self.conn.read_very_eager()
- if out.startswith('130 CONNECTION TIMEOUT'):
- raise NetioError('130 Timed out.')
- except EOFError:
- pass
- raise err
-
- def close(self):
- self.conn.close()
diff --git a/wlauto/utils/power.py b/wlauto/utils/power.py
deleted file mode 100644
index 5af6999d..00000000
--- a/wlauto/utils/power.py
+++ /dev/null
@@ -1,738 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from __future__ import division
-import os
-import sys
-import csv
-import re
-import logging
-from ctypes import c_int32
-from collections import defaultdict
-import argparse
-
-from wlauto.utils.trace_cmd import TraceCmdTrace, TRACE_MARKER_START, TRACE_MARKER_STOP
-
-
-logger = logging.getLogger('power')
-
-UNKNOWN_FREQUENCY = -1
-
-INIT_CPU_FREQ_REGEX = re.compile(r'CPU (?P<cpu>\d+) FREQUENCY: (?P<freq>\d+) kHZ')
-
-
-class CorePowerTransitionEvent(object):
-
- kind = 'transition'
- __slots__ = ['timestamp', 'cpu_id', 'frequency', 'idle_state']
-
- def __init__(self, timestamp, cpu_id, frequency=None, idle_state=None):
- if (frequency is None) == (idle_state is None):
- raise ValueError('Power transition must specify a frequency or an idle_state, but not both.')
- self.timestamp = timestamp
- self.cpu_id = cpu_id
- self.frequency = frequency
- self.idle_state = idle_state
-
- def __str__(self):
- return 'cpu {} @ {} -> freq: {} idle: {}'.format(self.cpu_id, self.timestamp,
- self.frequency, self.idle_state)
-
- def __repr__(self):
- return 'CPTE(c:{} t:{} f:{} i:{})'.format(self.cpu_id, self.timestamp,
- self.frequency, self.idle_state)
-
-
-class CorePowerDroppedEvents(object):
-
- kind = 'dropped_events'
- __slots__ = ['cpu_id']
-
- def __init__(self, cpu_id):
- self.cpu_id = cpu_id
-
- def __str__(self):
- return 'DROPPED EVENTS on CPU{}'.format(self.cpu_id)
-
- __repr__ = __str__
-
-
-class TraceMarkerEvent(object):
-
- kind = 'marker'
- __slots__ = ['name']
-
- def __init__(self, name):
- self.name = name
-
- def __str__(self):
- return 'MARKER: {}'.format(self.name)
-
-
-class CpuPowerState(object):
-
- __slots__ = ['frequency', 'idle_state']
-
- @property
- def is_idling(self):
- return self.idle_state is not None and self.idle_state >= 0
-
- @property
- def is_active(self):
- return self.idle_state == -1
-
- def __init__(self, frequency=None, idle_state=None):
- self.frequency = frequency
- self.idle_state = idle_state
-
- def __str__(self):
- return 'CP(f:{} i:{})'.format(self.frequency, self.idle_state)
-
- __repr__ = __str__
-
-
-class SystemPowerState(object):
-
- __slots__ = ['timestamp', 'cpus']
-
- @property
- def num_cores(self):
- return len(self.cpus)
-
- def __init__(self, num_cores):
- self.timestamp = None
- self.cpus = []
- for _ in xrange(num_cores):
- self.cpus.append(CpuPowerState())
-
- def copy(self):
- new = SystemPowerState(self.num_cores)
- new.timestamp = self.timestamp
- for i, c in enumerate(self.cpus):
- new.cpus[i].frequency = c.frequency
- new.cpus[i].idle_state = c.idle_state
- return new
-
- def __str__(self):
- return 'SP(t:{} Cs:{})'.format(self.timestamp, self.cpus)
-
- __repr__ = __str__
-
-
-class PowerStateProcessor(object):
- """
- This takes a stream of power transition events and yields a timeline stream
- of system power states.
-
- """
-
- @property
- def cpu_states(self):
- return self.power_state.cpus
-
- @property
- def current_time(self):
- return self.power_state.timestamp
-
- @current_time.setter
- def current_time(self, value):
- self.power_state.timestamp = value
-
- def __init__(self, core_clusters, num_idle_states,
- first_cluster_state=sys.maxint, first_system_state=sys.maxint,
- wait_for_start_marker=False):
- self.power_state = SystemPowerState(len(core_clusters))
- self.requested_states = defaultdict(lambda: -1) # cpu_id -> requeseted state
- self.wait_for_start_marker = wait_for_start_marker
- self._saw_start_marker = False
-
- idle_state_domains = build_idle_domains(core_clusters,
- num_states=num_idle_states,
- first_cluster_state=first_cluster_state,
- first_system_state=first_system_state)
- # This tells us what other cpus we need to update when we see an idle
- # state transition event
- self.idle_related_cpus = defaultdict(list) # (cpu, idle_state) --> relate_cpus_list
- for state_id, idle_state_domain in enumerate(idle_state_domains):
- for cpu_group in idle_state_domain:
- for cpu in cpu_group:
- related = set(cpu_group) - set([cpu])
- self.idle_related_cpus[(cpu, state_id)] = related
-
- def process(self, event_stream):
- for event in event_stream:
- next_state = self.update_power_state(event)
- if self._saw_start_marker or not self.wait_for_start_marker:
- yield next_state
-
- def update_power_state(self, event):
- """
- Update the tracked power state based on the specified event and
- return updated power state.
-
- """
- if event.kind == 'transition':
- self._process_transition(event)
- elif event.kind == 'dropped_events':
- self._process_dropped_events(event)
- elif event.kind == 'marker':
- if event.name == 'START':
- self._saw_start_marker = True
- elif event.name == 'STOP':
- self._saw_start_marker = False
- else:
- raise ValueError('Unexpected event type: {}'.format(event.kind))
- return self.power_state.copy()
-
- def _process_transition(self, event):
- self.current_time = event.timestamp
- if event.idle_state is None:
- self.cpu_states[event.cpu_id].frequency = event.frequency
- else:
- if event.idle_state == -1:
- self._process_idle_exit(event)
- else:
- self._process_idle_entry(event)
-
- def _process_dropped_events(self, event):
- self.cpu_states[event.cpu_id].frequency = None
- old_idle_state = self.cpu_states[event.cpu_id].idle_state
- self.cpu_states[event.cpu_id].idle_state = None
-
- related_ids = self.idle_related_cpus[(event.cpu_id, old_idle_state)]
- for rid in related_ids:
- self.cpu_states[rid].idle_state = None
-
- def _process_idle_entry(self, event):
- if self.cpu_states[event.cpu_id].is_idling:
- raise ValueError('Got idle state entry event for an idling core: {}'.format(event))
- self._try_transition_to_idle_state(event.cpu_id, event.idle_state)
-
- def _process_idle_exit(self, event):
- if self.cpu_states[event.cpu_id].is_active:
- raise ValueError('Got idle state exit event for an active core: {}'.format(event))
- self.requested_states.pop(event.cpu_id, None) # remove outstanding request if there is one
- old_state = self.cpu_states[event.cpu_id].idle_state
- self.cpu_states[event.cpu_id].idle_state = -1
- if self.cpu_states[event.cpu_id].frequency is None:
- self.cpu_states[event.cpu_id].frequency = UNKNOWN_FREQUENCY
-
- related_ids = self.idle_related_cpus[(event.cpu_id, old_state)]
- if old_state is not None:
- new_state = old_state - 1
- for rid in related_ids:
- if self.cpu_states[rid].idle_state > new_state:
- self._try_transition_to_idle_state(rid, new_state)
-
- def _try_transition_to_idle_state(self, cpu_id, idle_state):
- related_ids = self.idle_related_cpus[(cpu_id, idle_state)]
- idle_state = idle_state
-
- # Tristate: True - can transition, False - can't transition,
- # None - unknown idle state on at least one related cpu
- transition_check = self._can_enter_state(related_ids, idle_state)
-
- if not transition_check:
- # If we can't enter an idle state right now, record that we've
- # requested it, so that we may enter it later (once all related
- # cpus also want a state at least as deep).
- self.requested_states[cpu_id] = idle_state
-
- if transition_check is None:
- # Unknown state on a related cpu means we're not sure whether we're
- # entering requested state or a shallower one
- self.cpu_states[cpu_id].idle_state = None
- return
-
- # Keep trying shallower states until all related
- while not self._can_enter_state(related_ids, idle_state):
- idle_state -= 1
- related_ids = self.idle_related_cpus[(cpu_id, idle_state)]
-
- self.cpu_states[cpu_id].idle_state = idle_state
- for rid in related_ids:
- self.cpu_states[rid].idle_state = idle_state
- if self.requested_states[rid] == idle_state:
- del self.requested_states[rid] # request satisfied, so remove
-
- def _can_enter_state(self, related_ids, state):
- """
- This is a tri-state check. Returns ``True`` if related cpu states allow transition
- into this state, ``False`` if related cpu states don't allow transition into this
- state, and ``None`` if at least one of the related cpus is in an unknown state
- (so the decision of whether a transition is possible cannot be made).
-
- """
- for rid in related_ids:
- rid_requested_state = self.requested_states[rid]
- rid_current_state = self.cpu_states[rid].idle_state
- if rid_current_state is None:
- return None
- if rid_current_state < state and rid_requested_state < state:
- return False
- return True
-
-
-def stream_cpu_power_transitions(events):
- for event in events:
- if event.name == 'cpu_idle':
- state = c_int32(event.state).value
- yield CorePowerTransitionEvent(event.timestamp, event.cpu_id, idle_state=state)
- elif event.name == 'cpu_frequency':
- yield CorePowerTransitionEvent(event.timestamp, event.cpu_id, frequency=event.state)
- elif event.name == 'DROPPED EVENTS DETECTED':
- yield CorePowerDroppedEvents(event.cpu_id)
- elif event.name == 'print':
- if TRACE_MARKER_START in event.text:
- yield TraceMarkerEvent('START')
- elif TRACE_MARKER_STOP in event.text:
- yield TraceMarkerEvent('STOP')
- else:
- match = INIT_CPU_FREQ_REGEX.search(event.text)
- if match:
- yield CorePowerTransitionEvent(event.timestamp,
- int(match.group('cpu')),
- frequency=int(match.group('freq')))
-
-
-def gather_core_states(system_state_stream, freq_dependent_idle_states=None): # NOQA
- if freq_dependent_idle_states is None:
- freq_dependent_idle_states = [0]
- for system_state in system_state_stream:
- core_states = []
- for cpu in system_state.cpus:
- if cpu.idle_state == -1:
- core_states.append((-1, cpu.frequency))
- elif cpu.idle_state in freq_dependent_idle_states:
- if cpu.frequency is not None:
- core_states.append((cpu.idle_state, cpu.frequency))
- else:
- core_states.append((None, None))
- else:
- core_states.append((cpu.idle_state, None))
- yield (system_state.timestamp, core_states)
-
-
-class PowerStateTimeline(object):
-
- def __init__(self, filepath, core_names, idle_state_names):
- self.filepath = filepath
- self.idle_state_names = idle_state_names
- self._wfh = open(filepath, 'w')
- self.writer = csv.writer(self._wfh)
- if core_names:
- headers = ['ts'] + ['{} CPU{}'.format(c, i)
- for i, c in enumerate(core_names)]
- self.writer.writerow(headers)
-
- def update(self, timestamp, core_states): # NOQA
- row = [timestamp]
- for idle_state, frequency in core_states:
- if frequency is None:
- if idle_state is None or idle_state == -1:
- row.append(None)
- else:
- row.append(self.idle_state_names[idle_state])
- else: # frequency is not None
- if idle_state == -1:
- if frequency == UNKNOWN_FREQUENCY:
- frequency = 'Running (Unknown Hz)'
- row.append(frequency)
- elif idle_state is None:
- row.append(None)
- else:
- if frequency == UNKNOWN_FREQUENCY:
- frequency = 'Unknown Hz'
- row.append('{} ({})'.format(self.idle_state_names[idle_state],
- frequency))
- self.writer.writerow(row)
-
- def report(self):
- self._wfh.close()
-
-
-class ParallelStats(object):
-
- def __init__(self, core_clusters, use_ratios=False):
- self.clusters = defaultdict(set)
- self.use_ratios = use_ratios
- for i, clust in enumerate(core_clusters):
- self.clusters[clust].add(i)
- self.clusters['all'] = set(range(len(core_clusters)))
-
- self.first_timestamp = None
- self.last_timestamp = None
- self.previous_states = None
- self.parallel_times = defaultdict(lambda: defaultdict(int))
- self.running_times = defaultdict(int)
-
- def update(self, timestamp, core_states):
- if self.last_timestamp is not None:
- delta = timestamp - self.last_timestamp
- active_cores = [i for i, c in enumerate(self.previous_states)
- if c and c[0] == -1]
- for cluster, cluster_cores in self.clusters.iteritems():
- clust_active_cores = len(cluster_cores.intersection(active_cores))
- self.parallel_times[cluster][clust_active_cores] += delta
- if clust_active_cores:
- self.running_times[cluster] += delta
- else: # initial update
- self.first_timestamp = timestamp
-
- self.last_timestamp = timestamp
- self.previous_states = core_states
-
- def report(self): # NOQA
- if self.last_timestamp is None:
- return None
-
- report = ParallelReport()
- total_time = self.last_timestamp - self.first_timestamp
- for cluster in sorted(self.parallel_times):
- running_time = self.running_times[cluster]
- for n in xrange(len(self.clusters[cluster]) + 1):
- time = self.parallel_times[cluster][n]
- time_pc = time / total_time
- if not self.use_ratios:
- time_pc *= 100
- if n:
- if running_time:
- running_time_pc = time / running_time
- else:
- running_time_pc = 0
- if not self.use_ratios:
- running_time_pc *= 100
- else:
- running_time_pc = 0
- precision = self.use_ratios and 3 or 1
- fmt = '{{:.{}f}}'.format(precision)
- report.add([cluster, n,
- fmt.format(time),
- fmt.format(time_pc),
- fmt.format(running_time_pc),
- ])
- return report
-
-
-class ParallelReport(object):
-
- def __init__(self):
- self.values = []
-
- def add(self, value):
- self.values.append(value)
-
- def write(self, filepath):
- with open(filepath, 'w') as wfh:
- writer = csv.writer(wfh)
- writer.writerow(['cluster', 'number_of_cores', 'total_time', '%time', '%running_time'])
- writer.writerows(self.values)
-
-
-class PowerStateStats(object):
-
- def __init__(self, core_names, idle_state_names=None, use_ratios=False):
- self.core_names = core_names
- self.idle_state_names = idle_state_names
- self.use_ratios = use_ratios
- self.first_timestamp = None
- self.last_timestamp = None
- self.previous_states = None
- self.cpu_states = defaultdict(lambda: defaultdict(int))
-
- def update(self, timestamp, core_states): # NOQA
- if self.last_timestamp is not None:
- delta = timestamp - self.last_timestamp
- for cpu, (idle, freq) in enumerate(self.previous_states):
- if idle == -1 and freq is not None:
- state = '{:07}KHz'.format(freq)
- elif freq:
- if self.idle_state_names:
- state = '{}-{:07}KHz'.format(self.idle_state_names[idle], freq)
- else:
- state = 'idle{}-{:07}KHz'.format(idle, freq)
- elif idle not in (None, -1):
- if self.idle_state_names:
- state = self.idle_state_names[idle]
- else:
- state = 'idle{}'.format(idle)
- else:
- state = 'unkown'
- self.cpu_states[cpu][state] += delta
- else: # initial update
- self.first_timestamp = timestamp
-
- self.last_timestamp = timestamp
- self.previous_states = core_states
-
- def report(self):
- if self.last_timestamp is None:
- return None
- total_time = self.last_timestamp - self.first_timestamp
- state_stats = defaultdict(lambda: [None] * len(self.core_names))
-
- for cpu, states in self.cpu_states.iteritems():
- for state in states:
- time = states[state]
- time_pc = time / total_time
- if not self.use_ratios:
- time_pc *= 100
- state_stats[state][cpu] = time_pc
-
- precision = self.use_ratios and 3 or 1
- return PowerStateStatsReport(state_stats, self.core_names, precision)
-
-
-class PowerStateStatsReport(object):
-
- def __init__(self, state_stats, core_names, precision=2):
- self.state_stats = state_stats
- self.core_names = core_names
- self.precision = precision
-
- def write(self, filepath):
- with open(filepath, 'w') as wfh:
- writer = csv.writer(wfh)
- headers = ['state'] + ['{} CPU{}'.format(c, i)
- for i, c in enumerate(self.core_names)]
- writer.writerow(headers)
- for state in sorted(self.state_stats):
- stats = self.state_stats[state]
- fmt = '{{:.{}f}}'.format(self.precision)
- writer.writerow([state] + [fmt.format(s if s is not None else 0)
- for s in stats])
-
-
-def build_idle_domains(core_clusters, # NOQA
- num_states,
- first_cluster_state=None,
- first_system_state=None):
- """
- Returns a list of idle domain groups (one for each idle state). Each group is a
- list of domains, and a domain is a list of cpu ids for which that idle state is
- common. E.g.
-
- [[[0], [1], [2]], [[0, 1], [2]], [[0, 1, 2]]]
-
- This defines three idle states for a machine with three cores. The first idle state
- has three domains with one core in each domain; the second state has two domains,
- with cores 0 and 1 sharing one domain; the final state has only one domain shared
- by all cores.
-
- This mapping created based on the assumptions
-
- - The device is an SMP or a big.LITTLE-like system with cores in one or
- more clusters (for SMP systems, all cores are considered to be in a "cluster").
- - Idle domain correspend to either individual cores, individual custers, or
- the compute subsystem as a whole.
- - Cluster states are always deeper (higher index) than core states, and
- system states are always deeper than cluster states.
-
- parameters:
-
- :core_clusters: a list indicating cluster "ID" of the corresponing core, e.g.
- ``[0, 0, 1]`` represents a three-core machines with cores 0
- and 1 on cluster 0, and core 2 on cluster 1.
- :num_states: total number of idle states on a device.
- :first_cluster_state: the ID of the first idle state shared by all cores in a
- cluster
- :first_system_state: the ID of the first idle state shared by all cores.
-
- """
- if first_cluster_state is None:
- first_cluster_state = sys.maxint
- if first_system_state is None:
- first_system_state = sys.maxint
- all_cpus = range(len(core_clusters))
- cluster_cpus = defaultdict(list)
- for cpu, cluster in enumerate(core_clusters):
- cluster_cpus[cluster].append(cpu)
- cluster_domains = [cluster_cpus[c] for c in sorted(cluster_cpus)]
- core_domains = [[c] for c in all_cpus]
-
- idle_state_domains = []
- for state_id in xrange(num_states):
- if state_id >= first_system_state:
- idle_state_domains.append([all_cpus])
- elif state_id >= first_cluster_state:
- idle_state_domains.append(cluster_domains)
- else:
- idle_state_domains.append(core_domains)
-
- return idle_state_domains
-
-
-def report_power_stats(trace_file, idle_state_names, core_names, core_clusters,
- num_idle_states, first_cluster_state=sys.maxint,
- first_system_state=sys.maxint, use_ratios=False,
- timeline_csv_file=None, filter_trace=False):
- # pylint: disable=too-many-locals
- trace = TraceCmdTrace(filter_markers=filter_trace)
- ps_processor = PowerStateProcessor(core_clusters,
- num_idle_states=num_idle_states,
- first_cluster_state=first_cluster_state,
- first_system_state=first_system_state,
- wait_for_start_marker=not filter_trace)
- reporters = [
- ParallelStats(core_clusters, use_ratios),
- PowerStateStats(core_names, idle_state_names, use_ratios)
- ]
- if timeline_csv_file:
- reporters.append(PowerStateTimeline(timeline_csv_file,
- core_names, idle_state_names))
-
- event_stream = trace.parse(trace_file, names=['cpu_idle', 'cpu_frequency', 'print'])
- transition_stream = stream_cpu_power_transitions(event_stream)
- power_state_stream = ps_processor.process(transition_stream)
- core_state_stream = gather_core_states(power_state_stream)
-
- for timestamp, states in core_state_stream:
- for reporter in reporters:
- reporter.update(timestamp, states)
-
- reports = []
- for reporter in reporters:
- report = reporter.report()
- if report:
- reports.append(report)
- return reports
-
-
-def main():
- # pylint: disable=unbalanced-tuple-unpacking
- args = parse_arguments()
- parallel_report, powerstate_report = report_power_stats(
- trace_file=args.infile,
- idle_state_names=args.idle_state_names,
- core_names=args.core_names,
- core_clusters=args.core_clusters,
- num_idle_states=args.num_idle_states,
- first_cluster_state=args.first_cluster_state,
- first_system_state=args.first_system_state,
- use_ratios=args.ratios,
- timeline_csv_file=args.timeline_file,
- filter_trace=(not args.no_trace_filter),
- )
- parallel_report.write(os.path.join(args.output_directory, 'parallel.csv'))
- powerstate_report.write(os.path.join(args.output_directory, 'cpustate.csv'))
-
-
-class SplitListAction(argparse.Action):
-
- def __init__(self, option_strings, dest, nargs=None, **kwargs):
- if nargs is not None:
- raise ValueError('nargs not allowed')
- super(SplitListAction, self).__init__(option_strings, dest, **kwargs)
-
- def __call__(self, parser, namespace, values, option_string=None):
- setattr(namespace, self.dest, [v.strip() for v in values.split(',')])
-
-
-def parse_arguments(): # NOQA
- parser = argparse.ArgumentParser(description="""
- Produce CPU power activity statistics reports from
- power trace.
- """)
- parser.add_argument('infile', metavar='TRACEFILE', help='''
- Path to the trace file to parse. This must be in the format generated
- by "trace-cmd report" command.
- ''')
- parser.add_argument('-d', '--output-directory', default='.',
- help='''
- Output directory where reports will be placed.
- ''')
- parser.add_argument('-F', '--no-trace-filter', action='store_true', default=False,
- help='''
- Normally, only the trace between begin and end marker is used. This disables
- the filtering so the entire trace file is considered.
- ''')
- parser.add_argument('-c', '--core-names', action=SplitListAction,
- help='''
- Comma-separated list of core names for the device on which the trace
- was collected.
- ''')
- parser.add_argument('-C', '--core-clusters', action=SplitListAction, default=[],
- help='''
- Comma-separated list of core cluster IDs for the device on which the
- trace was collected. If not specified, this will be generated from
- core names on the assumption that all cores with the same name are on the
- same cluster.
- ''')
- parser.add_argument('-i', '--idle-state-names', action=SplitListAction,
- help='''
- Comma-separated list of idle state names. The number of names must match
- --num-idle-states if that was explicitly specified.
- ''')
- parser.add_argument('-n', '--num-idle-states', type=int,
- help='''
- number of idle states on the device
- ''')
- parser.add_argument('-q', '--first-cluster-state', type=int,
- help='''
- ID of the first cluster state. Must be < --num-idle-states.
- ''')
- parser.add_argument('-s', '--first-system-state', type=int,
- help='''
- ID of the first system state. Must be < --numb-idle-states, and
- > --first-cluster-state.
- ''')
- parser.add_argument('-R', '--ratios', action='store_true',
- help='''
- By default proportional values will be reported as percentages, if this
- flag is enabled, they will be reported as ratios instead.
- ''')
- parser.add_argument('-t', '--timeline-file', metavar='FILE',
- help='''
- A timeline of core power states will be written to the specified file in
- CSV format.
- ''')
-
- args = parser.parse_args()
-
- if not args.core_names:
- raise ValueError('core names must be specified using -c or --core-names')
- if not args.core_clusters:
- logger.debug('core clusters not specified, inferring from core names')
- core_cluster_map = {}
- core_clusters = []
- current_cluster = 0
- for cn in args.core_names:
- if cn not in core_cluster_map:
- core_cluster_map[cn] = current_cluster
- current_cluster += 1
- core_clusters.append(core_cluster_map[cn])
- args.core_clusters = core_clusters
- if not args.num_idle_states and args.idle_state_names:
- args.num_idle_states = len(args.idle_state_names)
- elif args.num_idle_states and not args.idle_state_names:
- args.idle_state_names = ['idle{}'.format(i) for i in xrange(args.num_idle_states)]
- elif args.num_idle_states and args.idle_state_names:
- if len(args.idle_state_names) != args.num_idle_states:
- raise ValueError('Number of idle state names does not match --num-idle-states')
- else:
- raise ValueError('Either --num-idle-states or --idle-state-names must be specified')
-
- if not args.first_cluster_state and len(set(args.core_clusters)) > 1:
- if args.first_system_state:
- logger.debug('First cluster idle state not specified; state previous to first system state')
- args.first_cluster_state = args.first_system_state - 1
- else:
- logger.debug('First cluster idle state not specified; assuming last available state')
- args.first_cluster_state = args.num_idle_states - 1
-
- return args
-
-if __name__ == '__main__':
- main()
diff --git a/wlauto/utils/serial_port.py b/wlauto/utils/serial_port.py
deleted file mode 100644
index b1a419f2..00000000
--- a/wlauto/utils/serial_port.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import time
-from contextlib import contextmanager
-from distutils.version import StrictVersion as V
-
-import serial
-
-# pylint: disable=ungrouped-imports
-import pexpect
-if V(pexpect.__version__) < V('4.0.0'):
- import fdpexpect # pylint: disable=import-error
-else:
- from pexpect import fdpexpect
-
-from wlauto.utils.log import LogWriter
-from devlib.utils.serial_port import pulse_dtr, get_connection, open_serial_connection
-
-
-class PexpectLogger(LogWriter):
-
- def __init__(self, kind):
- """
- File-like object class designed to be used for logging with pexpect or
- fdpexect. Each complete line (terminated by new line character) gets logged
- at DEBUG level. In complete lines are buffered until the next new line.
-
- :param kind: This specified which of pexpect logfile attributes this logger
- will be set to. It should be "read" for logfile_read, "send" for
- logfile_send, and "" (emtpy string) for logfile.
-
- """
- if kind not in ('read', 'send', ''):
- raise ValueError('kind must be "read", "send" or ""; got {}'.format(kind))
- self.kind = kind
- logger_name = 'serial_{}'.format(kind) if kind else 'serial'
- super(PexpectLogger, self).__init__(logger_name)
diff --git a/wlauto/utils/serializer.py b/wlauto/utils/serializer.py
deleted file mode 100644
index d03c5cdd..00000000
--- a/wlauto/utils/serializer.py
+++ /dev/null
@@ -1,283 +0,0 @@
-"""
-This module contains wrappers for Python serialization modules for
-common formats that make it easier to serialize/deserialize WA
-Plain Old Data structures (serilizable WA classes implement
-``to_pod()``/``from_pod()`` methods for converting between POD
-structures and Python class instances).
-
-The modifications to standard serilization procedures are:
-
- - mappings are deserialized as ``OrderedDict``\ 's rather than standard
- Python ``dict``\ 's. This allows for cleaner syntax in certain parts
- of WA configuration (e.g. values to be written to files can be specified
- as a dict, and they will be written in the order specified in the config).
- - regular expressions are automatically encoded/decoded. This allows for
- configuration values to be transparently specified as strings or regexes
- in the POD config.
-
-This module exports the "wrapped" versions of serialization libraries,
-and this should be imported and used instead of importing the libraries
-directly. i.e. ::
-
- from wa.utils.serializer import yaml
- pod = yaml.load(fh)
-
-instead of ::
-
- import yaml
- pod = yaml.load(fh)
-
-It's also possible to use the serializer directly::
-
- from wa.utils import serializer
- pod = serializer.load(fh)
-
-This can also be used to ``dump()`` POD structures. By default,
-``dump()`` will produce JSON, but ``fmt`` parameter may be used to
-specify an alternative format (``yaml`` or ``python``). ``load()`` will
-use the file plugin to guess the format, but ``fmt`` may also be used
-to specify it explicitly.
-
-"""
-# pylint: disable=unused-argument
-
-import os
-import re
-import json as _json
-from collections import OrderedDict
-from datetime import datetime
-
-import yaml as _yaml
-import dateutil.parser
-
-from wlauto.exceptions import SerializerSyntaxError
-from wlauto.utils.types import regex_type, none_type
-from wlauto.utils.misc import isiterable
-
-
-__all__ = [
- 'json',
- 'yaml',
- 'read_pod',
- 'dump',
- 'load',
- 'is_pod',
- 'POD_TYPES',
-]
-
-POD_TYPES = [
- list,
- tuple,
- dict,
- set,
- str,
- unicode,
- int,
- float,
- bool,
- datetime,
- regex_type,
- none_type,
-]
-
-class WAJSONEncoder(_json.JSONEncoder):
-
- def default(self, obj): # pylint: disable=method-hidden
- if isinstance(obj, regex_type):
- return 'REGEX:{}:{}'.format(obj.flags, obj.pattern)
- elif isinstance(obj, datetime):
- return 'DATET:{}'.format(obj.isoformat())
- else:
- return _json.JSONEncoder.default(self, obj)
-
-
-class WAJSONDecoder(_json.JSONDecoder):
-
- def decode(self, s, **kwargs):
- d = _json.JSONDecoder.decode(self, s, **kwargs)
-
- def try_parse_object(v):
- if isinstance(v, basestring) and v.startswith('REGEX:'):
- _, flags, pattern = v.split(':', 2)
- return re.compile(pattern, int(flags or 0))
- elif isinstance(v, basestring) and v.startswith('DATET:'):
- _, pattern = v.split(':', 1)
- return dateutil.parser.parse(pattern)
- else:
- return v
-
- def load_objects(d):
- pairs = []
- for k, v in d.iteritems():
- if hasattr(v, 'iteritems'):
- pairs.append((k, load_objects(v)))
- elif isiterable(v):
- pairs.append((k, [try_parse_object(i) for i in v]))
- else:
- pairs.append((k, try_parse_object(v)))
- return OrderedDict(pairs)
-
- return load_objects(d)
-
-
-class json(object):
-
- @staticmethod
- def dump(o, wfh, indent=4, *args, **kwargs):
- return _json.dump(o, wfh, cls=WAJSONEncoder, indent=indent, *args, **kwargs)
-
- @staticmethod
- def load(fh, *args, **kwargs):
- try:
- return _json.load(fh, cls=WAJSONDecoder, object_pairs_hook=OrderedDict, *args, **kwargs)
- except ValueError as e:
- raise SerializerSyntaxError(e.message)
-
- @staticmethod
- def loads(s, *args, **kwargs):
- try:
- return _json.loads(s, cls=WAJSONDecoder, object_pairs_hook=OrderedDict, *args, **kwargs)
- except ValueError as e:
- raise SerializerSyntaxError(e.message)
-
-
-_mapping_tag = _yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG
-_regex_tag = u'tag:wa:regex'
-
-
-def _wa_dict_representer(dumper, data):
- return dumper.represent_mapping(_mapping_tag, data.iteritems())
-
-
-def _wa_regex_representer(dumper, data):
- text = '{}:{}'.format(data.flags, data.pattern)
- return dumper.represent_scalar(_regex_tag, text)
-
-
-def _wa_dict_constructor(loader, node):
- pairs = loader.construct_pairs(node)
- seen_keys = set()
- for k, _ in pairs:
- if k in seen_keys:
- raise ValueError('Duplicate entry: {}'.format(k))
- seen_keys.add(k)
- return OrderedDict(pairs)
-
-
-def _wa_regex_constructor(loader, node):
- value = loader.construct_scalar(node)
- flags, pattern = value.split(':', 1)
- return re.compile(pattern, int(flags or 0))
-
-
-_yaml.add_representer(OrderedDict, _wa_dict_representer)
-_yaml.add_representer(regex_type, _wa_regex_representer)
-_yaml.add_constructor(_mapping_tag, _wa_dict_constructor)
-_yaml.add_constructor(_regex_tag, _wa_regex_constructor)
-
-
-class yaml(object):
-
- @staticmethod
- def dump(o, wfh, *args, **kwargs):
- return _yaml.dump(o, wfh, *args, **kwargs)
-
- @staticmethod
- def load(fh, *args, **kwargs):
- try:
- return _yaml.load(fh, *args, **kwargs)
- except _yaml.YAMLError as e:
- lineno = None
- if hasattr(e, 'problem_mark'):
- lineno = e.problem_mark.line # pylint: disable=no-member
- raise SerializerSyntaxError(e.message, lineno)
-
- loads = load
-
-
-class python(object):
-
- @staticmethod
- def dump(o, wfh, *args, **kwargs):
- raise NotImplementedError()
-
- @classmethod
- def load(cls, fh, *args, **kwargs):
- return cls.loads(fh.read())
-
- @staticmethod
- def loads(s, *args, **kwargs):
- pod = {}
- try:
- exec s in pod # pylint: disable=exec-used
- except SyntaxError as e:
- raise SerializerSyntaxError(e.message, e.lineno)
- for k in pod.keys():
- if k.startswith('__'):
- del pod[k]
- return pod
-
-
-def read_pod(source, fmt=None):
- if isinstance(source, basestring):
- with open(source) as fh:
- return _read_pod(fh, fmt)
- elif hasattr(source, 'read') and (hasattr(source, 'name') or fmt):
- return _read_pod(source, fmt)
- else:
- message = 'source must be a path or an open file handle; got {}'
- raise ValueError(message.format(type(source)))
-
-def write_pod(pod, dest, fmt=None):
- if isinstance(dest, basestring):
- with open(dest, 'w') as wfh:
- return _write_pod(pod, wfh, fmt)
- elif hasattr(dest, 'write') and (hasattr(dest, 'name') or fmt):
- return _write_pod(pod, dest, fmt)
- else:
- message = 'dest must be a path or an open file handle; got {}'
- raise ValueError(message.format(type(dest)))
-
-
-def dump(o, wfh, fmt='json', *args, **kwargs):
- serializer = {'yaml': yaml,
- 'json': json,
- 'python': python,
- 'py': python,
- }.get(fmt)
- if serializer is None:
- raise ValueError('Unknown serialization format: "{}"'.format(fmt))
- serializer.dump(o, wfh, *args, **kwargs)
-
-
-def load(s, fmt='json', *args, **kwargs):
- return read_pod(s, fmt=fmt)
-
-
-def _read_pod(fh, fmt=None):
- if fmt is None:
- fmt = os.path.splitext(fh.name)[1].lower().strip('.')
- if fmt == 'yaml':
- return yaml.load(fh)
- elif fmt == 'json':
- return json.load(fh)
- elif fmt == 'py':
- return python.load(fh)
- else:
- raise ValueError('Unknown format "{}": {}'.format(fmt, getattr(fh, 'name', '<none>')))
-
-def _write_pod(pod, wfh, fmt=None):
- if fmt is None:
- fmt = os.path.splitext(wfh.name)[1].lower().strip('.')
- if fmt == 'yaml':
- return yaml.dump(pod, wfh)
- elif fmt == 'json':
- return json.dump(pod, wfh)
- elif fmt == 'py':
- raise ValueError('Serializing to Python is not supported')
- else:
- raise ValueError('Unknown format "{}": {}'.format(fmt, getattr(wfh, 'name', '<none>')))
-
-def is_pod(obj):
- return type(obj) in POD_TYPES
-
diff --git a/wlauto/utils/terminalsize.py b/wlauto/utils/terminalsize.py
deleted file mode 100644
index 828ca3e6..00000000
--- a/wlauto/utils/terminalsize.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Adapted from
-# https://gist.github.com/jtriley/1108174
-# pylint: disable=bare-except,unpacking-non-sequence
-import os
-import shlex
-import struct
-import platform
-import subprocess
-
-
-def get_terminal_size():
- """ getTerminalSize()
- - get width and height of console
- - works on linux,os x,windows,cygwin(windows)
- originally retrieved from:
- http://stackoverflow.com/questions/566746/how-to-get-console-window-width-in-python
- """
- current_os = platform.system()
- tuple_xy = None
- if current_os == 'Windows':
- tuple_xy = _get_terminal_size_windows()
- if tuple_xy is None:
- # needed for window's python in cygwin's xterm
- tuple_xy = _get_terminal_size_tput()
- if current_os in ['Linux', 'Darwin'] or current_os.startswith('CYGWIN'):
- tuple_xy = _get_terminal_size_linux()
- if tuple_xy is None or tuple_xy == (0, 0):
- tuple_xy = (80, 25) # assume "standard" terminal
- return tuple_xy
-
-
-def _get_terminal_size_windows():
- # pylint: disable=unused-variable,redefined-outer-name,too-many-locals
- try:
- from ctypes import windll, create_string_buffer
- # stdin handle is -10
- # stdout handle is -11
- # stderr handle is -12
- h = windll.kernel32.GetStdHandle(-12)
- csbi = create_string_buffer(22)
- res = windll.kernel32.GetConsoleScreenBufferInfo(h, csbi)
- if res:
- (bufx, bufy, curx, cury, wattr,
- left, top, right, bottom,
- maxx, maxy) = struct.unpack("hhhhHhhhhhh", csbi.raw)
- sizex = right - left + 1
- sizey = bottom - top + 1
- return sizex, sizey
- except:
- pass
-
-
-def _get_terminal_size_tput():
- # get terminal width
- # src: http://stackoverflow.com/questions/263890/how-do-i-find-the-width-height-of-a-terminal-window
- try:
- cols = int(subprocess.check_call(shlex.split('tput cols')))
- rows = int(subprocess.check_call(shlex.split('tput lines')))
- return (cols, rows)
- except:
- pass
-
-
-def _get_terminal_size_linux():
- def ioctl_GWINSZ(fd):
- try:
- import fcntl
- import termios
- cr = struct.unpack('hh',
- fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
- return cr
- except:
- pass
- cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
- if not cr:
- try:
- fd = os.open(os.ctermid(), os.O_RDONLY)
- cr = ioctl_GWINSZ(fd)
- os.close(fd)
- except:
- pass
- if not cr:
- try:
- cr = (os.environ['LINES'], os.environ['COLUMNS'])
- except:
- return None
- return int(cr[1]), int(cr[0])
-
-
-if __name__ == "__main__":
- sizex, sizey = get_terminal_size()
- print 'width =', sizex, 'height =', sizey
-
diff --git a/wlauto/utils/trace_cmd.py b/wlauto/utils/trace_cmd.py
deleted file mode 100644
index 6c64c72d..00000000
--- a/wlauto/utils/trace_cmd.py
+++ /dev/null
@@ -1,271 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import re
-import logging
-from itertools import chain
-
-from wlauto.utils.misc import isiterable
-from wlauto.utils.types import numeric
-
-
-logger = logging.getLogger('trace-cmd')
-
-
-# These markers can be injected into trace to identify the "interesting"
-# portion.
-TRACE_MARKER_START = 'TRACE_MARKER_START'
-TRACE_MARKER_STOP = 'TRACE_MARKER_STOP'
-
-
-class TraceCmdEvent(object):
- """
- A single trace-cmd event. This will appear in the trace cmd report in the format ::
-
- <idle>-0 [000] 3284.126993: sched_rq_runnable_load: cpu=0 load=54
- | | | | |___________|
- | | | | |
- thread cpu timestamp name body
-
- """
-
- __slots__ = ['thread', 'reporting_cpu_id', 'timestamp', 'name', 'text', 'fields']
-
- def __init__(self, thread, cpu_id, ts, name, body, parser=None):
- """
- parameters:
-
- :thread: thread which generated the event
- :cpu: cpu on which the event has occurred
- :ts: timestamp of the event
- :name: the name of the event
- :bodytext: a string with the rest of the event text
- :parser: optionally, a function that will parse bodytext to populate
- this event's attributes
-
- The parser can be any callable that can be invoked with
-
- parser(event, text)
-
- Where ``event`` is this TraceCmdEvent instance, and ``text`` is the body text to be
- parsed. The parser should updated the passed event instance and not return anything
- (the return value will be ignored). Any exceptions raised by the parser will be silently
- ignored (note that this means that the event's attributes may be partially initialized).
-
- """
- self.thread = thread
- self.reporting_cpu_id = int(cpu_id)
- self.timestamp = numeric(ts)
- self.name = name
- self.text = body
- self.fields = {}
-
- if parser:
- try:
- parser(self, self.text)
- except Exception: # pylint: disable=broad-except
- # unknown format assume user does not care or know how to
- # parse self.text
- pass
-
- def __getattr__(self, name):
- try:
- return self.fields[name]
- except KeyError:
- raise AttributeError(name)
-
- def __str__(self):
- return 'TE({} @ {})'.format(self.name, self.timestamp)
-
- __repr__ = __str__
-
-
-class DroppedEventsEvent(object):
-
- __slots__ = ['thread', 'reporting_cpu_id', 'timestamp', 'name', 'text', 'fields']
-
- def __init__(self, cpu_id):
- self.thread = None
- self.reporting_cpu_id = None
- self.timestamp = None
- self.name = 'DROPPED EVENTS DETECTED'
- self.text = None
- self.fields = {'cpu_id': int(cpu_id)}
-
- def __getattr__(self, name):
- try:
- return self.fields[name]
- except KeyError:
- raise AttributeError(name)
-
- def __str__(self):
- return 'DROPPED_EVENTS_ON_CPU{}'.format(self.cpu_id)
-
- __repr__ = __str__
-
-
-def try_convert_to_numeric(v):
- try:
- if isiterable(v):
- return map(numeric, v)
- else:
- return numeric(v)
- except ValueError:
- return v
-
-
-def default_body_parser(event, text):
- """
- Default parser to attempt to use to parser body text for the event (i.e. after
- the "header" common to all events has been parsed). This assumes that the body is
- a whitespace-separated list of key=value pairs. The parser will attempt to convert
- the value into a numeric type, and failing that, keep it as string.
-
- """
- parts = [e.rsplit(' ', 1) for e in text.strip().split('=')]
- parts = [p.strip() for p in chain.from_iterable(parts)]
- if not len(parts) % 2:
- i = iter(parts)
- for k, v in zip(i, i):
- try:
- v = int(v)
- except ValueError:
- pass
- event.fields[k] = v
-
-
-def regex_body_parser(regex, flags=0):
- """
- Creates an event body parser form the specified regular expression (could be an
- ``re.RegexObject``, or a string). The regular expression should contain some named
- groups, as those will be extracted as the event attributes (unnamed groups and the
- reset of the match will be ignored).
-
- If the specified regex is a string, it will be compiled, in which case ``flags`` may
- be provided for the resulting regex object (see ``re`` standard module documentation).
- If regex is a pre-compiled object, flags will be ignored.
-
- """
- if isinstance(regex, basestring):
- regex = re.compile(regex, flags)
-
- def regex_parser_func(event, text):
- match = regex.search(text)
- if match:
- for k, v in match.groupdict().iteritems():
- try:
- event.fields[k] = int(v)
- except ValueError:
- event.fields[k] = v
-
- return regex_parser_func
-
-
-# Maps event onto the corresponding parser for its body text. A parser may be
-# a callable with signature
-#
-# parser(event, bodytext)
-#
-# a re.RegexObject, or a string (in which case it will be compiled into a
-# regex). In case of a string/regex, its named groups will be used to populate
-# the event's attributes.
-EVENT_PARSER_MAP = {
- 'sched_switch': re.compile(
- r'(?P<prev_comm>\S.*):(?P<prev_pid>\d+) \[(?P<prev_prio>\d+)\] (?P<status>\S+)'
- r' ==> '
- r'(?P<next_comm>\S.*):(?P<next_pid>\d+) \[(?P<next_prio>\d+)\]'
- ),
-}
-
-TRACE_EVENT_REGEX = re.compile(r'^\s+(?P<thread>\S+.*?\S+)\s+\[(?P<cpu_id>\d+)\]\s+(?P<ts>[\d.]+):\s+'
- r'(?P<name>[^:]+):\s+(?P<body>.*?)\s*$')
-
-HEADER_REGEX = re.compile(r'^\s*(?:version|cpus)\s*=\s*([\d.]+)\s*$')
-
-DROPPED_EVENTS_REGEX = re.compile(r'CPU:(?P<cpu_id>\d+) \[\d*\s*EVENTS DROPPED\]')
-
-EMPTY_CPU_REGEX = re.compile(r'CPU \d+ is empty')
-
-
-class TraceCmdTrace(object):
-
- def __init__(self, filter_markers=True):
- self.filter_markers = filter_markers
-
- def parse(self, filepath, names=None, check_for_markers=True): # pylint: disable=too-many-branches,too-many-locals
- """
- This is a generator for the trace event stream.
-
- """
- inside_maked_region = False
- filters = [re.compile('^{}$'.format(n)) for n in names or []]
- if check_for_markers:
- with open(filepath) as fh:
- for line in fh:
- if TRACE_MARKER_START in line:
- break
- else:
- # maker not found force filtering by marker to False
- self.filter_markers = False
-
- with open(filepath) as fh:
- for line in fh:
- # if processing trace markers, skip marker lines as well as all
- # lines outside marked region
- if self.filter_markers:
- if not inside_maked_region:
- if TRACE_MARKER_START in line:
- inside_maked_region = True
- continue
- elif TRACE_MARKER_STOP in line:
- inside_maked_region = False
- continue
-
- match = DROPPED_EVENTS_REGEX.search(line)
- if match:
- yield DroppedEventsEvent(match.group('cpu_id'))
- continue
-
- matched = False
- for rx in [HEADER_REGEX, EMPTY_CPU_REGEX]:
- match = rx.search(line)
- if match:
- logger.debug(line.strip())
- matched = True
- break
- if matched:
- continue
-
- match = TRACE_EVENT_REGEX.search(line)
- if not match:
- logger.warning('Invalid trace event: "{}"'.format(line))
- continue
-
- event_name = match.group('name')
-
- if filters:
- found = False
- for f in filters:
- if f.search(event_name):
- found = True
- break
- if not found:
- continue
-
- body_parser = EVENT_PARSER_MAP.get(event_name, default_body_parser)
- if isinstance(body_parser, basestring) or isinstance(body_parser, re._pattern_type): # pylint: disable=protected-access
- body_parser = regex_body_parser(body_parser)
- yield TraceCmdEvent(parser=body_parser, **match.groupdict())
-
diff --git a/wlauto/utils/types.py b/wlauto/utils/types.py
deleted file mode 100644
index 7b13f979..00000000
--- a/wlauto/utils/types.py
+++ /dev/null
@@ -1,476 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-"""
-Routines for doing various type conversions. These usually embody some higher-level
-semantics than are present in standard Python types (e.g. ``boolean`` will convert the
-string ``"false"`` to ``False``, where as non-empty strings are usually considered to be
-``True``).
-
-A lot of these are intened to stpecify type conversions declaratively in place like
-``Parameter``'s ``kind`` argument. These are basically "hacks" around the fact that Python
-is not the best language to use for configuration.
-
-"""
-import os
-import re
-import math
-import shlex
-from bisect import insort
-from collections import defaultdict, MutableMapping
-from copy import copy
-
-from wlauto.utils.misc import isiterable, to_identifier
-from devlib.utils.types import identifier, boolean, integer, numeric, caseless_string
-
-
-def list_of_strs(value):
- """
- Value must be iterable. All elements will be converted to strings.
-
- """
- if not isiterable(value):
- raise ValueError(value)
- return map(str, value)
-
-list_of_strings = list_of_strs
-
-
-def list_of_ints(value):
- """
- Value must be iterable. All elements will be converted to ``int``\ s.
-
- """
- if not isiterable(value):
- raise ValueError(value)
- return map(int, value)
-
-list_of_integers = list_of_ints
-
-
-def list_of_numbers(value):
- """
- Value must be iterable. All elements will be converted to numbers (either ``ints`` or
- ``float``\ s depending on the elements).
-
- """
- if not isiterable(value):
- raise ValueError(value)
- return map(numeric, value)
-
-
-def list_of_bools(value, interpret_strings=True):
- """
- Value must be iterable. All elements will be converted to ``bool``\ s.
-
- .. note:: By default, ``boolean()`` conversion function will be used, which means that
- strings like ``"0"`` or ``"false"`` will be interpreted as ``False``. If this
- is undesirable, set ``interpret_strings`` to ``False``.
-
- """
- if not isiterable(value):
- raise ValueError(value)
- if interpret_strings:
- return map(boolean, value)
- else:
- return map(bool, value)
-
-
-def list_of(type_):
- """Generates a "list of" callable for the specified type. The callable
- attempts to convert all elements in the passed value to the specifed
- ``type_``, raising ``ValueError`` on error."""
- def __init__(self, values):
- list.__init__(self, map(type_, values))
-
- def append(self, value):
- list.append(self, type_(value))
-
- def extend(self, other):
- list.extend(self, map(type_, other))
-
- def __setitem__(self, idx, value):
- list.__setitem__(self, idx, type_(value))
-
- return type('list_of_{}s'.format(type_.__name__),
- (list, ), {
- "__init__": __init__,
- "__setitem__": __setitem__,
- "append": append,
- "extend": extend,
- })
-
-
-def list_or_string(value):
- """
- Converts the value into a list of strings. If the value is not iterable,
- a one-element list with stringified value will be returned.
-
- """
- if isinstance(value, basestring):
- return [value]
- else:
- try:
- return list(value)
- except ValueError:
- return [str(value)]
-
-
-def list_or_caseless_string(value):
- """
- Converts the value into a list of ``caseless_string``'s. If the value is not iterable
- a one-element list with stringified value will be returned.
-
- """
- if isinstance(value, basestring):
- return [caseless_string(value)]
- else:
- try:
- return map(caseless_string, value)
- except ValueError:
- return [caseless_string(value)]
-
-
-def list_or(type_):
- """
- Generator for "list or" types. These take either a single value or a list values
- and return a list of the specfied ``type_`` performing the conversion on the value
- (if a single value is specified) or each of the elemented of the specified list.
-
- """
- list_type = list_of(type_)
-
- class list_or_type(list_type):
- def __init__(self, value):
- # pylint: disable=non-parent-init-called,super-init-not-called
- if isiterable(value):
- list_type.__init__(self, value)
- else:
- list_type.__init__(self, [value])
- return list_or_type
-
-
-list_or_integer = list_or(integer)
-list_or_number = list_or(numeric)
-list_or_bool = list_or(boolean)
-
-
-regex_type = type(re.compile(''))
-none_type = type(None)
-
-
-def regex(value):
- """
- Regular expression. If value is a string, it will be complied with no flags. If you
- want to specify flags, value must be precompiled.
-
- """
- if isinstance(value, regex_type):
- return value
- else:
- return re.compile(value)
-
-
-__counters = defaultdict(int)
-
-
-def reset_counter(name=None):
- __counters[name] = 0
-
-
-def counter(name=None):
- """
- An auto incremeting value (kind of like an AUTO INCREMENT field in SQL).
- Optionally, the name of the counter to be used is specified (each counter
- increments separately).
-
- Counts start at 1, not 0.
-
- """
- __counters[name] += 1
- value = __counters[name]
- return value
-
-
-class arguments(list):
- """
- Represents command line arguments to be passed to a program.
-
- """
-
- def __init__(self, value=None):
- if isiterable(value):
- super(arguments, self).__init__(map(str, value))
- elif isinstance(value, basestring):
- posix = os.name != 'nt'
- super(arguments, self).__init__(shlex.split(value, posix=posix))
- elif value is None:
- super(arguments, self).__init__()
- else:
- super(arguments, self).__init__([str(value)])
-
- def append(self, value):
- return super(arguments, self).append(str(value))
-
- def extend(self, values):
- return super(arguments, self).extend(map(str, values))
-
- def __str__(self):
- return ' '.join(self)
-
-
-class prioritylist(object):
-
- def __init__(self):
- """
- Returns an OrderedReceivers object that externaly behaves
- like a list but it maintains the order of its elements
- according to their priority.
- """
- self.elements = defaultdict(list)
- self.is_ordered = True
- self.priorities = []
- self.size = 0
- self._cached_elements = None
-
- def add(self, new_element, priority=0):
- """
- adds a new item in the list.
-
- - ``new_element`` the element to be inserted in the prioritylist
- - ``priority`` is the priority of the element which specifies its
- order withing the List
- """
- self._add_element(new_element, priority)
-
- def add_before(self, new_element, element):
- priority, index = self._priority_index(element)
- self._add_element(new_element, priority, index)
-
- def add_after(self, new_element, element):
- priority, index = self._priority_index(element)
- self._add_element(new_element, priority, index + 1)
-
- def index(self, element):
- return self._to_list().index(element)
-
- def remove(self, element):
- index = self.index(element)
- self.__delitem__(index)
-
- def _priority_index(self, element):
- for priority, elements in self.elements.iteritems():
- if element in elements:
- return (priority, elements.index(element))
- raise IndexError(element)
-
- def _to_list(self):
- if self._cached_elements is None:
- self._cached_elements = []
- for priority in self.priorities:
- self._cached_elements += self.elements[priority]
- return self._cached_elements
-
- def _add_element(self, element, priority, index=None):
- if index is None:
- self.elements[priority].append(element)
- else:
- self.elements[priority].insert(index, element)
- self.size += 1
- self._cached_elements = None
- if priority not in self.priorities:
- insort(self.priorities, priority)
-
- def _delete(self, priority, priority_index):
- del self.elements[priority][priority_index]
- self.size -= 1
- if len(self.elements[priority]) == 0:
- self.priorities.remove(priority)
- self._cached_elements = None
-
- def __iter__(self):
- for priority in reversed(self.priorities): # highest priority first
- for element in self.elements[priority]:
- yield element
-
- def __getitem__(self, index):
- return self._to_list()[index]
-
- def __delitem__(self, index):
- if isinstance(index, numbers.Integral):
- index = int(index)
- if index < 0:
- index_range = [len(self) + index]
- else:
- index_range = [index]
- elif isinstance(index, slice):
- index_range = range(index.start or 0, index.stop, index.step or 1)
- else:
- raise ValueError('Invalid index {}'.format(index))
- current_global_offset = 0
- priority_counts = {priority: count for (priority, count) in
- zip(self.priorities, [len(self.elements[p])
- for p in self.priorities])}
- for priority in self.priorities:
- if not index_range:
- break
- priority_offset = 0
- while index_range:
- del_index = index_range[0]
- if priority_counts[priority] + current_global_offset <= del_index:
- current_global_offset += priority_counts[priority]
- break
- within_priority_index = del_index - \
- (current_global_offset + priority_offset)
- self._delete(priority, within_priority_index)
- priority_offset += 1
- index_range.pop(0)
-
- def __len__(self):
- return self.size
-
-
-class toggle_set(set):
- """
- A list that contains items to enable or disable something.
-
- A prefix of ``~`` is used to denote disabling something, for example
- the list ['apples', '~oranges', 'cherries'] enables both ``apples``
- and ``cherries`` but disables ``oranges``.
- """
-
- @staticmethod
- def from_pod(pod):
- return toggle_set(pod)
-
- @staticmethod
- def merge(source, dest):
- for item in source:
- if item not in dest:
- #Disable previously enabled item
- if item.startswith('~') and item[1:] in dest:
- dest.remove(item[1:])
- #Enable previously disabled item
- if not item.startswith('~') and ('~' + item) in dest:
- dest.remove('~' + item)
- dest.add(item)
- return dest
-
- def merge_with(self, other):
- new_self = copy(self)
- return toggle_set.merge(other, new_self)
-
- def merge_into(self, other):
- other = copy(other)
- return toggle_set.merge(self, other)
-
- def values(self):
- """
- returns a list of enabled items.
- """
- return set([item for item in self if not item.startswith('~')])
-
- def conflicts_with(self, other):
- """
- Checks if any items in ``other`` conflict with items already in this list.
-
- Args:
- other (list): The list to be checked against
-
- Returns:
- A list of items in ``other`` that conflict with items in this list
- """
- conflicts = []
- for item in other:
- if item.startswith('~') and item[1:] in self:
- conflicts.append(item)
- if not item.startswith('~') and ('~' + item) in self:
- conflicts.append(item)
- return conflicts
-
- def to_pod(self):
- return list(self.values())
-
-
-class ID(str):
-
- def merge_with(self, other):
- return '_'.join(self, other)
-
- def merge_into(self, other):
- return '_'.join(other, self)
-
-
-class obj_dict(MutableMapping):
- """
- An object that behaves like a dict but each dict entry can also be accesed
- as an attribute.
-
- :param not_in_dict: A list of keys that can only be accessed as attributes
-
- """
-
- @staticmethod
- def from_pod(pod):
- return obj_dict(pod)
-
- def __init__(self, values=None, not_in_dict=None):
- self.__dict__['dict'] = dict(values or {})
- self.__dict__['not_in_dict'] = not_in_dict if not_in_dict is not None else []
-
- def to_pod(self):
- return self.__dict__['dict']
-
- def __getitem__(self, key):
- if key in self.not_in_dict:
- msg = '"{}" is in the list keys that can only be accessed as attributes'
- raise KeyError(msg.format(key))
- return self.__dict__['dict'][key]
-
- def __setitem__(self, key, value):
- self.__dict__['dict'][key] = value
-
- def __delitem__(self, key):
- del self.__dict__['dict'][key]
-
- def __len__(self):
- return sum(1 for _ in self)
-
- def __iter__(self):
- for key in self.__dict__['dict']:
- if key not in self.__dict__['not_in_dict']:
- yield key
-
- def __repr__(self):
- return repr(dict(self))
-
- def __str__(self):
- return str(dict(self))
-
- def __setattr__(self, name, value):
- self.__dict__['dict'][name] = value
-
- def __delattr__(self, name):
- if name in self:
- del self.__dict__['dict'][name]
- else:
- raise AttributeError("No such attribute: " + name)
-
- def __getattr__(self, name):
- if name in self.__dict__['dict']:
- return self.__dict__['dict'][name]
- else:
- raise AttributeError("No such attribute: " + name)
diff --git a/wlauto/workloads/__init__.py b/wlauto/workloads/__init__.py
deleted file mode 100644
index cd5d64d6..00000000
--- a/wlauto/workloads/__init__.py
+++ /dev/null
@@ -1,16 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
diff --git a/wlauto/workloads/andebench/__init__.py b/wlauto/workloads/andebench/__init__.py
deleted file mode 100644
index 56a91ec9..00000000
--- a/wlauto/workloads/andebench/__init__.py
+++ /dev/null
@@ -1,95 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import re
-
-from wlauto import AndroidUiAutoBenchmark, Parameter, Alias
-from wlauto.exceptions import ConfigError
-
-
-class Andebench(AndroidUiAutoBenchmark):
-
- name = 'andebench'
- description = """
- AndEBench is an industry standard Android benchmark provided by The
- Embedded Microprocessor Benchmark Consortium (EEMBC).
-
- http://www.eembc.org/andebench/about.php
-
- From the website:
-
- - Initial focus on CPU and Dalvik interpreter performance
- - Internal algorithms concentrate on integer operations
- - Compares the difference between native and Java performance
- - Implements flexible multicore performance analysis
- - Results displayed in Iterations per second
- - Detailed log file for comprehensive engineering analysis
-
- """
- package = 'com.eembc.coremark'
- activity = 'com.eembc.coremark.splash'
- summary_metrics = ['AndEMark Java', 'AndEMark Native']
-
- parameters = [
- Parameter('number_of_threads', kind=int,
- description='Number of threads that will be spawned by AndEBench.'),
- Parameter('single_threaded', kind=bool,
- description="""
- If ``true``, AndEBench will run with a single thread. Note: this must
- not be specified if ``number_of_threads`` has been specified.
- """),
- Parameter('native_only', kind=bool,
- description="""
- If ``true``, AndEBench will execute only the native portion of the benchmark.
- """),
- ]
-
- aliases = [
- Alias('andebenchst', number_of_threads=1),
- ]
-
- regex = re.compile('\s*(?P<key>(AndEMark Native|AndEMark Java))\s*:'
- '\s*(?P<value>\d+)')
-
- def validate(self):
- if (self.number_of_threads is not None) and (self.single_threaded is not None): # pylint: disable=E1101
- raise ConfigError('Can\'t specify both number_of_threads and single_threaded parameters.')
-
- def setup(self, context):
- if self.number_of_threads is None: # pylint: disable=access-member-before-definition
- if self.single_threaded: # pylint: disable=E1101
- self.number_of_threads = 1 # pylint: disable=attribute-defined-outside-init
- else:
- self.number_of_threads = self.device.number_of_cores # pylint: disable=W0201
- self.logger.debug('Using {} threads'.format(self.number_of_threads))
- self.uiauto_params['number_of_threads'] = self.number_of_threads
- self.uiauto_params['native_only'] = False
- if self.native_only:
- self.uiauto_params['native_only'] = True
- # Called after this setup as modifying uiauto_params
- super(Andebench, self).setup(context)
-
- def update_result(self, context):
- super(Andebench, self).update_result(context)
- results = {}
- with open(self.logcat_log) as fh:
- for line in fh:
- match = self.regex.search(line)
- if match:
- data = match.groupdict()
- results[data['key']] = data['value']
- for key, value in results.iteritems():
- context.result.add_metric(key, value)
-
diff --git a/wlauto/workloads/andebench/com.arm.wlauto.uiauto.andebench.jar b/wlauto/workloads/andebench/com.arm.wlauto.uiauto.andebench.jar
deleted file mode 100644
index cc1bb880..00000000
--- a/wlauto/workloads/andebench/com.arm.wlauto.uiauto.andebench.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/andebench/uiauto/build.sh b/wlauto/workloads/andebench/uiauto/build.sh
deleted file mode 100755
index d36878cf..00000000
--- a/wlauto/workloads/andebench/uiauto/build.sh
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-${ANDROID_HOME}/tools/android update project -p .
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.andebench.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.andebench.jar ..
-fi
diff --git a/wlauto/workloads/andebench/uiauto/build.xml b/wlauto/workloads/andebench/uiauto/build.xml
deleted file mode 100644
index 8d0957f1..00000000
--- a/wlauto/workloads/andebench/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.andebench" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/andebench/uiauto/project.properties b/wlauto/workloads/andebench/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/andebench/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/andebench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/andebench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index 0d8fa06c..00000000
--- a/wlauto/workloads/andebench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.andebench;
-
-import java.util.concurrent.TimeUnit;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "andebench";
-
- private static int initialTimeoutSeconds = 20;
- private static int shortDelaySeconds = 3;
-
- public void runUiAutomation() throws Exception{
- Bundle status = new Bundle();
- Bundle params = getParams();
- String numThreads = params.getString("number_of_threads");
- Boolean nativeOnly = Boolean.parseBoolean(params.getString("native_only"));
- status.putString("product", getUiDevice().getProductName());
-
- waitForStartButton();
- setConfiguration(numThreads, nativeOnly);
- hitStart();
- waitForAndExtractResuts();
-
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
- public void waitForStartButton() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject startButton = new UiObject(selector.className("android.widget.ImageButton")
- .packageName("com.eembc.coremark"));
- if (!startButton.waitForExists(TimeUnit.SECONDS.toMillis(initialTimeoutSeconds))) {
- throw new UiObjectNotFoundException("Did not see start button.");
- }
- }
-
- public void setConfiguration(String numThreads, boolean nativeOnly) throws Exception {
- UiSelector selector = new UiSelector();
- getUiDevice().pressMenu();
-
- UiObject settingsButton = new UiObject(selector.clickable(true));
- settingsButton.click();
-
- if (nativeOnly) {
- UiObject nativeButton = new UiObject(selector.textContains("Native"));
- nativeButton.click();
- }
-
- UiObject threadNumberField = new UiObject(selector.className("android.widget.EditText"));
- threadNumberField.clearTextField();
- threadNumberField.setText(numThreads);
-
- getUiDevice().pressBack();
- sleep(shortDelaySeconds);
- // If the device does not have a physical keyboard, a virtual one might have
- // poped up when setting the number of threads. If that happend, then the above
- // backpress would dismiss the vkb and another one will be necessary to return
- // from the settings screen.
- if(threadNumberField.exists())
- {
- getUiDevice().pressBack();
- sleep(shortDelaySeconds);
- }
- }
-
- public void hitStart() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject startButton = new UiObject(selector.className("android.widget.ImageButton")
- .packageName("com.eembc.coremark"));
- startButton.click();
- sleep(shortDelaySeconds);
- }
-
- public void waitForAndExtractResuts() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject runningText = new UiObject(selector.textContains("Running...")
- .className("android.widget.TextView")
- .packageName("com.eembc.coremark"));
- runningText.waitUntilGone(TimeUnit.SECONDS.toMillis(600));
-
- UiObject resultText = new UiObject(selector.textContains("Results in Iterations/sec:")
- .className("android.widget.TextView")
- .packageName("com.eembc.coremark"));
- resultText.waitForExists(TimeUnit.SECONDS.toMillis(shortDelaySeconds));
- Log.v(TAG, resultText.getText());
- sleep(shortDelaySeconds);
- }
-}
diff --git a/wlauto/workloads/androbench/__init__.py b/wlauto/workloads/androbench/__init__.py
deleted file mode 100644
index 429ddd2c..00000000
--- a/wlauto/workloads/androbench/__init__.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import sqlite3
-
-from wlauto import AndroidUiAutoBenchmark
-from wlauto.exceptions import WorkloadError
-
-
-class Androbench(AndroidUiAutoBenchmark):
- name = 'androbench'
- description = """
- Measures the storage performance of an Android device.
-
- Website: http://www.androbench.org/wiki/AndroBench
- """
- package = 'com.andromeda.androbench2'
- activity = '.main'
- run_timeout = 10 * 60
-
- def initialize(self, context):
- if not self.device.is_rooted:
- raise WorkloadError('Androbench workload only works on rooted devices.')
-
- def update_result(self, context):
- super(Androbench, self).update_result(context)
- dbn = 'databases/history.db'
- db = self.device.path.join(self.device.package_data_directory, self.package, dbn)
- host_results = os.path.join(context.output_directory, 'history.db')
- self.device.pull(db, host_results, as_root=True)
- qs = 'select * from history'
- conn = sqlite3.connect(host_results)
- c = conn.cursor()
- c.execute(qs)
- results = c.fetchone()
- context.result.add_metric('Sequential Read', results[8], 'MB/s')
- context.result.add_metric('Sequential Write', results[9], 'MB/s')
- context.result.add_metric('Random Read', results[10], 'MB/s')
- context.result.add_metric('Random Write', results[12], 'MB/s')
diff --git a/wlauto/workloads/androbench/com.arm.wlauto.uiauto.androbench.jar b/wlauto/workloads/androbench/com.arm.wlauto.uiauto.androbench.jar
deleted file mode 100644
index d047faf2..00000000
--- a/wlauto/workloads/androbench/com.arm.wlauto.uiauto.androbench.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/androbench/uiauto/build.sh b/wlauto/workloads/androbench/uiauto/build.sh
deleted file mode 100755
index 2bec8695..00000000
--- a/wlauto/workloads/androbench/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.androbench.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.androbench.jar ..
-fi
diff --git a/wlauto/workloads/androbench/uiauto/build.xml b/wlauto/workloads/androbench/uiauto/build.xml
deleted file mode 100644
index 6293990f..00000000
--- a/wlauto/workloads/androbench/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.androbench" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/androbench/uiauto/project.properties b/wlauto/workloads/androbench/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/androbench/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/androbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/androbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index dd621aad..00000000
--- a/wlauto/workloads/androbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,67 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.androbench;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "androbench";
-
- public void runUiAutomation() throws Exception {
- Bundle status = new Bundle();
- status.putString("product", getUiDevice().getProductName());
- UiSelector selector = new UiSelector();
- sleep(3);
- UiObject btn_microbench = new UiObject(selector .textContains("Micro")
- .className("android.widget.Button"));
- btn_microbench.click();
-
- UiObject btn_yes= new UiObject(selector .textContains("Yes")
- .className("android.widget.Button"));
- btn_yes.click();
-
-
- try{
- UiObject complete_text = new UiObject(selector .text("Cancel")
- .className("android.widget.Button"));
-
- waitObject(complete_text);
-
- sleep(2);
- complete_text.click();
- } finally{
- //complete_text.click();
- }
-
- sleep(5);
- takeScreenshot("Androbench");
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
-}
diff --git a/wlauto/workloads/angrybirds/__init__.py b/wlauto/workloads/angrybirds/__init__.py
deleted file mode 100644
index 92ef6828..00000000
--- a/wlauto/workloads/angrybirds/__init__.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import GameWorkload
-
-
-class AngryBirds(GameWorkload):
-
- name = 'angrybirds'
- description = """
- Angry Birds game.
-
- A very popular Android 2D game.
- """
- package = 'com.rovio.angrybirds'
- activity = 'com.rovio.ka3d.App'
-
diff --git a/wlauto/workloads/angrybirds/angrybirds_classic.revent b/wlauto/workloads/angrybirds/angrybirds_classic.revent
deleted file mode 100644
index 74a46c70..00000000
--- a/wlauto/workloads/angrybirds/angrybirds_classic.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/angrybirds/revent_files/.empty b/wlauto/workloads/angrybirds/revent_files/.empty
deleted file mode 100644
index e69de29b..00000000
--- a/wlauto/workloads/angrybirds/revent_files/.empty
+++ /dev/null
diff --git a/wlauto/workloads/angrybirds_rio/__init__.py b/wlauto/workloads/angrybirds_rio/__init__.py
deleted file mode 100644
index c413fd97..00000000
--- a/wlauto/workloads/angrybirds_rio/__init__.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import GameWorkload
-
-
-class AngryBirdsRio(GameWorkload):
-
- name = 'angrybirds_rio'
- description = """
- Angry Birds Rio game.
-
- The sequel to the very popular Android 2D game.
- """
- package = 'com.rovio.angrybirdsrio'
- activity = 'com.rovio.ka3d.App'
-
diff --git a/wlauto/workloads/angrybirds_rio/revent_files/.empty b/wlauto/workloads/angrybirds_rio/revent_files/.empty
deleted file mode 100644
index e69de29b..00000000
--- a/wlauto/workloads/angrybirds_rio/revent_files/.empty
+++ /dev/null
diff --git a/wlauto/workloads/angrybirds_rio/revent_files/Nexus10.run.revent b/wlauto/workloads/angrybirds_rio/revent_files/Nexus10.run.revent
deleted file mode 100644
index bb0e7018..00000000
--- a/wlauto/workloads/angrybirds_rio/revent_files/Nexus10.run.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/angrybirds_rio/revent_files/Nexus10.setup.revent b/wlauto/workloads/angrybirds_rio/revent_files/Nexus10.setup.revent
deleted file mode 100644
index 5f2ae879..00000000
--- a/wlauto/workloads/angrybirds_rio/revent_files/Nexus10.setup.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/anomaly2/__init__.py b/wlauto/workloads/anomaly2/__init__.py
deleted file mode 100644
index 8060c34c..00000000
--- a/wlauto/workloads/anomaly2/__init__.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import json
-
-from wlauto.common.android.workload import GameWorkload
-from wlauto.exceptions import WorkloadError, DeviceError
-
-
-class Anomaly2(GameWorkload):
-
- name = 'anomaly2'
- description = """
- Anomaly 2 game demo and benchmark.
-
- Plays three scenes from the game, benchmarking each one. Scores reported are intended to
- represent overall perceived quality of the game, based not only on raw FPS but also factors
- like smoothness.
-
- """
- package = 'com.elevenbitstudios.anomaly2Benchmark'
- activity = 'com.android.Game11Bits.MainActivity'
- loading_time = 30
- asset_file = 'obb:com.elevenbitstudios.anomaly2Benchmark.tar.gz'
-
- def reset(self, context):
- pass
-
- def update_result(self, context):
- super(Anomaly2, self).update_result(context)
- sent_blobs = {'data': []}
- with open(self.logcat_log) as fh:
- for line in fh:
- if 'sendHttpRequest: json = ' in line:
- data = json.loads(line.split('json = ')[1])
- sent_blobs['data'].append(data)
- if 'scene' not in data['intValues']:
- continue
- scene = data['intValues']['scene']
- score = data['intValues']['score']
- fps = data['floatValues']['fps']
- context.result.add_metric('scene_{}_score'.format(scene), score)
- context.result.add_metric('scene_{}_fps'.format(scene), fps)
- outfile = os.path.join(context.output_directory, 'anomaly2.json')
- with open(outfile, 'wb') as wfh:
- json.dump(sent_blobs, wfh, indent=4)
-
- def teardown(self, context):
- self.device.execute('am force-stop {}'.format(self.package))
-
diff --git a/wlauto/workloads/antutu/__init__.py b/wlauto/workloads/antutu/__init__.py
deleted file mode 100644
index f863148f..00000000
--- a/wlauto/workloads/antutu/__init__.py
+++ /dev/null
@@ -1,145 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-from collections import defaultdict, OrderedDict
-
-from wlauto import AndroidUiAutoBenchmark, Parameter, File
-from wlauto.utils.android import ApkInfo
-
-
-class Antutu(AndroidUiAutoBenchmark):
-
- name = 'antutu'
- description = """
- AnTuTu Benchmark is an benchmarking tool for Android Mobile Phone/Pad. It
- can run a full test of a key project, through the "Memory Performance","CPU
- Integer Performance","CPU Floating point Performance","2D 3D Graphics
- Performance","SD card reading/writing speed","Database IO" performance
- testing, and gives accurate analysis for Andriod smart phones.
-
- http://www.antutulabs.com/AnTuTu-Benchmark
-
- From the website:
-
- AnTuTu Benchmark can support the latest quad-core cpu. In reaching the
- overall and individual scores of the hardware, AnTuTu Benchmark could judge
- your phone by the scores of the performance of the hardware. By uploading
- the scores, Benchmark can view your device in the world rankings, allowing
- points to let you know the level of hardware performance equipment.
-
- """
- #pylint: disable=E1101
-
- package = "com.antutu.ABenchMark"
- activity = ".ABenchMarkStart"
- summary_metrics = ['score', 'Overall_Score']
-
- valid_versions = ['3.3.2', '4.0.3', '5.3.0', '6.0.1']
-
- device_prefs_directory = '/data/data/com.antutu.ABenchMark/shared_prefs'
- device_prefs_file = '/'.join([device_prefs_directory, 'com.antutu.ABenchMark_preferences.xml'])
- local_prefs_directory = os.path.join(os.path.dirname(__file__), 'shared_prefs')
-
- parameters = [
- Parameter('version', allowed_values=valid_versions, default=sorted(valid_versions, reverse=True)[0],
- description=('Specify the version of AnTuTu to be run. If not specified, the latest available '
- 'version will be used.')),
- Parameter('times', kind=int, default=1,
- description=('The number of times the benchmark will be executed in a row (i.e. '
- 'without going through the full setup/teardown process). Note: this does '
- 'not work with versions prior to 4.0.3.')),
- Parameter('enable_sd_tests', kind=bool, default=False,
- description=('If ``True`` enables SD card tests in pre version 4 AnTuTu. These tests '
- 'were know to cause problems on platforms without an SD card. This parameter '
- 'will be ignored on AnTuTu version 4 and higher.')),
- ]
-
- def __init__(self, device, **kwargs): # pylint: disable=W0613
- super(Antutu, self).__init__(device, **kwargs)
- self.run_timeout = 10 * 60 * self.times
- self.uiauto_params['version'] = self.version
- self.uiauto_params['times'] = self.times
- self.uiauto_params['enable_sd_tests'] = self.enable_sd_tests
-
- def setup(self, context):
- if self.version == "6.0.1":
- antutu_3d = context.resolver.get(File(self, "com.antutu.benchmark.full-1.apk"))
- info = ApkInfo(antutu_3d)
- if not context.device.is_installed(info.package):
- self.device.install_apk(antutu_3d, timeout=120)
- # Antutu doesnt seem to list this as one of its permissions, but it asks for it.
- self.device.execute("pm grant com.antutu.ABenchMark android.permission.ACCESS_FINE_LOCATION")
- super(Antutu, self).setup(context)
-
- def update_result(self, context):
- super(Antutu, self).update_result(context)
- with open(self.logcat_log) as fh:
- if self.version == '3.3.2':
- metrics = extract_older_version_metrics(fh)
- else:
- metrics = extract_metrics(fh) # pylint: disable=redefined-variable-type
- for key, value in metrics.iteritems():
- key = key.replace(' ', '_')
- context.result.add_metric(key, value)
-
-
-# Utility functions
-
-def extract_metrics(fh):
- metrics = OrderedDict()
- metric_counts = defaultdict(int)
- for line in fh:
- if 'ANTUTU RESULT:' in line:
- result = line.split('ANTUTU RESULT:')[1]
- metric, value_string = [v.strip() for v in result.split(':', 1)]
- # If times prameter > 1 the same metric will appear
- # multiple times in logcat -- we want to collet all of
- # them as they're from different iterations.
- metric_counts[metric] += 1
- if metric_counts[metric] > 1:
- metric += '_' + str(metric_counts[metric])
-
- # Grahics results report resolution in square brackets
- # as part of value string.
- if ']' in value_string:
- value = int(value_string.split(']')[1].strip())
- else:
- value = int(value_string)
-
- metrics[metric] = value
- return metrics
-
-
-def extract_older_version_metrics(fh):
- metrics = {}
- metric_counts = defaultdict(int)
- for line in fh:
- if 'i/antutu' in line.lower():
- parts = line.split(':')
- if len(parts) != 3:
- continue
- metric = parts[1].strip()
- value = int(parts[2].strip())
-
- # If times prameter > 1 the same metric will appear
- # multiple times in logcat -- we want to collet all of
- # them as they're from different iterations.
- metric_counts[metric] += 1
- if metric_counts[metric] > 1:
- metric += ' ' + str(metric_counts[metric])
-
- metrics[metric] = value
- return metrics
diff --git a/wlauto/workloads/antutu/com.arm.wlauto.uiauto.antutu.jar b/wlauto/workloads/antutu/com.arm.wlauto.uiauto.antutu.jar
deleted file mode 100644
index 4aecde1d..00000000
--- a/wlauto/workloads/antutu/com.arm.wlauto.uiauto.antutu.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/antutu/uiauto/build.sh b/wlauto/workloads/antutu/uiauto/build.sh
deleted file mode 100755
index 7cd2e7f0..00000000
--- a/wlauto/workloads/antutu/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.antutu.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.antutu.jar ..
-fi
diff --git a/wlauto/workloads/antutu/uiauto/build.xml b/wlauto/workloads/antutu/uiauto/build.xml
deleted file mode 100644
index a649f2fd..00000000
--- a/wlauto/workloads/antutu/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.antutu" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/antutu/uiauto/project.properties b/wlauto/workloads/antutu/uiauto/project.properties
deleted file mode 100644
index 4ab12569..00000000
--- a/wlauto/workloads/antutu/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-19
diff --git a/wlauto/workloads/antutu/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/antutu/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index 4d7a08d1..00000000
--- a/wlauto/workloads/antutu/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,380 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.antutu;
-
-import java.util.Set;
-import java.util.HashSet;
-import java.util.concurrent.TimeUnit;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.core.UiCollection;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "antutu";
- public static String TestButton5 = "com.antutu.ABenchMark:id/start_test_region";
- public static String TestButton6 = "com.antutu.ABenchMark:id/start_test_text";
- private static int initialTimeoutSeconds = 20;
-
- public void runUiAutomation() throws Exception{
- Bundle parameters = getParams();
-
- String version = parameters.getString("version");
- boolean enableSdTests = Boolean.parseBoolean(parameters.getString("enable_sd_tests"));
-
- int times = Integer.parseInt(parameters.getString("times"));
- if (times < 1) {
- times = 1;
- }
-
- if (version.equals("3.3.2")) { // version earlier than 4.0.3
- dismissReleaseNotesDialogIfNecessary();
- if(!enableSdTests){
- disableSdCardTests();
- }
- hitStart();
- waitForAndViewResults();
- }
- else {
- int iteration = 0;
- dismissNewVersionNotificationIfNecessary();
- while (true) {
- if(version.equals("6.0.1"))
- hitTestButtonVersion5(TestButton6);
- else if (version.equals("5.3.0")) {
- hitTestButton();
- hitTestButtonVersion5(TestButton5);
- }
- else if (version.equals("4.0.3")) {
- hitTestButton();
- hitTestButton();
- }
- else
- hitTestButton();
-
- if(version.equals("6.0.1"))
- {
- waitForVersion6Results();
- extractResults6();
- }
- else
- {
- waitForVersion4Results();
- viewDetails();
- extractResults();
- }
-
- iteration++;
- if (iteration >= times) {
- break;
- }
-
- returnToTestScreen(version);
- dismissRateDialogIfNecessary();
- testAgain();
- }
- }
-
- Bundle status = new Bundle();
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
- public boolean dismissNewVersionNotificationIfNecessary() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject closeButton = new UiObject(selector.text("Cancel"));
- if (closeButton.waitForExists(TimeUnit.SECONDS.toMillis(initialTimeoutSeconds))) {
- closeButton.click();
- sleep(1); // diaglog dismissal
- return true;
- } else {
- return false;
- }
- }
-
- public boolean dismissReleaseNotesDialogIfNecessary() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject closeButton = new UiObject(selector.text("Close"));
- if (closeButton.waitForExists(TimeUnit.SECONDS.toMillis(initialTimeoutSeconds))) {
- closeButton.click();
- sleep(1); // diaglog dismissal
- return true;
- } else {
- return false;
- }
- }
-
- public boolean dismissRateDialogIfNecessary() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject closeButton = new UiObject(selector.text("NOT NOW"));
- boolean dismissed = false;
- // Sometimes, dismissing the dialog the first time does not work properly --
- // it starts to disappear but is then immediately re-created; so may need to
- // dismiss it as long as keeps popping up.
- while (closeButton.waitForExists(2)) {
- closeButton.click();
- sleep(1); // diaglog dismissal
- dismissed = true;
- }
- return dismissed;
- }
-
- public void hitTestButton() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject test = new UiObject(selector.text("Test")
- .className("android.widget.Button"));
- test.waitForExists(initialTimeoutSeconds);
- test.click();
- sleep(1); // possible tab transtion
- }
-
- /* In version 5 of antutu, the test has been changed from a button widget to a textview */
-
- public void hitTestButtonVersion5(String id) throws Exception {
- UiSelector selector = new UiSelector();
- UiObject test = new UiObject(selector.resourceId(id)
- .className("android.widget.TextView"));
- test.waitForExists(initialTimeoutSeconds);
- test.click();
- sleep(1); // possible tab transtion
- }
-
-
- public void hitTest() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject test = new UiObject(selector.text("Test"));
- test.click();
- sleep(1); // possible tab transtion
- }
-
- public void disableSdCardTests() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject custom = new UiObject(selector.textContains("Custom"));
- custom.click();
- sleep(1); // tab transition
-
- UiObject sdCardButton = new UiObject(selector.text("SD card IO"));
- sdCardButton.click();
- }
-
- public void hitStart() throws Exception {
- UiSelector selector = new UiSelector();
- Log.v(TAG, "Start the test");
- UiObject startButton = new UiObject(selector.text("Start Test")
- .className("android.widget.Button"));
- startButton.click();
- }
-
- public void waitForVersion4Results() throws Exception {
- // The observed behaviour seems to vary between devices. On some platforms,
- // the benchmark terminates in the barchart screen; on others, it terminates in
- // details screen. So we have to wait for either and then act appropriatesl (on the barchart
- // screen a back button press is required to get to the details screen.
- UiSelector selector = new UiSelector();
- UiObject barChart = new UiObject(new UiSelector().className("android.widget.TextView")
- .text("Bar Chart"));
- UiObject detailsButton = new UiObject(new UiSelector().className("android.widget.Button")
- .text("Details"));
- for (int i = 0; i < 60; i++) {
- if (detailsButton.exists() || barChart.exists()) {
- break;
- }
- sleep(5);
- }
-
- if (barChart.exists()) {
- getUiDevice().pressBack();
- }
- }
-
- public void waitForVersion6Results() throws Exception {
- UiObject qrText = new UiObject(new UiSelector().className("android.widget.TextView")
- .text("QRCode of result"));
- for (int i = 0; i < 120; i++) {
- if (qrText.exists()) {
- break;
- }
- sleep(5);
- }
- }
-
- public void viewDetails() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject detailsButton = new UiObject(new UiSelector().className("android.widget.Button")
- .text("Details"));
- detailsButton.clickAndWaitForNewWindow();
- }
-
- public void extractResults6() throws Exception {
- //Overal result
- UiObject result = new UiObject(new UiSelector().resourceId("com.antutu.ABenchMark:id/tv_score_name"));
- if (result.exists()) {
- Log.v(TAG, String.format("ANTUTU RESULT: Overall Score: %s", result.getText()));
- }
-
- // individual scores
- extractSectionResults6("3d");
- extractSectionResults6("ux");
- extractSectionResults6("cpu");
- extractSectionResults6("ram");
- }
-
- public void extractSectionResults6(String section) throws Exception {
- UiSelector selector = new UiSelector();
- UiObject resultLayout = new UiObject(selector.resourceId("com.antutu.ABenchMark:id/hcf_" + section));
- UiObject result = resultLayout.getChild(selector.resourceId("com.antutu.ABenchMark:id/tv_score_value"));
-
- if (result.exists()) {
- Log.v(TAG, String.format("ANTUTU RESULT: %s Score: %s", section, result.getText()));
- }
- }
-
- public void extractResults() throws Exception {
- extractOverallResult();
- extractSectionResults();
- }
-
- public void extractOverallResult() throws Exception {
- UiSelector selector = new UiSelector();
- UiSelector resultTextSelector = selector.className("android.widget.TextView").index(0);
- UiSelector relativeLayoutSelector = selector.className("android.widget.RelativeLayout").index(1);
- UiObject result = new UiObject(selector.className("android.widget.LinearLayout")
- .childSelector(relativeLayoutSelector)
- .childSelector(resultTextSelector));
- if (result.exists()) {
- Log.v(TAG, String.format("ANTUTU RESULT: Overall Score: %s", result.getText()));
- }
- }
-
- public void extractSectionResults() throws Exception {
- UiSelector selector = new UiSelector();
- Set<String> processedMetrics = new HashSet<String>();
-
- actuallyExtractSectionResults(processedMetrics);
- UiScrollable resultsList = new UiScrollable(selector.className("android.widget.ScrollView"));
- // Note: there is an assumption here that the entire results list fits on at most
- // two screens on the device. Given then number of entries in the current
- // antutu verion and the devices we're dealing with, this is a reasonable
- // assumption. But if this changes, this will need to be adapted to scroll more
- // slowly.
- resultsList.scrollToEnd(10);
- actuallyExtractSectionResults(processedMetrics);
- }
-
- public void actuallyExtractSectionResults(Set<String> processedMetrics) throws Exception {
- UiSelector selector = new UiSelector();
-
- for (int i = 1; i < 8; i += 2) {
- UiObject table = new UiObject(selector.className("android.widget.TableLayout").index(i));
- for (int j = 0; j < 3; j += 2) {
- UiObject row = table.getChild(selector.className("android.widget.TableRow").index(j));
- UiObject metric = row.getChild(selector.className("android.widget.TextView").index(0));
- UiObject value = row.getChild(selector.className("android.widget.TextView").index(1));
-
- if (metric.exists() && value.exists()) {
- String metricText = metric.getText();
- if (!processedMetrics.contains(metricText)) {
- Log.v(TAG, String.format("ANTUTU RESULT: %s %s", metric.getText(), value.getText()));
- processedMetrics.add(metricText);
- }
- }
- }
- }
- }
-
- public void returnToTestScreen(String version) throws Exception {
- getUiDevice().pressBack();
- if (version.equals("5.3.0"))
- {
- UiSelector selector = new UiSelector();
- UiObject detailsButton = new UiObject(new UiSelector().className("android.widget.Button")
- .text("Details"));
- sleep(1);
- getUiDevice().pressBack();
- }
- }
-
- public void testAgain() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject retestButton = new UiObject(selector.text("Test Again")
- .className("android.widget.Button"));
- if (!retestButton.waitForExists(TimeUnit.SECONDS.toMillis(2))) {
- getUiDevice().pressBack();
- retestButton.waitForExists(TimeUnit.SECONDS.toMillis(2));
- }
- retestButton.clickAndWaitForNewWindow();
- }
-
- public void waitForAndViewResults() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject submitTextView = new UiObject(selector.text("Submit Scores")
- .className("android.widget.TextView"));
- UiObject detailTextView = new UiObject(selector.text("Detailed Scores")
- .className("android.widget.TextView"));
- UiObject commentTextView = new UiObject(selector.text("User comment")
- .className("android.widget.TextView"));
- boolean foundResults = false;
- for (int i = 0; i < 60; i++) {
- if (detailTextView.exists() || submitTextView.exists() || commentTextView.exists()) {
- foundResults = true;
- break;
- }
- sleep(5);
- }
-
- if (!foundResults) {
- throw new UiObjectNotFoundException("Did not see AnTuTu results screen.");
- }
-
- if (commentTextView.exists()) {
- getUiDevice().pressBack();
- }
- // Yes, sometimes, it needs to be done twice...
- if (commentTextView.exists()) {
- getUiDevice().pressBack();
- }
-
- if (detailTextView.exists()) {
- detailTextView.click();
- sleep(1); // tab transition
-
- UiObject testTextView = new UiObject(selector.text("Test")
- .className("android.widget.TextView"));
- if (testTextView.exists()) {
- testTextView.click();
- sleep(1); // tab transition
- }
-
- UiObject scoresTextView = new UiObject(selector.text("Scores")
- .className("android.widget.TextView"));
- if (scoresTextView.exists()) {
- scoresTextView.click();
- sleep(1); // tab transition
- }
- }
- }
-}
diff --git a/wlauto/workloads/apklaunch/__init__.py b/wlauto/workloads/apklaunch/__init__.py
deleted file mode 100644
index 6dc201fc..00000000
--- a/wlauto/workloads/apklaunch/__init__.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the 'License');
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an 'AS IS' BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=attribute-defined-outside-init
-import os
-
-from time import sleep
-
-from wlauto import Workload, Parameter
-from wlauto import File
-from wlauto.exceptions import ConfigError
-from wlauto.utils.android import ApkInfo
-
-
-class ApkLaunchWorkload(Workload):
- name = 'apklaunch'
- description = '''
- Installs and runs a .apk file, waits wait_time_seconds, and tests if the app
- has started successfully.
- '''
- supported_platforms = ['android']
-
- parameters = [
- Parameter('apk_file', description='Name to the .apk to run', mandatory=True),
- Parameter('uninstall_required', kind=bool, default=False,
- description='Set to true if the package should be uninstalled'),
- Parameter('wait_time_seconds', kind=int, default=0,
- description='Seconds to wait before testing if the app is still alive')
- ]
-
- def setup(self, context):
- apk_file = context.resolver.get(File(self, self.apk_file))
- self.package = ApkInfo(apk_file).package # pylint: disable=attribute-defined-outside-init
-
- self.logger.info('Installing {}'.format(apk_file))
- return self.device.install(apk_file)
-
- def run(self, context):
- self.logger.info('Starting {}'.format(self.package))
- self.device.execute('am start -W {}'.format(self.package))
-
- self.logger.info('Waiting {} seconds'.format(self.wait_time_seconds))
- sleep(self.wait_time_seconds)
-
- def update_result(self, context):
- app_is_running = bool([p for p in self.device.ps() if p.name == self.package])
- context.result.add_metric('ran_successfully', app_is_running)
-
- def teardown(self, context):
- if self.uninstall_required:
- self.logger.info('Uninstalling {}'.format(self.package))
- self.device.execute('pm uninstall {}'.format(self.package))
diff --git a/wlauto/workloads/applaunch/__init__.py b/wlauto/workloads/applaunch/__init__.py
deleted file mode 100644
index 8270e6e8..00000000
--- a/wlauto/workloads/applaunch/__init__.py
+++ /dev/null
@@ -1,188 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101
-
-from __future__ import division
-import os
-
-try:
- import jinja2
-except ImportError:
- jinja2 = None
-
-from wlauto import Workload, settings, Parameter
-from wlauto.exceptions import WorkloadError
-from wlauto.utils.hwmon import discover_sensors
-from wlauto.utils.misc import get_meansd
-from wlauto.utils.types import boolean, identifier, list_of_strs
-
-
-THIS_DIR = os.path.dirname(__file__)
-TEMPLATE_NAME = 'device_script.template'
-SCRIPT_TEMPLATE = os.path.join(THIS_DIR, TEMPLATE_NAME)
-
-APP_CONFIG = {
- 'browser': {
- 'package': 'com.android.browser',
- 'activity': '.BrowserActivity',
- 'options': '-d about:blank',
- },
- 'calculator': {
- 'package': 'com.android.calculator2',
- 'activity': '.Calculator',
- 'options': '',
- },
- 'calendar': {
- 'package': 'com.android.calendar',
- 'activity': '.LaunchActivity',
- 'options': '',
- },
-}
-
-
-class ApplaunchWorkload(Workload):
-
- name = 'applaunch'
- description = """
- Measures the time and energy used in launching an application.
-
- """
- supported_platforms = ['android']
-
- parameters = [
- Parameter('app', default='browser', allowed_values=['calculator', 'browser', 'calendar'],
- description='The name of the application to measure.'),
- Parameter('set_launcher_affinity', kind=bool, default=True,
- description=('If ``True``, this will explicitly set the affinity of the launcher '
- 'process to the A15 cluster.')),
- Parameter('times', kind=int, default=8,
- description='Number of app launches to do on the device.'),
- Parameter('measure_energy', kind=boolean, default=False,
- description="""
- Specfies wether energy measurments should be taken during the run.
-
- .. note:: This depends on appropriate sensors to be exposed through HWMON.
-
- """),
- Parameter('io_stress', kind=boolean, default=False,
- description='Specifies whether to stress IO during App launch.'),
- Parameter('io_scheduler', allowed_values=['noop', 'deadline', 'row', 'cfq', 'bfq'],
- description='Set the IO scheduler to test on the device.'),
- Parameter('cleanup', kind=boolean, default=True,
- description='Specifies whether to clean up temporary files on the device.'),
- ]
-
- def __init__(self, device, **kwargs):
- super(ApplaunchWorkload, self).__init__(device, **kwargs)
- if not jinja2:
- raise WorkloadError('Please install jinja2 Python package: "sudo pip install jinja2"')
- filename = '{}-{}.sh'.format(self.name, self.app)
- self.host_script_file = os.path.join(settings.meta_directory, filename)
- self.device_script_file = os.path.join(self.device.working_directory, filename)
- self._launcher_pid = None
- self._old_launcher_affinity = None
- self.sensors = []
-
- def on_run_init(self, context): # pylint: disable=W0613
- if self.measure_energy:
- self.sensors = discover_sensors(self.device, ['energy'])
- for sensor in self.sensors:
- sensor.label = identifier(sensor.label).upper()
-
- def setup(self, context):
- self.logger.debug('Creating script {}'.format(self.host_script_file))
- with open(self.host_script_file, 'w') as wfh:
- env = jinja2.Environment(loader=jinja2.FileSystemLoader(THIS_DIR))
- template = env.get_template(TEMPLATE_NAME)
- wfh.write(template.render(device=self.device, # pylint: disable=maybe-no-member
- sensors=self.sensors,
- iterations=self.times,
- io_stress=self.io_stress,
- io_scheduler=self.io_scheduler,
- cleanup=self.cleanup,
- package=APP_CONFIG[self.app]['package'],
- activity=APP_CONFIG[self.app]['activity'],
- options=APP_CONFIG[self.app]['options'],
- ))
- self.device_script_file = self.device.install(self.host_script_file)
- if self.set_launcher_affinity:
- self._set_launcher_affinity()
- self.device.clear_logcat()
-
- def run(self, context):
- self.device.execute('sh {}'.format(self.device_script_file), timeout=300, as_root=self.io_stress)
-
- def update_result(self, context): # pylint: disable=too-many-locals
- result_files = ['time.result']
- result_files += ['{}.result'.format(sensor.label) for sensor in self.sensors]
- metric_suffix = ''
- if self.io_stress:
- host_scheduler_file = os.path.join(context.output_directory, 'scheduler')
- device_scheduler_file = '/sys/block/mmcblk0/queue/scheduler'
- self.device.pull(device_scheduler_file, host_scheduler_file)
- with open(host_scheduler_file) as fh:
- scheduler = fh.read()
- scheduler_used = scheduler[scheduler.index("[") + 1:scheduler.index("]")]
- metric_suffix = '_' + scheduler_used
- for filename in result_files:
- self._extract_results_from_file(context, filename, metric_suffix)
-
- def teardown(self, context):
- if self.set_launcher_affinity:
- self._reset_launcher_affinity()
- if self.cleanup:
- self.device.remove(self.device_script_file)
-
- def _set_launcher_affinity(self):
- try:
- self._launcher_pid = self.device.get_pids_of('com.android.launcher')[0]
- result = self.device.execute('taskset -p {}'.format(self._launcher_pid), busybox=True, as_root=True)
- self._old_launcher_affinity = int(result.split(':')[1].strip(), 16)
-
- cpu_ids = [i for i, x in enumerate(self.device.core_names) if x == 'a15']
- if not cpu_ids or len(cpu_ids) == len(self.device.core_names):
- self.logger.debug('Cannot set affinity.')
- return
-
- new_mask = reduce(lambda x, y: x | y, cpu_ids, 0x0)
- self.device.execute('taskset -p 0x{:X} {}'.format(new_mask, self._launcher_pid), busybox=True, as_root=True)
- except IndexError:
- raise WorkloadError('Could not set affinity of launcher: PID not found.')
-
- def _reset_launcher_affinity(self):
- command = 'taskset -p 0x{:X} {}'.format(self._old_launcher_affinity, self._launcher_pid)
- self.device.execute(command, busybox=True, as_root=True)
-
- def _extract_results_from_file(self, context, filename, metric_suffix):
- host_result_file = os.path.join(context.output_directory, filename)
- device_result_file = self.device.path.join(self.device.working_directory, filename)
- self.device.pull(device_result_file, host_result_file)
-
- with open(host_result_file) as fh:
- if filename == 'time.result':
- values = [v / 1000 for v in map(int, fh.read().split())]
- _add_metric(context, 'time' + metric_suffix, values, 'Seconds')
- else:
- metric = filename.replace('.result', '').lower()
- numbers = iter(map(int, fh.read().split()))
- deltas = [(after - before) / 1000000 for before, after in zip(numbers, numbers)]
- _add_metric(context, metric, deltas, 'Joules')
-
-
-def _add_metric(context, metric, values, units):
- mean, sd = get_meansd(values)
- context.result.add_metric(metric, mean, units)
- context.result.add_metric(metric + ' sd', sd, units, lower_is_better=True)
diff --git a/wlauto/workloads/applaunch/device_script.template b/wlauto/workloads/applaunch/device_script.template
deleted file mode 100644
index 43207aac..00000000
--- a/wlauto/workloads/applaunch/device_script.template
+++ /dev/null
@@ -1,88 +0,0 @@
-#!{{ device.binaries_directory.rstrip('/') }}/sh
-
-
-{% for sensor in sensors %}
-GET_{{ sensor.label }}="cat {{ sensor.filepath }}"
-{% endfor %}
-
-LAUNCH_COMMAND="am start -W -n {{ package }}/{{ activity }} {{ options }}"
-STOP_COMMAND="am force-stop {{ package }}"
-TEMP_FILE=tmp.txt
-
-TIME_RESULT=""
-{% for sensor in sensors %}
-{{ sensor.label }}=""
-{% endfor %}
-
-cd {{ device.working_directory }}
-
-# esc esc down down down ENTER (this should bring up the apps menu)
-input keyevent 111
-sleep 1
-input keyevent 111
-sleep 1
-input keyevent 20
-sleep 1
-input keyevent 20
-sleep 1
-input keyevent 20
-sleep 1
-input keyevent 66
-sleep 1
-
-# Warm up caches.
-$LAUNCH_COMMAND
-$STOP_COMMAND
-$LAUNCH_COMMAND
-$STOP_COMMAND
-$LAUNCH_COMMAND
-$STOP_COMMAND
-
-{% if io_scheduler != None %}
-echo {{ io_scheduler }} > /sys/block/mmcblk0/queue/scheduler
-{% endif %}
-
-for i in $(busybox seq 1 {{ iterations }})
-do
- {% for sensor in sensors %}
- {{ sensor.label }}="${{ sensor.label }} `$GET_{{ sensor.label }}`"
- {% endfor %}
-
- {% if io_stress %}
- # Drop caches to get a cold start.
- sync; echo 3 > /proc/sys/vm/drop_caches
- # Run IO stress during App launch.
- busybox dd if=/dev/zero of=write.img bs=1048576 count=2000 conv=fsync > dd_write.txt 2>&1 &
- io_write=$!
- busybox dd if=/dev/block/mmcblk0 of=/dev/null bs=1048576 > dd_read.txt 2>&1 &
- io_read=$!
- {% endif %}
-
- $LAUNCH_COMMAND > $TEMP_FILE
-
- {% for sensor in sensors %}
- {{ sensor.label }}="${{ sensor.label }} `$GET_{{ sensor.label }}`"
- {% endfor %}
-
- TIME=`busybox awk '{if($1~"TotalTime") print $2}' $TEMP_FILE`
- TIME_RESULT="$TIME_RESULT $TIME"
- {% if cleanup %}
- rm $TEMP_FILE
- {% if io_stress %}
- kill $io_write
- kill $io_read
- rm -f write.img
- {% endif %}
- {% endif %}
-
- $STOP_COMMAND
- sleep 2
-done
-
-{% for sensor in sensors %}
-echo ${{ sensor.label }} > {{ sensor.label }}.result
-{% endfor %}
-echo $TIME_RESULT > time.result
-# esc esc down down down ENTER (this should bring up the apps menu)
-input keyevent 111
-sleep 1
diff --git a/wlauto/workloads/audio/__init__.py b/wlauto/workloads/audio/__init__.py
deleted file mode 100644
index 12d045e0..00000000
--- a/wlauto/workloads/audio/__init__.py
+++ /dev/null
@@ -1,103 +0,0 @@
-# Copyright 2012-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=E1101,W0201
-import os
-import time
-import urllib
-
-from wlauto import settings, Workload, Parameter
-from wlauto.exceptions import ConfigError
-from wlauto.utils.types import boolean
-
-
-DEFAULT_AUDIO_FILE_URL = "http://archive.org/download/PachelbelsCanoninD/Canon_in_D_Piano.mp3"
-
-
-class Audio(Workload):
-
- name = 'audio'
- description = """
- Audio workload plays an MP3 file using the built-in music player. By default,
- it plays Canon_in_D_Pieano.mp3 for 30 seconds.
-
- """
- supported_platforms = ['android']
-
- parameters = [
- Parameter('duration', kind=int, default=30,
- description='The duration the music will play for in seconds.'),
- Parameter('audio_file', default=os.path.join(settings.dependencies_directory, 'Canon_in_D_Piano.mp3'),
- description='''The (on-host) path to the audio file to be played.
-
- .. note:: If the default file is not present locally, it will be downloaded.
- '''),
- Parameter('perform_cleanup', kind=boolean, default=False,
- description='If ``True``, workload files on the device will be deleted after execution.'),
- Parameter('clear_file_cache', kind=boolean, default=True,
- description='Clear the the file cache on the target device prior to running the workload.')
- ]
-
- def init_resources(self, context):
- if not os.path.isfile(self.audio_file):
- self._download_audio_file()
-
- def setup(self, context):
- self.on_device_file = os.path.join(self.device.working_directory,
- os.path.basename(self.audio_file))
-
- self.device.push(self.audio_file, self.on_device_file, timeout=120)
-
- # Open the browser with default page
- self.device.execute('am start -n com.android.browser/.BrowserActivity about:blank')
- time.sleep(5)
-
- # Stop the browser if already running and wait for it to stop
- self.device.execute('am force-stop com.android.browser')
- time.sleep(5)
-
- # Clear the logs
- self.device.clear_logcat()
-
- # Clear browser cache
- self.device.execute('pm clear com.android.browser')
-
- if self.clear_file_cache:
- self.device.execute('sync')
- self.device.write_value('/proc/sys/vm/drop_caches', 3)
-
- # Start the background music
- self.device.execute('am start -W -S -n com.android.music/.MediaPlaybackActivity -d {}'.format(self.on_device_file))
-
- # Launch the browser to blank the screen
- self.device.execute('am start -W -n com.android.browser/.BrowserActivity about:blank')
- time.sleep(5) # Wait for browser to be properly launched
-
- def run(self, context):
- time.sleep(self.duration)
-
- def update_result(self, context):
- # Stop the browser
- self.device.execute('am force-stop com.android.browser')
- # Stop the audio
- self.device.execute('am force-stop com.android.music')
-
- def teardown(self, context):
- if self.perform_cleanup:
- self.device.remove(self.on_device_file)
-
- def _download_audio_file(self):
- self.logger.debug('Downloading audio file from {}'.format(DEFAULT_AUDIO_FILE_URL))
- urllib.urlretrieve(DEFAULT_AUDIO_FILE_URL, self.audio_file)
-
diff --git a/wlauto/workloads/autotest/__init__.py b/wlauto/workloads/autotest/__init__.py
deleted file mode 100644
index 16a2a05c..00000000
--- a/wlauto/workloads/autotest/__init__.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=attribute-defined-outside-init
-
-import os
-import re
-
-from wlauto import Workload, Parameter
-from wlauto.exceptions import WorkloadError
-from wlauto.utils.misc import which, check_output
-from wlauto.utils.types import arguments, numeric
-
-
-# Location of the power_LoadTest under the chroot
-#POWER_LOADTEST_DIR = '/mnt/host/source/src/third_party/autotest/files/client/site_tests/power_LoadTest'
-MARKER = '---------------------------'
-STATUS_REGEX = re.compile(r'^\S+\s+\[\s*(\S+)\s*\]')
-METRIC_REGEX = re.compile(r'^\S+\s+(\S+)\s*(\S+)')
-
-
-class ChromeAutotest(Workload):
-
- name = 'autotest'
- description = '''
- Executes tests from ChromeOS autotest suite
-
- .. note:: This workload *must* be run inside a CromeOS SDK chroot.
-
- See: https://www.chromium.org/chromium-os/testing/power-testing
-
- '''
- supported_platforms = ['chromeos']
-
- parameters = [
- Parameter('test', mandatory=True,
- description='''
- The test to be run
- '''),
- Parameter('test_that_args', kind=arguments, default='',
- description='''
- Extra arguments to be passed to test_that_invocation.
- '''),
- Parameter('run_timeout', kind=int, default=30 * 60,
- description='''
- Timeout, in seconds, for the test execution.
- '''),
- ]
-
- def setup(self, context):
- if self.device.os != 'chromeos':
- raise WorkloadError('{} only supports ChromeOS devices'.format(self.name))
- self.test_that = which('test_that')
- if not self.test_that:
- message = ('Could not find "test_that"; {} must be running in a ChromeOS SDK chroot '
- '(did you execute "cros_sdk"?)')
- raise WorkloadError(message.format(self.name))
- self.command = self._build_command()
- self.raw_output = None
- # make sure no other test is running
- self.device.execute('killall -9 autotest', check_exit_code=False)
-
- def run(self, context):
- self.logger.debug(self.command)
- self.raw_output, _ = check_output(self.command, timeout=self.run_timeout, shell=True)
-
- def update_result(self, context):
- if not self.raw_output:
- self.logger.warning('No power_LoadTest output detected; run failed?')
- return
- raw_outfile = os.path.join(context.output_directory, 'autotest-output.raw')
- with open(raw_outfile, 'w') as wfh:
- wfh.write(self.raw_output)
- context.add_artifact('autotest_raw', raw_outfile, kind='raw')
- lines = iter(self.raw_output.split('\n'))
- # Results are delimitted from the rest of the output by MARKER
- for line in lines:
- if MARKER in line:
- break
- for line in lines:
- match = STATUS_REGEX.search(line)
- if match:
- status = match.group(1)
- if status != 'PASSED':
- self.logger.warning(line)
- match = METRIC_REGEX.search(line)
- if match:
- try:
- context.result.add_metric(match.group(1), numeric(match.group(2)), lower_is_better=True)
- except ValueError:
- pass # non-numeric metrics aren't supported
-
- def _build_command(self):
- parts = [self.test_that, self.device.host, self.test]
- parts.append(str(self.test_that_args))
- return ' '.join(parts)
-
diff --git a/wlauto/workloads/bbench/__init__.py b/wlauto/workloads/bbench/__init__.py
deleted file mode 100644
index 08262ea0..00000000
--- a/wlauto/workloads/bbench/__init__.py
+++ /dev/null
@@ -1,244 +0,0 @@
-# Copyright 2012-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=E1101,W0201
-import os
-import time
-import urllib
-import tarfile
-import shutil
-import json
-import re
-
-from collections import defaultdict
-
-from wlauto import settings, Workload, Parameter, Alias, Executable
-from wlauto.exceptions import ConfigError
-from wlauto.utils.types import boolean
-
-DEFAULT_BBENCH_FILE = "http://bbench.eecs.umich.edu/bbench/bbench_2.0.tgz"
-DOWNLOADED_FILE_NAME = "bbench_2.0.tgz"
-BBENCH_SERVER_NAME = 'bbench_server'
-PATCH_FILES = os.path.join(os.path.dirname(__file__), "patches")
-DEFAULT_AUDIO_FILE = "http://archive.org/download/PachelbelsCanoninD/Canon_in_D_Piano.mp3"
-DEFAULT_AUDIO_FILE_NAME = 'Canon_in_D_Piano.mp3'
-
-
-class BBench(Workload):
-
- name = 'bbench'
- description = """
- BBench workload opens the built-in browser and navigates to, and
- scrolls through, some preloaded web pages and ends the workload by trying to
- connect to a local server it runs after it starts. It can also play the
- workload while it plays an audio file in the background.
-
- """
-
- summary_metrics = ['Mean Latency']
-
- parameters = [
- Parameter('with_audio', kind=boolean, default=False,
- description=('Specifies whether an MP3 should be played in the background during '
- 'workload execution.')),
- Parameter('server_timeout', kind=int, default=300,
- description='Specifies the timeout (in seconds) before the server is stopped.'),
- Parameter('force_dependency_push', kind=boolean, default=False,
- description=('Specifies whether to push dependency files to the device to the device '
- 'if they are already on it.')),
- Parameter('audio_file', default=os.path.join(settings.dependencies_directory, 'Canon_in_D_Piano.mp3'),
- description=('The (on-host) path to the audio file to be played. This is only used if '
- '``with_audio`` is ``True``.')),
- Parameter('perform_cleanup', kind=boolean, default=False,
- description='If ``True``, workload files on the device will be deleted after execution.'),
- Parameter('clear_file_cache', kind=boolean, default=True,
- description='Clear the the file cache on the target device prior to running the workload.'),
- Parameter('browser_package', default='com.android.browser',
- description='Specifies the package name of the device\'s browser app.'),
- Parameter('browser_activity', default='.BrowserActivity',
- description='Specifies the startup activity name of the device\'s browser app.'),
- ]
-
- aliases = [
- Alias('bbench_with_audio', with_audio=True),
- ]
-
- supported_platforms = ['android']
-
- def setup(self, context): # NOQA
- self.bbench_on_device = '/'.join([self.device.working_directory, 'bbench'])
- self.bbench_server_on_device = os.path.join(self.device.working_directory, BBENCH_SERVER_NAME)
- self.audio_on_device = os.path.join(self.device.working_directory, DEFAULT_AUDIO_FILE_NAME)
- self.index_noinput = 'file:///{}'.format(self.bbench_on_device) + '/index_noinput.html'
-
- if not os.path.isdir(os.path.join(self.dependencies_directory, "sites")):
- self._download_bbench_file()
- if self.with_audio and not os.path.isfile(self.audio_file):
- self._download_audio_file()
-
- if not os.path.isdir(self.dependencies_directory):
- raise ConfigError('Bbench directory does not exist: {}'.format(self.dependencies_directory))
- self._apply_patches()
-
- if self.with_audio:
- if self.force_dependency_push or not self.device.file_exists(self.audio_on_device):
- self.device.push(self.audio_file, self.audio_on_device, timeout=120)
-
- # Push the bbench site pages and http server to target device
- if self.force_dependency_push or not self.device.file_exists(self.bbench_on_device):
- self.logger.debug('Copying bbench sites to device.')
- self.device.push(self.dependencies_directory, self.bbench_on_device, timeout=300)
-
- # Push the bbench server
- host_binary = context.resolver.get(Executable(self, self.device.abi, 'bbench_server'))
- device_binary = self.device.install(host_binary)
- self.luanch_server_command = '{} {}'.format(device_binary, self.server_timeout)
-
- # Open the browser with default page
- self.device.execute('am start -n {}/{} about:blank'.format(self.browser_package, self.browser_activity))
- time.sleep(5)
-
- # Stop the browser if already running and wait for it to stop
- self.device.execute('am force-stop {}'.format(self.browser_package))
- time.sleep(5)
-
- # Clear the logs
- self.device.clear_logcat()
-
- # clear browser cache
- self.device.execute('pm clear {}'.format(self.browser_package))
- if self.clear_file_cache:
- self.device.execute('sync')
- self.device.write_value('/proc/sys/vm/drop_caches', 3)
-
- #On android 6+ the web browser requires permissions to access the sd card
- if self.device.os_version["sdk"] >= 23:
- self.device.execute("pm grant com.android.browser android.permission.READ_EXTERNAL_STORAGE")
- self.device.execute("pm grant com.android.browser android.permission.WRITE_EXTERNAL_STORAGE")
-
- # Launch the background music
- if self.with_audio:
- self.device.execute('am start -W -S -n com.android.music/.MediaPlaybackActivity -d {}'.format(self.audio_on_device))
-
- def run(self, context):
- # Launch the bbench
- self.device.execute('am start -n {}/{} {}'.format(self.browser_package, self.browser_activity, self.index_noinput))
- time.sleep(5) # WA1 parity
- # Launch the server waiting for Bbench to complete
- self.device.execute(self.luanch_server_command, self.server_timeout)
-
- def update_result(self, context):
- # Stop the browser
- self.device.execute('am force-stop {}'.format(self.browser_package))
-
- # Stop the music
- if self.with_audio:
- self.device.execute('am force-stop com.android.music')
-
- # Get index_no_input.html
- indexfile = os.path.join(self.device.working_directory, 'bbench/index_noinput.html')
- self.device.pull(indexfile, context.output_directory)
-
- # Get the logs
- output_file = os.path.join(self.device.working_directory, 'browser_bbench_logcat.txt')
- self.device.execute('logcat -v time -d > {}'.format(output_file))
- self.device.pull(output_file, context.output_directory)
-
- metrics = _parse_metrics(os.path.join(context.output_directory, 'browser_bbench_logcat.txt'),
- os.path.join(context.output_directory, 'index_noinput.html'),
- context.output_directory)
- for key, values in metrics:
- for i, value in enumerate(values):
- metric = '{}_{}'.format(key, i) if i else key
- context.result.add_metric(metric, value, units='ms', lower_is_better=True)
-
- def teardown(self, context):
- if self.perform_cleanup:
- self.device.execute('rm -r {}'.format(self.bbench_on_device))
- self.device.execute('rm {}'.format(self.audio_on_device))
-
- def _download_audio_file(self):
- self.logger.debug('Downloadling audio file.')
- urllib.urlretrieve(DEFAULT_AUDIO_FILE, self.audio_file)
-
- def _download_bbench_file(self):
- # downloading the file to bbench_dir
- self.logger.debug('Downloading bbench dependencies.')
- full_file_path = os.path.join(self.dependencies_directory, DOWNLOADED_FILE_NAME)
- urllib.urlretrieve(DEFAULT_BBENCH_FILE, full_file_path)
-
- # Extracting Bbench to bbench_dir/
- self.logger.debug('Extracting bbench dependencies.')
- tar = tarfile.open(full_file_path)
- tar.extractall(os.path.dirname(self.dependencies_directory))
-
- # Removing not needed files and the compressed file
- os.remove(full_file_path)
- youtube_dir = os.path.join(self.dependencies_directory, 'sites', 'youtube')
- os.remove(os.path.join(youtube_dir, 'www.youtube.com', 'kp.flv'))
- os.remove(os.path.join(youtube_dir, 'kp.flv'))
-
- def _apply_patches(self):
- self.logger.debug('Applying patches.')
- shutil.copy(os.path.join(PATCH_FILES, "bbench.js"), self.dependencies_directory)
- shutil.copy(os.path.join(PATCH_FILES, "results.html"), self.dependencies_directory)
- shutil.copy(os.path.join(PATCH_FILES, "index_noinput.html"), self.dependencies_directory)
- shutil.copy(os.path.join(PATCH_FILES, "bbc.html"),
- os.path.join(self.dependencies_directory, "sites", "bbc", "www.bbc.co.uk", "index.html"))
- shutil.copy(os.path.join(PATCH_FILES, "cnn.html"),
- os.path.join(self.dependencies_directory, "sites", "cnn", "www.cnn.com", "index.html"))
- shutil.copy(os.path.join(PATCH_FILES, "twitter.html"),
- os.path.join(self.dependencies_directory, "sites", "twitter", "twitter.com", "index.html"))
-
-
-def _parse_metrics(logfile, indexfile, output_directory): # pylint: disable=R0914
- regex_bbscore = re.compile(r'(?P<head>\w+)=(?P<val>\w+)')
- regex_bbmean = re.compile(r'Mean = (?P<mean>[0-9\.]+)')
- regex_pagescore_head = re.compile(r'metrics:(\w+),(\d+)')
- regex_pagescore_tail = re.compile(r',(\d+.\d+)')
- regex_indexfile = re.compile(r'<body onload="startTest\((.*)\)">')
- settings_dict = defaultdict()
-
- with open(indexfile) as fh:
- for line in fh:
- match = regex_indexfile.search(line)
- if match:
- settings_dict['iterations'], settings_dict['scrollDelay'], settings_dict['scrollSize'] = match.group(1).split(',')
- with open(logfile) as fh:
- results_dict = defaultdict(list)
- for line in fh:
- if 'metrics:Mean' in line:
- results_list = regex_bbscore.findall(line)
- results_dict['Mean Latency'].append(regex_bbmean.search(line).group('mean'))
- if results_list:
- break
- elif 'metrics:' in line:
- page_results = [0]
- match = regex_pagescore_head.search(line)
- name, page_results[0] = match.groups()
- page_results.extend(regex_pagescore_tail.findall(line[match.end():]))
- for val in page_results[:-2]:
- results_list.append((name, int(float(val))))
-
- setting_names = ['siteIndex', 'CGTPreviousTime', 'scrollDelay', 'scrollSize', 'iterations']
- for k, v in results_list:
- if k not in setting_names:
- results_dict[k].append(v)
-
- sorted_results = sorted(results_dict.items())
-
- with open(os.path.join(output_directory, 'settings.json'), 'w') as wfh:
- json.dump(settings_dict, wfh)
-
- return sorted_results
diff --git a/wlauto/workloads/bbench/bin/arm64/bbench_server b/wlauto/workloads/bbench/bin/arm64/bbench_server
deleted file mode 100755
index c33f5cfd..00000000
--- a/wlauto/workloads/bbench/bin/arm64/bbench_server
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/bbench/bin/armeabi/bbench_server b/wlauto/workloads/bbench/bin/armeabi/bbench_server
deleted file mode 100755
index c33f5cfd..00000000
--- a/wlauto/workloads/bbench/bin/armeabi/bbench_server
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/bbench/patches/bbc.html b/wlauto/workloads/bbench/patches/bbc.html
deleted file mode 100755
index 27d46619..00000000
--- a/wlauto/workloads/bbench/patches/bbc.html
+++ /dev/null
@@ -1,1412 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML+RDFa 1.0//EN" "http://www.w3.org/MarkUp/DTD/xhtml-rdfa-1.dtd">
-<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en-GB" xmlns:og="http://ogp.me/ns#" xmlns:fb="http://www.facebook.com/2008/fbml">
-<head profile="http://dublincore.org/documents/dcq-html/">
-
-<!--
- added this for bbench
--->
-
-<script type="text/javascript" src="../../../bbench.js"></script>
-
-<script type="text/javascript">
- var bb_start_time = new Date().getTime();
- var newSiteIndex = getURLParams("siteIndex");
- var allParams = getAllParams();
-</script>
-
-<meta name="dcterms.created" content="2011-04-15T17:49:01Z" />
-<meta http-equiv="content-type" content="text/html;charset=UTF-8" />
-<meta name="description" content="Breaking news, sport, TV, radio and a whole lot more. The BBC informs, educates and entertains - wherever you are, whatever your age." />
-<meta name="keywords" content="BBC, bbc.co.uk, bbc.com, Search, British Broadcasting Corporation, BBC iPlayer, BBCi" />
-<meta name="robots" content="noodp" />
-<meta property="fb:page_id" content="228735667216" />
-<!--
-<link rel="apple-touch-icon" href="../static.bbc.co.uk/wwhomepage-3.5/1.0.2/img/iphone.png"/>
--->
-<title>BBC - Homepage</title>
-
-<link rel="stylesheet" type="text/css" href="../static.bbc.co.uk/wwhomepage-3.5/1.0.2/css/bundles/main.css" media="screen,print" />
-<!--[if IE 6]><link rel="stylesheet" type="text/css" href="http://static.bbc.co.uk/wwhomepage-3.5/1.0.2/css/bundles/ie6.css" media="screen,print" />
-<![endif]--><!--[if IE 7]><link rel="stylesheet" type="text/css" href="http://static.bbc.co.uk/wwhomepage-3.5/1.0.2/css/bundles/ie7.css" media="screen,print" />
-<![endif]--><!--[if IE 8]><link rel="stylesheet" type="text/css" href="http://static.bbc.co.uk/wwhomepage-3.5/1.0.2/css/bundles/ie8.css" media="screen,print" />
-<![endif]--> <meta http-equiv="X-UA-Compatible" content="IE=8" /> <link rel="schema.dcterms" href="../purl.org/dc/terms/index.html" /> <link rel="index" href="a-z/index.html" title="A to Z" /> <link rel="help" href="help/index.html" title="BBC Help" /> <link rel="copyright" href="terms/index.html" title="Terms of Use" /> <link rel="icon" href="favicon.ico" type="image/x-icon" /> <meta name="viewport" content="width = 996" /> <link rel="stylesheet" type="text/css" href="../static.bbc.co.uk/frameworks/barlesque/1.8.19_/desktop/3/style/main.css" /> <link rel="stylesheet" type="text/css" href="../static.bbc.co.uk/bbcdotcom/0.3.41/style/3pt_ads.css" />
-
-<script type="text/javascript"> if (! window.gloader) { window.gloader = [ "glow", {map: "js/map.1.7.3.js"}]; } </script>
-
-<script type="text/javascript" src="js/gloader.0.1.6.js"></script>
-
-<!--
-<script type="text/javascript" src="js/require.js"></script>
--->
-
-<!--
-<script type="text/javascript"> bbcRequireMap = {"jquery-1":"http://static.bbc.co.uk/frameworks/jquery/0.1.6/sharedmodules/jquery-1.5.2","jquery-1.4":"http://static.bbc.co.uk/frameworks/jquery/0.1.6/sharedmodules/jquery-1.4","swfobject-2":"http://static.bbc.co.uk/frameworks/swfobject/0.1.3/sharedmodules/swfobject-2","demi-1":"http://static.bbc.co.uk/frameworks/demi/0.6.12/sharedmodules/demi-1","gelui-1":"http://static.bbc.co.uk/frameworks/gelui/0.6.8/sharedmodules/gelui-1","cssp!gelui-1/overlay":"http://static.bbc.co.uk/frameworks/gelui/0.6.8/sharedmodules/gelui-1/overlay.css","istats-1":"http://static.bbc.co.uk/frameworks/nedstat/0.1.33/sharedmodules/istats-1"}; require({ baseUrl: 'http://static.bbc.co.uk/', paths: bbcRequireMap, waitSeconds: 30 }); </script>
--->
-
-<script type="text/javascript" src="js/barlesque.js"></script>
- <!--[if (IE 6)|(IE 7)|(IE 8)]> <style type="text/css"> .blq-gvl-3 #blq-mast, body #blq-container.blq-gvl-3 .blq-foot-opaque { background: transparent; -ms-filter: "progid:DXImageTransform.Microsoft.gradient(startColorstr=#B2000000,endColorstr=#B2000000)"; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr=#B2000000,endColorstr=#B2000000); } .blq-gvl-3 #blq-mast.blq-mast-light { -ms-filter: "progid:DXImageTransform.Microsoft.gradient(startColorstr=#66000000,endColorstr=#66000000)"; filter: progid:DXImageTransform.Microsoft.gradient(startColorstr=#66000000,endColorstr=#66000000); } html body #blq-container #blq-nav {background: transparent;} .blq-gvl-3 #blq-mast #blq-search { padding: 5px 4px 4px 7px; } .blq-gvl-3 #blq-nav-main a { background-position: right 12px; } .blq-gvl-3 #blq-nav-main { background-position: 97% 18px; } .blq-morepanel-shown #blq-nav-m a { background-position: 83% -17px} </style> <![endif]--> <!--[if IE 6]> <style type="text/css"> .blq-clearfix {height:1%;} .blq-gvl-3 #blq-mast-home .blq-home {display:none;} .blq-gvl-3 #blq-autosuggest { margin-left:-7px; padding-bottom:8px} .blq-gvl-3 #blq-nav-main { background-position: 96% 17px; } .blq-gvl-3 #blq-mast-home img {visibility: hidden;} .blq-gvl-3 #blq-mast-home a {filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(enabled=true, sizingMethod='crop', src='img/light.png');} .blq-gvl-3 #blq-mast-home a {cursor:pointer} .blq-gvl-3 #blq-mast-home span.blq-home {height:32px; width:107px;} .blq-footer-image-light, .blq-footer-image-dark {width: 68px;height: 21px;display: block;} .blq-footer-image-dark img, .blq-footer-image-light img { visibility: hidden; } .blq-footer-image-light {filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(enabled=true, sizingMethod='scale', src='img/light.png');} .blq-footer-image-dark {filter:progid:DXImageTransform.Microsoft.AlphaImageLoader(enabled=true, sizingMethod='scale', src='img/dark.png');} </style> <script type="text/javascript"> try { 
document.execCommand("BackgroundImageCache",false,true); } catch(e) {} </script> <![endif]--> <!--[if lt IE 6]> <style> html body #blq-container #blq-foot { background: #4c4c4c; } html body #blq-container #blq-foot a, html body #blq-container #blq-foot p, html body #blq-container #blq-foot li { color: white; } </style> <![endif]-->
-
-<script type="text/javascript"> blq.setEnvironment('live'); if (blq.setFlagpole) { blq.setFlagpole('barlesque/nedstat', 'ON'); } </script> <script type="text/javascript" src="js/pulse.js"></script> <script type="text/javascript" src="js/conf.js"></script> <script type="text/javascript"> pulse.translations.intro = "We are always looking to improve the site and your opinions count."; pulse.translations.question = "Do you have a few minutes to tell us what you think about this site?"; pulse.translations.accept = "Yes"; pulse.translations.reject = "No"; </script> <link rel="stylesheet" href="../static.bbc.co.uk/frameworks/pulsesurvey/0.6.3/style/pulse.css" type="text/css"/>
-
-<!--[if gte IE 6]> <style type="text/css"> .pulse-pop li{display:inline;width:40px} </style> <![endif]--> <!--[if IE 6]> <style type="text/css"> .pulse-pop li{display:inline;width:40px} .pulse-pop #pulse-q{background:url(http://static.bbc.co.uk/frameworks/pulsesurvey/0.6.3/img/pulse_bg.gif) no-repeat} .pulse-pop #pulse-a{background:url(http://static.bbc.co.uk/frameworks/pulsesurvey/0.6.3/img/pulse_bg.gif) bottom no-repeat;} </style> <![endif]--> <!--[if IE 7]> <style type="text/css"> .pulse-pop #pulse-a{zoom:1} </style> <![endif]--> <!-- BBCDOTCOM ipIsAdvertiseCombined: true journalismVariant: false adsEnabled: true flagpole: not set -->
- <script type="text/javascript">
- if(typeof(bbcdotcom) == "undefined") bbcdotcom = {};
- </script>
- <script type="text/javascript">
- /* Note all JS comments should use asterisks so as to work with Spectrum's simple minification */
- if(typeof BBC == "undefined") {var BBC = {}}; BBC.adverts = {setZone: function(){}, configure: function(){}, write: function(){}, show: function(){}};
- </script>
- <script type="text/javascript" src="js/bbccom.js"></script>
-
-<!--
- <script type="text/javascript">
- //<![CDATA[
- (function(){
- var zone = "3pt_zone_file";
- var zoneAuto = false;
- var siteAuto = false;
- var keywordsAuto = false;
- var zoneOverride = false;
-
-
-
- if(/[?|&]zone=((?!preview)\w*\/*\w+)(&|$)/.test(window.location.search)) {
- zone = RegExp.$1;
- zoneOverride = false;
- };
-
- if(/[?|&]zone=(http:\/\/.+(\.bbc\.co\.uk){1}.*(bbc\.com){1}.*\.js)(&|$)/.test(window.location.search)) {
- if (RegExp.$1.indexOf("http://www.bbc.co.uk/") === -1) {
- zone = RegExp.$1;
- zoneOverride = true;
- };
- };
-
- /* Added by Barlesque, so new static root can be passed post-load scripts. */
- BBC.adverts.setScriptRoot("http://static.bbc.co.uk/bbcdotcom/0.3.41/script/");
-
- BBC.adverts.init({
- domain: "www.bbc.co.uk",
- location: window.location.pathname,
- zoneVersion: zone,
- zoneAuto: zoneAuto,
- siteAuto: siteAuto,
- keywordsAuto: keywordsAuto,
- zoneOverride: zoneOverride,
- zoneReferrer: document.referrer,
- inclusionstyle: "direct_ads"
- });
- })();
- //]]>
- </script>
--->
-<script type="text/javascript">
-//<![CDATA[
-if (!window.console) {
- window.console = { log: function() { return; }, warn: function() { return; }, info: function() {return; }, error: function() { return; }, debug: function() { return; }
-};
- }
-if(typeof(Homepage) === "undefined") {
- Homepage = {};
- Homepage.VSGet = function() {
- return '';
- };
- // Homepage.stat;
- Homepage.edition = 'i';
- Homepage.env = 'live';
- Homepage.fetchBbcCookie = function () {
- if (!navigator.cookieEnabled) {
- return '';
- }
-
- var nameEQ = "BBC-UID=";
- var ca = document.cookie.split(';');
- for(i=0;i < ca.length;i++) {
- var c = ca[i];
- while (c.charAt(0)===' ') { c = c.substring(1,c.length); }
- if (c.indexOf(nameEQ) === 0) {
- return c.substring(nameEQ.length,c.length);
- }
- }
- return '';
- };
-}
-
-if(typeof(wwhomepage) === 'undefined'){
- wwhomepage = {};
- wwhomepage.configs = [];
- if(Homepage !== 'undefined'){
- wwhomepage.legacy = Homepage;
- }
-
- wwhomepage.createObject = function(strName) {
- var nameParts = strName.split("."),
- i = 0,
- len = nameParts.length,
- obj = window;
-
- for (; i < len; i++){
- if (obj[nameParts[i]] === undefined) {
- obj[nameParts[i]] = {};
- }
-
- obj = obj[nameParts[i]];
- }
-
- return obj;
- }
-}
-
-
-
-//(function(){
- //require({
- //baseUrl:"http://static.bbc.co.uk/wwhomepage-3.5/1.0.2/js/",
- //paths:{"wwhomepage.ui.carouselfactory":"bundles/main", "wwhomepage.utils.popup":"bundles/main", "wwhomepage.ui.drawers":"bundles/main", "wwhomepage.ui.tabs":"bundles/main", "wwhomepage.ui.weather":"bundles/main","lib.external.moodular":"bundles/main", "lib.external.jqueryEffects":"bundles/main"}
- //});
-//})()
-//]]>
-</script>
-
-</head>
-
-<!--
-<body id="us" class="greenMode promoSpanTwoColumnsLeft blq_hp">
--->
-<body id="us" class="greenMode promoSpanTwoColumnsLeft blq_hp" onload="siteTest(bb_site[newSiteIndex] + allParams, newSiteIndex, bb_start_time, 'bbc')">
- <!-- NEDSTAT -->
-<!-- Begin iStats 20100118 (UX-CMC 1.1009.3) -->
-<!--
-<script type="text/javascript">
-// <![CDATA[
-function sitestat(n){var j=document,f=j.location,b="";if(j.cookie.indexOf("st_ux=")!=-1){var k=j.cookie.split(";");var e="st_ux",h=document.domain,a="http://www.bbc.co.uk/";if(typeof ns_!="undefined"&&typeof ns_.ux!="undefined"){e=ns_.ux.cName||e;h=ns_.ux.cDomain||h;a=ns_.ux.cPath||a}for(var g=0,f=k.length;g<f;g++){var m=k[g].indexOf("st_ux=");if(m!=-1){b="&"+unescape(k[g].substring(m+6))}}document.cookie=e+"=; expires="+new Date(new Date().getTime()-60).toGMTString()+"; path="+a+"; domain="+h}ns_pixelUrl=n;n=ns_pixelUrl+"&ns__t="+(new Date().getTime())+"&ns_c="+((j.characterSet)?j.characterSet:j.defaultCharset)+"&ns_ti="+escape(j.title)+b+"&ns_jspageurl="+escape(f&&f.href?f.href:j.URL)+"&ns_referrer="+escape(j.referrer);if(n.length>2000&&n.lastIndexOf("&")){n=n.substring(0,n.lastIndexOf("&")+1)+"ns_cut="+n.substring(n.lastIndexOf("&")+1,n.lastIndexOf("=")).substring(0,40)}(j.images)?new Image().src=n:j.write('<p><i'+'mg src="'+n+'" height="1" width="1" alt="" /></p>')};
-if (document.cookie.indexOf('NO-SA=') == -1) {sitestat("//sa.bbc.co.uk/bbc/bbc/s?name=home.page&geo_edition=us&ml_name=barlesque&app_type=web&language=en-GB&ml_version=0.1.33");}
-// ]]>
-</script>
--->
-<noscript><p class="blq-hide"><img src="img/s.gif" height="1" width="1" alt="" /></p></noscript>
-<!-- End iStats (UX-CMC) -->
-<!-- END NEDSTAT -->
- <div id="blq-global" class="blq-gvl-3"> <div id="blq-pre-mast" xml:lang="en-GB"> <!-- Pre mast --> <div id="bbccom_leaderboard_container">
- <div id="bbccom_leaderboard" class="bbccom_display_none">
- <script type="text/javascript">BBC.adverts.write("leaderboard", true);</script>
- </div>
- <script type="text/javascript">BBC.adverts.show("leaderboard");</script>
- </div>
- </div> </div> <div id="blq-container-outer"> <div id="blq-container" class="blq-lang-en-GB blq-dotcom blq-gvl-3"> <div id="blq-container-inner" xml:lang="en-GB"> <div id="blq-acc" class="blq-rst"> <p id="blq-mast-home" class="blq-no-images"><a href="http://www.bbc.co.uk/" hreflang="en-GB"> <span class="blq-home">British Broadcasting Corporation</span><img src="../static.bbc.co.uk/frameworks/barlesque/1.8.19_/desktop/3/img/blocks/light.png" alt="BBC" id="blq-blocks" width="84" height="24" /><span class="blq-span">Home</span></a></p> <p class="blq-hide"><strong><a id="page-top">Accessibility links</a></strong></p> <ul id="blq-acc-links"> <li class="blq-hide"><a href="#blq-content">Skip to content</a></li> <li class="blq-hide"><a href="#blq-nav-main">Skip to bbc.co.uk navigation</a></li> <li class="blq-hide"><a href="#blq-search">Skip to bbc.co.uk search</a></li> <li id="blq-acc-help"><a href="http://www.bbc.co.uk/help/">Help</a></li> <li class="blq-hide"><a href="http://www.bbc.co.uk/accessibility/">Accessibility Help</a></li> </ul> </div> <div id="blq-main" class="blq-clearfix"> <h1 class="hide">BBC Homepage</h1>
-<div class="colA column">
-</div><div class="colB column">
-<div id="promo" class="module">
- <h2 id="promo_title" class="draggable">
- <a id="promo_title_link" href="http://news.bbc.co.uk/" title="Go to Top stories">
- Top stories <span class="raquo">&raquo;</span>
- </a>
- </h2>
-
-
-
- <div id="promo_container" class="container">
- <div class="contentBlocks cbg0"><div id="promo_carousel">
- <div id="carousel_items" class="carousel">
- <dl class="news">
- <dt><a href="http://www.bbc.co.uk/news/health-13381292">Drugs &#039;reduce&#039; HIV transmission</a></dt>
- <dd class="desc">An HIV positive person can reduce the risk of spreading the virus to uninfected partners by 96% if they are given anti-retroviral drugs immediately, according to US scientists.</dd>
- <dd class="img"><a href="http://www.bbc.co.uk/news/health-13381292"><img src="../static.bbc.co.uk/wwhomepage-3.5/ic/news/432-259/52689000/gif/_52689818_c0078971-hiv_capsid_release.gif" alt="HIV virus" width="432" height="259" /></a></dd>
- </dl>
- <dl class="sport">
- <dt><a href="http://news.bbc.co.uk/sport1/hi/football/13381508.stm">Uefa charges Busquets over racism</a></dt>
- <dd class="desc">Uefa launches a disciplinary case against Barcelona&#039;s Sergio Busquets over alleged racial abuse in the Champions League tie with Real Madrid.</dd>
- <dd class="img"><a href="http://news.bbc.co.uk/sport1/hi/football/13381508.stm"><img src="../static.bbc.co.uk/wwhomepage-3.5/ic/news/432-259/52685000/jpg/_52685906_011803887-1.jpg" alt="Sergio Busquets (left) and Marcelo (right)" width="432" height="259" /></a></dd>
- </dl>
- <dl class="travel">
- <dt><a href="http://www.bbc.com/travel/feature/20110511-st-petersburg-goes-back-to-the-future">Back to the future of St Petersburg</a></dt>
- <dd class="desc">Hotels, restaurants and museums that highlight two different cities, one new and the other historic, both splendid.</dd>
- <dd class="img"><a href="http://www.bbc.com/travel/feature/20110511-st-petersburg-goes-back-to-the-future"><img src="../static.bbc.co.uk/wwtravel/img/ic/432-258/130513460118259240140_1.jpg" alt="" width="432" height="259" /></a></dd>
- </dl>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- var carousel = document.getElementById("carousel_items");
- carousel.style.height = '259px';
- carousel.style.overflow = 'hidden';
- if(carousel.getElementsByTagName('a').length >=1){
- var firstItemHref = carousel.getElementsByTagName('a')[0].getAttribute('href');
- }
- //now that we have js, change the text in the promo
- document.getElementById('promo_title').innerHTML = '<a title="Go to Top News story" href="'+firstItemHref+'" id="promo_title_link">' +
- 'Top News story <span class="raquo">»</span>' +
- '</a>';
-
-
- //require(["wwhomepage.ui.carouselfactory"], function (carousel) {
- //require.ready(function(){
- //carousel.createCarousel('#carousel_items',{auto:true, speed:800, api:true, controls:'previewNav',item:'dl',dispTimeout:5000,promoCarousel:true});
- //})
- //});
- //]]>
- </script>
-</div>
-</div>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- //]]>
- </script>
- </div>
-<div id="news" class="module">
- <h2 id="news_title" class="draggable">
- <a id="news_title_link" href="http://news.bbc.co.uk/" title="Go to News">
- News <span class="raquo">&raquo;</span>
- </a>
- </h2>
-
-
-
- <div id="news_container" class="container">
- <div class="contentBlocks cbg0"><div id="news_hero" class="hero contentBlock">
- <h3 id="news_hero_title" class="draggable">Top Story</h3>
-<div class="title image_first">
-
- <a href="http://www.bbc.co.uk/news/world-us-canada-13379298" class="heroLink">
- <img src="../static.bbc.co.uk/wwhomepage-3.5/ic/news/304-171/52689000/jpg/_52689400_011907423-1.jpg" alt="Osama Bin Laden's compound in Abbottabad" width="304" height="171" />
- <span>Fears for Bin Laden US Navy Seals</span>
- </a>
-
-
- <p class="summary" id="news_hero_summary">US Defence Secretary Robert Gates says security around the special forces unit that killed Osama Bin Laden will be tightened after it raised concerns.</p>
- </div>
-</div>
-<div id="news_hero_mini" class="hero contentBlock">
-<div class="title image_first">
-
- <a href="http://www.bbc.co.uk/news/world-africa-13379433" class="heroLink">
- <img src="../static.bbc.co.uk/wwhomepage-3.5/ic/news/144-81/52683000/jpg/_52683750_011954295-1.jpg" alt="Picture taken on a government tour showing a man, who officials said was wounded in an air strike, at a hospital in Tripoli May 12, 2011." width="144" height="81" />
- <span>Nato in new raid on Gaddafi base</span>
- </a>
-
-
- <p class="summary" id="news_hero_mini_summary">Nato air strikes again target Col Muammar Gaddafi&#039;s compound, hours after Libyan state TV showed footage purportedly of the leader in Tripoli.</p>
- </div>
-</div>
-<div id="news_moreTopStories" class="list contentBlock">
-<ul class="blq-clearfix">
-<li class="first highlight"><a href="http://www.bbc.co.uk/news/world-middle-east-13383002">Syria &#039;to halt firing on rallies&#039;</a></li><li class=" highlight"><a href="http://www.bbc.co.uk/news/technology-13374048">Facebook exposed in Google smear</a></li><li><a href="http://www.bbc.co.uk/news/world-europe-12321549">Demjanjuk guilty of Nazi murders</a></li><li><a href="http://www.bbc.co.uk/news/business-13380471">Bernanke warns on US debt impasse</a></li><li class="last"><a href="http://www.bbc.co.uk/news/world-us-canada-13384224">Obama seeks new term for FBI head</a></li></ul>
-</div>
-</div>
- </div>
- </div>
-<div id="business" class="module">
- <h2 id="business_title" class="draggable">
- <a id="business_title_link" href="http://www.bbc.com/news/business" title="Go to Business">
- Business <span class="raquo">&raquo;</span>
- </a>
- </h2>
-
-
-
- <div id="business_container" class="container">
- <div class="contentBlocks cbg0"><div id="business_moreTopStories" class="list contentBlock">
- <h3 id="business_moreTopStories_title" class="draggable"><a href="http://news.bbc.co.uk/1/hi/business/">Top<span class="hidden"> Business and Money</span> Stories</a>
- </h3>
-<ul class="blq-clearfix">
-<li class="first highlight"><a href="http://www.bbc.co.uk/news/business-13380471">Bernanke warns on US debt impasse</a></li><li class="last"><a href="http://www.bbc.co.uk/news/business-13384455">Egypt asks IMF to prop up deficit</a></li></ul>
-</div>
-<div id="business_marketData" class="marketData table list contentBlock">
- <h3 id="business_marketData_title" class="draggable"><a href="http://newsvote.bbc.co.uk/1/shared/fds/hi/business/market_data/overview/default.stm">Market Data</a>
- <abbr title="Thursday">Thu</abbr>, 12 <abbr title="May">May</abbr> 2011 21:59 BST</h3>
-
- <div id="marketwatch">
- <h4><a href="http://www.bbc.co.uk/news/business/market_data/overview/default.stm">Marketwatch</a></h4>
- <span>(min delay 15 mins)</span>
- <span id="marketwatchTicker">
- <a href="http://www.bbc.co.uk/news/business/market_data/ticker/markets/default.stm">Marketwatch Ticker</a>
- </span>
- </div>
- <table id="business_marketData_items" summary="Current market values for major international indexes" class="marketdata">
- <thead>
- <tr>
- <th>Market Name</th>
- <th>Current Value</th>
- <th>Movement</th>
- <th>Change</th>
- <th>Percentage Change</th>
- </tr>
- </thead>
- <tbody>
- <tr class="up">
- <td class="marketname">
- <a href="http://www.bbc.co.uk/news/business/market_data/stockmarket/2/">Dow Jones</a>
- </td>
- <td class="current">
- 12695.92 </td>
- <td class="movement">
- <span>+</span>
- </td>
- <td class="change">
- 65.89 </td>
- <td class="percentageChange">
- 0.52&#37;
- </td>
-</tr>
-<tr class="up dk">
- <td class="marketname">
- <a href="http://www.bbc.co.uk/news/business/market_data/stockmarket/12122/">Nasdaq</a>
- </td>
- <td class="current">
- 2863.04 </td>
- <td class="movement">
- <span>+</span>
- </td>
- <td class="change">
- 17.98 </td>
- <td class="percentageChange">
- 0.63&#37;
- </td>
-</tr>
-<tr class="down">
- <td class="marketname">
- <a href="http://www.bbc.co.uk/news/business/market_data/stockmarket/3/">FTSE 100</a>
- </td>
- <td class="current">
- 5944.96 </td>
- <td class="movement">
- <span>-</span>
- </td>
- <td class="change">
- -31.04 </td>
- <td class="percentageChange">
- -0.52&#37;
- </td>
-</tr>
-<tr class="down dk">
- <td class="marketname">
- <a href="http://www.bbc.co.uk/news/business/market_data/stockmarket/18/">Dax</a>
- </td>
- <td class="current">
- 7443.95 </td>
- <td class="movement">
- <span>-</span>
- </td>
- <td class="change">
- -51.1 </td>
- <td class="percentageChange">
- -0.68&#37;
- </td>
-</tr>
-<tr class="down">
- <td class="marketname">
- <a href="http://www.bbc.co.uk/news/business/market_data/stockmarket/1/">Cac 40</a>
- </td>
- <td class="current">
- 4023.29 </td>
- <td class="movement">
- <span>-</span>
- </td>
- <td class="change">
- -34.79 </td>
- <td class="percentageChange">
- -0.86&#37;
- </td>
-</tr>
- </tbody>
- </table>
-
- <script type="text/javascript">
- //<![CDATA[
- //require(["wwhomepage.utils.popup"], function (popupManager) {
- //require.ready(function(){
- //popupManager.registerPopup('#marketwatchTicker a',{windowName:'popUpWindow',width:'450',height:'557',left:'300',top:'100'});
- //})
- //});
- //]]>
- </script>
-</div>
-
-</div>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- //]]>
- </script>
- </div>
-<div id="sport" class="module">
- <h2 id="sport_title" class="draggable">
- <a id="sport_title_link" href="http://www.bbc.co.uk/sport" title="Go to Sport">
- Sport <span class="raquo">&raquo;</span>
- </a>
- </h2>
-
-
-
- <div id="sport_container" class="container">
- <div class="contentBlocks cbg0"><div id="sport_hero_mini" class="hero contentBlock">
-<div class="title title_first">
-
- <a href="http://news.bbc.co.uk/sport1/hi/golf/13381193.stm" class="heroLink">
- <span>Woods out of Players Championship</span>
- <img src="../static.bbc.co.uk/wwhomepage-3.5/ic/news/144-81/52685000/jpg/_52685275_52685269.jpg" alt="Tiger Woods" width="144" height="81" />
- </a>
-
-
- <p class="summary" id="sport_hero_mini_summary">Tiger Woods pulls out of the Players Championship midway through his first round with a recurrence of his knee and Achilles tendon injuries.</p>
- </div>
-</div>
-<div id="sport_moreTopStories" class="list contentBlock">
-<ul class="blq-clearfix">
-<li class="first highlight"><a href="http://news.bbc.co.uk/sport1/hi/football/13378374.stm">Dalglish given new Liverpool deal</a></li><li><a href="http://news.bbc.co.uk/sport1/hi/tennis/13375453.stm">Federer beaten by Gasquet in Rome</a></li><li><a href="http://news.bbc.co.uk/sport1/hi/football/europe/9484574.stm">Blanc backed in France race row</a></li><li class="last"><a href="http://news.bbc.co.uk/sport1/hi/football/13324684.stm">Nott&#039;m Forest 0-0 Swansea</a></li></ul>
-</div>
-</div>
- </div>
- </div>
-<div id="drawers" class="module">
-
-
- <div id="drawers_container" class="container">
- <div class="contentBlocks cbg0"><h2 id="drawers_entertainment_title">
- <a href="http://news.bbc.co.uk/2/hi/entertainment/">
- Entertainment &amp; Arts <span class="raquo">»</span>
- </a>
-</h2> <div id="drawers_from" class="contentBlock">
- <div id="bbccom_module_0002l" class="bbccom_display_none bbccom_module">
- <div id="moduleAdvertContent_0002l" class="moduleAdvertContent">
- <div id="bbccom_module_0002l_adlabel" class="bbccom_text bbccom_module_adlabel">In Association With</div>
- <div id="bbccom_module_0002l_image" class="bbccom_image bbccom_module_image">
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.write("module_0002l",false, {});
- //]]>
- </script>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.show("module_0002l");
- //]]>
- </script>
- </div>
- </div>
- </div>
-<div id="drawers_entertainment_hero" class="hero contentBlock">
- <h3 id="drawers_entertainment_hero_title" class="draggable">Top Story</h3>
-<div class="title image_first">
-
- <a href="http://www.bbc.co.uk/news/entertainment-arts-13376536" class="heroLink">
- <img src="../static.bbc.co.uk/wwhomepage-3.5/ic/news/296-167/52680000/jpg/_52680897_kevin1_464afp.jpg" alt="We Need to Talk about Kevin director Lynne Ramsay (centre) with actors John C Reilly and Tilda Swinton" width="296" height="167" />
- <span>Cannes gets talking about Kevin</span>
- </a>
-
-
- <p class="summary" id="drawers_entertainment_hero_summary">Critics heap praise on We Need to Talk about Kevin, the only British title in contention for the main award at the Cannes Film Festival.</p>
- </div>
-<div class="list">
- <ul>
- <li class="first">
-<a href="http://www.bbc.co.uk/news/entertainment-arts-13371502">Warhol self-portrait gets $38.4m</a>
-</li><li>
-<a href="http://www.bbc.co.uk/news/entertainment-arts-13360664">Michael announces orchestral tour</a>
-</li> </ul>
-</div>
-</div>
-<div id="drawers_entertainment_list" class="list contentBlock">
-<ul class="blq-clearfix">
-<li class="first"><a href="http://www.bbc.co.uk/newsbeat/13371625">Lindsay Lohan sentenced to jail</a></li><li><a href="http://www.bbc.co.uk/news/entertainment-arts-13374885">Radio listening reaches new high</a></li><li><a href="http://www.bbc.co.uk/news/entertainment-arts-13357481">Eurovision hopefuls whittled down</a></li><li><a href="http://www.bbc.co.uk/news/entertainment-arts-13372529">Classic Brits &#039;may honour shows&#039;</a></li><li><a href="http://www.bbc.co.uk/news/entertainment-arts-13365345">King&#039;s Speech wins trio of awards</a></li><li><a href="http://www.bbc.co.uk/news/entertainment-arts-13157885">Burgess archive yields lost gems</a></li><li class="last"><a href="http://www.bbc.co.uk/news/entertainment-arts-13358270">Warhol&#039;s Jackies scrapes $20m tag</a></li></ul>
-</div>
-</div>
-<div class="contentBlocks cbg1"><h2 id="drawers_health_title">
- <a href="http://news.bbc.co.uk/2/hi/health">
- Health <span class="raquo">»</span>
- </a>
-</h2><div id="drawers_health_hero" class="hero contentBlock">
- <h3 id="drawers_health_hero_title" class="draggable">Top Story</h3>
-<div class="title image_first">
-
- <a href="http://www.bbc.co.uk/news/health-13381292" class="heroLink">
- <img src="../static.bbc.co.uk/wwhomepage-3.5/ic/news/296-167/52689000/gif/_52689818_c0078971-hiv_capsid_release.gif" alt="HIV virus" width="296" height="167" />
- <span>Drugs &#039;reduce&#039; HIV transmission</span>
- </a>
-
-
- <p class="summary" id="drawers_health_hero_summary">An HIV positive person can reduce the risk of spreading the virus to uninfected partners by 96% if they are given anti-retroviral drugs immediately, according to US scientists.</p>
- </div>
-<div class="list">
- <ul>
- <li class="first">
-<a href="http://www.bbc.co.uk/news/health-13362927">Monkey HIV vaccine &#039;effective&#039;</a>
-</li><li>
-<a href="http://www.bbc.co.uk/news/health-13345860">Selenium &#039;does not stop cancer&#039;</a>
-</li> </ul>
-</div>
-</div>
-<div id="drawers_health_list" class="list contentBlock">
-<ul class="blq-clearfix">
-<li class="first"><a href="http://www.bbc.co.uk/news/uk-13354294">Rise in number of &#039;legal highs&#039;</a></li><li><a href="http://www.bbc.co.uk/news/health-13352074">Coffee a breast cancer preventer?</a></li><li><a href="http://www.bbc.co.uk/news/health-13359281">Dengue fever cases &#039;double in UK&#039;</a></li><li><a href="http://www.bbc.co.uk/news/health-13351681">Children &#039;recall early memories&#039;</a></li><li><a href="http://www.bbc.co.uk/news/health-13336986">Breastfeeding &#039;helps behaviour&#039;</a></li><li><a href="http://www.bbc.co.uk/news/health-13295300">Gay men &#039;have higher cancer rate&#039;</a></li><li class="last"><a href="http://www.bbc.co.uk/news/world-us-canada-13339512">Face transplant man &#039;can smell&#039;</a></li></ul>
-</div>
-</div>
-<div class="contentBlocks cbg2"><h2 id="drawers_technology_title">
- <a href="http://www.bbc.co.uk/news/technology/">
- Technology <span class="raquo">»</span>
- </a>
-</h2><div id="drawers_technology_hero" class="hero contentBlock">
- <h3 id="drawers_technology_hero_title" class="draggable">Top Story</h3>
-<div class="title image_first">
-
- <a href="http://www.bbc.co.uk/news/technology-13374048" class="heroLink">
- <img src="../static.bbc.co.uk/wwhomepage-3.5/ic/news/296-167/52688000/jpg/_52688304_facebookimage.jpg" alt="Man's face next to Facebook logo" width="296" height="167" />
- <span>Facebook exposed in Google smear</span>
- </a>
-
-
- <p class="summary" id="drawers_technology_hero_summary">Facebook admits hiring a PR agency to tout anti-Google stories related to user privacy in the US press.</p>
- </div>
-<div class="list">
- <ul>
- <li class="first">
-<a href="http://www.bbc.co.uk/news/technology-13372839">Wiki boss criticises injunctions</a>
-</li><li>
-<a href="http://www.bbc.co.uk/news/technology-13372982">Twitpic triggers copyright clash</a>
-</li> </ul>
-</div>
-</div>
-<div id="drawers_technology_list" class="list contentBlock">
-<ul class="blq-clearfix">
-<li class="first"><a href="http://www.bbc.co.uk/newsbeat/13380753">Digital music sales &#039;pass &pound;1bn&#039;</a></li><li><a href="http://www.bbc.co.uk/news/technology-13362111">Google unveils Chrome laptops</a></li><li><a href="http://www.bbc.co.uk/news/technology-13358293">Facebook profile access &#039;leaked&#039;</a></li><li><a href="http://www.bbc.co.uk/news/technology-13358896">ACS:Law fined over data breach</a></li><li><a href="http://www.bbc.co.uk/news/technology-13362765">Apple, Google in privacy hearing</a></li><li><a href="http://www.bbc.co.uk/news/technology-13345309">100,000 net champions recruited</a></li><li class="last"><a href="http://www.bbc.co.uk/news/technology-13350345">Google Music stumbles at launch</a></li></ul>
-</div>
-</div>
-<div class="contentBlocks cbg3"><h2 id="drawers_science_environment_title">
- <a href="http://www.bbc.co.uk/news/science_and_environment/">
- Science &amp; Environment <span class="raquo">»</span>
- </a>
-</h2><div id="drawers_science_environment_hero" class="hero contentBlock">
- <h3 id="drawers_science_environment_hero_title" class="draggable">Top Story</h3>
-<div class="title image_first">
-
- <a href="http://www.bbc.co.uk/news/science-environment-13378864" class="heroLink">
- <img src="../static.bbc.co.uk/wwhomepage-3.5/ic/news/296-167/52682000/jpg/_52682670_jup6.jpg" alt="Io pictured by Galileo" width="296" height="167" />
- <span>Jupiter moon &#039;holds magma ocean&#039;</span>
- </a>
-
-
- <p class="summary" id="drawers_science_environment_hero_summary">Data from the Galileo probe suggest Jupiter&#039;s moon Io may hold a giant magma ocean beneath its crust that is at least 50km thick.</p>
- </div>
-<div class="list">
- <ul>
- <li class="first">
-<a href="http://www.bbc.co.uk/news/science-environment-13376416">Resource demand threatens future</a>
-</li><li>
-<a href="http://www.bbc.co.uk/news/world-asia-pacific-13374153">Setbacks at Japan nuclear plant</a>
-</li> </ul>
-</div>
-</div>
-<div id="drawers_science_environment_list" class="list contentBlock">
-<ul class="blq-clearfix">
-<li class="first"><a href="http://www.bbc.co.uk/news/science-environment-13361822">Private route to fisheries reform</a></li><li><a href="http://news.bbc.co.uk/1/hi/programmes/newsnight/9483790.stm">Wikileaks shows Arctic &#039;carve up&#039;</a></li><li><a href="http://news.bbc.co.uk/earth/hi/earth_news/newsid_9483000/9483108.stm">Drug ban helps vulture recovery</a></li><li><a href="http://www.bbc.co.uk/news/health-13362927">Monkey HIV vaccine &#039;effective&#039;</a></li><li><a href="http://www.bbc.co.uk/news/science-environment-13352166">UK leads space disaster charter</a></li><li><a href="http://www.bbc.co.uk/news/world-latin-america-13373293">Brazil forest law vote postponed</a></li><li class="last"><a href="http://www.bbc.co.uk/news/world-europe-13378567">Arctic states meet in Greenland</a></li></ul>
-</div>
-</div>
- </div>
- <script type="text/javascript">
- //<![CDATA[
-
- //require(["wwhomepage.ui.drawers"], function (drawers) {
- //require.ready(function(){
- //drawers = new drawers;drawers.renderDrawers('#drawers',{"drawerWidth":"480","drawerHandleWidth":"48","drawerTargetElements":"#drawers .contentBlocks"});
- //});
- //});
- //]]>
- </script>
- </div>
-<div id="travel" class="module">
- <h2 id="travel_title" class="draggable">
- <a id="travel_title_link" href="http://www.bbc.com/travel" title="Go to Travel">
- Travel <span class="raquo">&raquo;</span>
- </a>
- </h2>
- <div id="travel_from" class="contentBlock">
- <div id="bbccom_module_0001f" class="bbccom_display_none bbccom_module">
- <div id="moduleAdvertContent_0001f" class="moduleAdvertContent">
- <div id="bbccom_module_0001f_adlabel" class="bbccom_text bbccom_module_adlabel">In Association With</div>
- <div id="bbccom_module_0001f_image" class="bbccom_image bbccom_module_image">
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.write("module_0001f",false, {});
- //]]>
- </script>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.show("module_0001f");
- //]]>
- </script>
- </div>
- </div>
- </div>
-
-
-
- <div id="travel_container" class="container">
- <div class="contentBlocks cbg0"><div id="travel_carousel" class="carousel list contentBlock">
-<div id="travel_carousel_items" class="carousel">
-
-
- <dl>
- <dt><a href="http://www.bbc.com/travel/feature/20110503-mini-guide-to-san-francisco-usa">Mini guide to San Francisco</a></dt>
- <dd class="desc">Even if San Francisco&rsquo;s streets are not paved with gold, they are splashed with rainbow-coloured murals and the skies over North Beach are ruled by trash-talking parrots.</dd>
- <dd class="img"><a href="http://www.bbc.com/travel/feature/20110503-mini-guide-to-san-francisco-usa"><img src="../static.bbc.co.uk/wwtravel/img/ic/432-258/130322419618027417917_1.jpg" alt="" width="432" height="259" /></a></dd>
- </dl>
-
-
-
-
- <dl>
- <dt><a href="http://www.bbc.com/travel/feature/20110509-round-the-world-with-kids-what-to-know-before-you-go">Going around the world with kids</a></dt>
- <dd class="desc">How to prepare if you are thinking of packing up and travelling with kids for a couple of years.</dd>
- <dd class="img"><a href="http://www.bbc.com/travel/feature/20110509-round-the-world-with-kids-what-to-know-before-you-go"><img src="../static.bbc.co.uk/wwtravel/img/ic/432-258/130469404411032117144_1.jpg" alt="" width="432" height="259" /></a></dd>
- </dl>
-
-
-
-
- <dl>
- <dt><a href="http://www.bbc.com/travel/blog/20110505-a-vintage-car-rally-through-italy">A vintage car rally through Italy</a></dt>
- <dd class="desc">The Mille Miglia sees classic cars set off from Brescia on 12 May, then thunder through tranquil Italian villages down to Rome.</dd>
- <dd class="img"><a href="http://www.bbc.com/travel/blog/20110505-a-vintage-car-rally-through-italy"><img src="../static.bbc.co.uk/wwtravel/img/ic/432-258/13047013101103615422_1.jpg" alt="" width="432" height="259" /></a></dd>
- </dl>
-
-
-
-
- <dl>
- <dt><a href="http://www.bbc.com/travel/feature/20110428-barcelona-more-than-meets-the-eye">Barcelona: More than meets the eye</a></dt>
- <dd class="desc">Garish, gregarious Barcelona doesn&rsquo;t guard its secrets jealously, but the Catalan capital does seem to get a kick from hiding the best of them in plain sight.</dd>
- <dd class="img"><a href="http://www.bbc.com/travel/feature/20110428-barcelona-more-than-meets-the-eye"><img src="../static.bbc.co.uk/wwtravel/img/ic/432-258/130512483118258745267_1.jpg" alt="" width="432" height="259" /></a></dd>
- </dl>
-
-
-
-
- <dl>
- <dt><a href="http://www.bbc.com/travel/feature/20110506-the-rebirth-of-detroit-amid-modern-day-ruins">The rebirth of Detroit</a></dt>
- <dd class="desc">As the city continues the fight of its life, artists and visionaries are breathing life into tragic symbols of abandonment and decay.</dd>
- <dd class="img"><a href="http://www.bbc.com/travel/feature/20110506-the-rebirth-of-detroit-amid-modern-day-ruins"><img src="../static.bbc.co.uk/wwtravel/img/ic/432-258/130462610113876663213_1.jpg" alt="" width="432" height="259" /></a></dd>
- </dl>
-
-
-
-
- <dl>
- <dt><a href="http://www.bbc.com/travel/feature/20110505-revisiting-mexico-citys-forgotten-cantinas">Mexico City&rsquo;s forgotten cantinas</a></dt>
- <dd class="desc">A cleaner and safer city centre has given 100-year-old bars a new lease on life.</dd>
- <dd class="img"><a href="http://www.bbc.com/travel/feature/20110505-revisiting-mexico-citys-forgotten-cantinas"><img src="../static.bbc.co.uk/wwtravel/img/ic/432-258/130394144725024437107_1.jpg" alt="" width="432" height="259" /></a></dd>
- </dl>
-
-
-
-
- <dl>
- <dt><a href="http://www.bbc.com/travel/feature/20110504-equal-rights-for-air-travellers-outside-the-us-and-eu">Unequal travellers&rsquo; rights</a></dt>
- <dd class="desc">New passenger rights and regulations in the EU and US better protect airline passengers, but governmental intervention is less consistent elsewhere.</dd>
- <dd class="img"><a href="http://www.bbc.com/travel/feature/20110504-equal-rights-for-air-travellers-outside-the-us-and-eu"><img src="../static.bbc.co.uk/wwtravel/img/ic/432-258/130453393821600812350_1.jpg" alt="" width="432" height="259" /></a></dd>
- </dl>
-
-
-
-
-
-</div>
-<script type="text/javascript">
-//<![CDATA[
-var carousel = document.getElementById("travel_carousel_items");
-carousel.style.height = '259px';
-carousel.style.overflow = 'hidden';
-
-//require(["wwhomepage.ui.carouselfactory"], function (carousel) {
- //require.ready(function(){
- //carousel.createCarousel('#travel_carousel_items',{auto:false, speed:800, api:true, controls:'previewNav',item:'dl',dispTimeout:5000});
- //})
-//});
-//]]>
-</script>
-
-
-
-
-
-
-</div>
-
-
-
-
-</div>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- //]]>
- </script>
- </div>
-<div id="worldService" class="module">
- <h2 id="worldService_title" class="draggable">
- <a id="worldService_title_link" href="http://www.bbc.co.uk/worldservice/" title="Go to World Service">
- World Service <span class="raquo">&raquo;</span>
- </a>
- </h2>
-
-
-
- <div id="worldService_container" class="container">
- <div class="contentBlocks cbg0"><div id="worldService_spanish" class="hero contentBlock">
- <h3 id="worldService_spanish_title" class="draggable"><a href="http://www.bbc.co.uk/mundo/">Spanish</a>
- </h3>
-<div class="title image_first">
-
- <a href="http://www.bbc.co.uk/mundo/noticias/2011/05/110512_terremoto_espana_lorca_inmigrantes_rg.shtml" class="heroLink">
- <span>Tras el terremoto, centenares de inmigrantes deambulan por Lorca</span>
- </a>
-
-
- <p class="summary" id="worldService_spanish_summary">Cerca del 20% de la poblaci&oacute;n son inmigrantes, la mayor&iacute;a de ellos ecuatorianos que han llegado en los &uacute;ltimos diez a&ntilde;os. Muchos de ellos duermen en la calle tras la tragedia del mi&eacute;rcoles.</p>
- </div>
-</div>
-<div id="worldService_chinese" class="hero contentBlock">
- <h3 id="worldService_chinese_title" class="draggable"><a href="http://www.bbc.co.uk/zhongwen/simp/">Chinese</a>
- </h3>
-<div class="title image_first">
-
- <a href="http://www.bbc.co.uk/zhongwen/simp/science/2011/05/110512_hiv_infection.shtml" class="heroLink">
- <span>研究:艾滋病毒传播风险可减96%</span>
- </a>
-
-
- <p class="summary" id="worldService_chinese_summary">一项突破性研究发现,身体健康的艾滋病毒携带者在发现感染后立即服用抗逆转录病毒药,可将病毒传染给不带病毒的性伙伴的风险减少96%</p>
- </div>
-</div>
-<div id="worldService_arabic" class="hero contentBlock">
- <h3 id="worldService_arabic_title" class="draggable"><a href="http://www.bbc.co.uk/arabic/">Arabic</a>
- </h3>
-<div class="title image_first" dir="rtl">
-
- <a href="http://www.bbc.co.uk/arabic/middleeast/2011/05/110512_libya_clashes_misrata.shtml" class="heroLink">
- <span>زعيم معارض: القذافي &quot;هدف مشروع&quot; لغارات الناتو</span>
- </a>
-
-
- <p class="summary" id="worldService_arabic_summary">رئيس المجلس الوطني الانتقالي الليبي المعارض يقول ان القذافي &quot;هدف مشروع&quot; لغارات حلف الناتو، الذي يواصل شن ضرباته على اهداف تابعة للحكومة الليبية.</p>
- </div>
-</div>
-<div id="worldService_persian" class="hero contentBlock">
- <h3 id="worldService_persian_title" class="draggable"><a href="http://www.bbc.co.uk/persian/">Persian</a>
- </h3>
-<div class="title image_first" dir="rtl">
-
- <a href="http://www.bbc.co.uk/persian/iran/2011/05/110512_l30_iranian_prison_mother_reaxtion.shtml" class="heroLink">
- <span>نویسنده نامه زندان رجایی شهر به انفرادی افتاد</span>
- </a>
-
-
- <p class="summary" id="worldService_persian_summary">مهدی محمودیان روزنامه&zwnj;نگاری که نامه&zwnj;ای در باره وقوع تجاورز جنسی در زندان رجایی شهر کرج منتشر کرده بود، به سلول انفرادی منتقل شده است.</p>
- </div>
-</div>
-</div>
-<div class="contentBlocks cbg1"><div id="worldService_languages" class="list contentBlock fourColumn">
- <h3 id="worldService_languages_title" class="draggable"><a href="http://www.bbc.co.uk/worldservice/languages">More languages</a>
- </h3>
-<ul class="blq-clearfix">
-<li class="first"><a href="http://www.bbc.co.uk/bengali/" xml:lang="bn" class="bn" title="Bangla service">Bangla</a></li><li><a href="http://www.bbc.co.uk/burmese/" xml:lang="my" class="my" title="Burmese service">Burmese</a></li><li><a href="http://www.bbc.co.uk/afrique/" xml:lang="fr" class="fr" title="French (for Africa) service">French</a></li><li><a href="http://www.bbc.co.uk/hausa/" xml:lang="ha" class="ha" title="Hausa service">Hausa</a></li><li><a href="http://www.bbc.co.uk/hindi/" xml:lang="hi" class="hi" title="Hindi service">Hindi</a></li><li><a href="http://www.bbc.co.uk/indonesia/" xml:lang="id" class="id" title="Indonesian service">Indonesian</a></li><li><a href="http://www.bbc.co.uk/gahuza/" xml:lang="rw" class="rw" title="Kinyarwanda service">Kinyarwanda</a></li><li><a href="http://www.bbc.co.uk/gahuza/" xml:lang="rn" class="rn" title="Kirundi service">Kirundi</a></li><li><a href="http://www.bbc.co.uk/kyrgyz/" xml:lang="ky" class="ky" title="Kyrgyz service">Kyrgyz</a></li><li><a href="http://www.bbc.co.uk/nepali/" xml:lang="ne" class="ne" title="Nepali service">Nepali</a></li><li><a href="http://www.bbc.co.uk/pashto/" xml:lang="ps" class="ps" title="Pashto service">Pashto</a></li><li><a href="http://www.bbc.co.uk/portuguese/" xml:lang="pt-BR" class="pt-BR" title="Portuguese (for Brazil) service">Portuguese</a></li><li><a href="http://www.bbc.co.uk/russian/" xml:lang="ru" class="ru" title="Russian service">Russian</a></li><li><a href="http://www.bbc.co.uk/sinhala/" xml:lang="si" class="si" title="Sinhala service">Sinhala</a></li><li><a href="http://www.bbc.co.uk/somali/" xml:lang="so" class="so" title="Somali service">Somali</a></li><li><a href="http://www.bbc.co.uk/swahili/" xml:lang="sw" class="sw" title="Swahili service">Swahili</a></li><li><a href="http://www.bbc.co.uk/tamil/" xml:lang="ta" class="ta" title="Tamil service">Tamil</a></li><li><a href="http://www.bbc.co.uk/turkce/" xml:lang="tr" class="tr" title="Turkish 
service">Turkish</a></li><li><a href="http://www.bbc.co.uk/ukrainian/" xml:lang="uk" class="uk" title="Ukrainian service">Ukrainian</a></li><li><a href="http://www.bbc.co.uk/urdu/" xml:lang="ur" class="ur" title="Urdu service">Urdu</a></li><li><a href="http://www.bbc.co.uk/uzbek/" xml:lang="uz" class="uz" title="Uzbek service">Uzbek</a></li><li><a href="http://www.bbc.co.uk/vietnamese/" xml:lang="vi" class="vi" title="Vietnamese service">Vietnamese</a></li></ul>
-</div>
-</div>
- </div>
- </div>
-</div><div class="colC column">
-<div id="adMpu" class="module">
-
-
- <div id="adMpu_container" class="container">
- <div class="contentBlocks cbg0"> <div class="advert">
- <div id="bbccom_mpu" class="bbccom_display_none">
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.write("mpu",true);
- //]]>
- </script>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.show("mpu");
- //]]>
- </script>
- </div>
-</div>
- </div>
- </div>
-<div id="spotlight" class="module">
- <h2 id="spotlight_title" class="draggable">
- Spotlight </h2>
-
-
-
- <div id="spotlight_container" class="container">
- <div class="contentBlocks cbg0"><div id="spotlight_sponsorAd" class="contentBlock">
-<h3 id="spotlight_sponsorAd_title" class="draggable"><a href="http://www.bbc.co.uk/news/video_and_audio/" title="Go to Video and Audio">Video and Audio</a></h3>
- <div id="bbccom_module_0000i" class="bbccom_display_none bbccom_module">
- <div id="moduleAdvertContent_0000i" class="moduleAdvertContent">
- <div id="bbccom_module_0000i_adlabel" class="bbccom_text bbccom_module_adlabel">In Association With</div>
- <div id="bbccom_module_0000i_image" class="bbccom_image bbccom_module_image">
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.write("module_0000i",false);
- //]]>
- </script>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.show("module_0000i");
- //]]>
- </script>
- </div>
- </div>
-</div>
-<div id="spotlight_hero" class="hero contentBlock">
- <h3 id="spotlight_hero_title" class="draggable">Top Story</h3>
-<div class="title image_first">
-
- <a href="http://www.bbc.co.uk/news/uk-13370546" class="heroLink">
- <img src="../static.bbc.co.uk/wwhomepage-3.5/ic/spotlight-borg/336-189/spotlight_thequeen_120511.jpg" alt="Her Majesty The Queen" width="336" height="189" />
- <span>The Queen passes a royal milestone</span>
- </a>
-
-
- <p class="summary" id="spotlight_hero_summary">Queen Elizabeth II has become the second longest-serving monarch in British history, after 57 years and 97 days on the throne.</p>
- </div>
-</div>
-<div id="spotlight_list" class="list contentBlock">
-<ul class="blq-clearfix">
-<li class="first"><a href="http://www.bbc.co.uk/news/world-europe-13368657">Spanish earthquake aftermath</a></li><li><a href="http://www.bbc.co.uk/news/entertainment-arts-13359824">Josie Foster stands by Mel Gibson</a></li><li class="last"><a href="http://www.bbc.co.uk/news/video_and_audio/">One-minute World News</a></li></ul>
-</div>
-</div>
- </div>
- </div>
-<div id="mostPopular" class="module">
- <h2 id="mostPopular_title" class="draggable">
- Most Popular in News </h2>
-
-
-
- <div id="mostPopular_container" class="container">
- <div class="contentBlocks cbg0"><div id="mostPopular_tabset" class="livestats livestats-tabbed contentBlock topTabs">
-
- <h3 class="first tab">
- <a id="mostPopular_tabset_shared_link" href="#">Shared</a>
- </h3>
-
-
- <div id="mostPopular_tabset_shared" class="panel">
-
-
- <ul class="blq-clearfix">
-
-
- <li class="ol0 first-child">
- <a href="http://www.bbc.co.uk/news/technology-13374048"><span class="livestats-icon livestats-1">1: </span>Facebook exposed in Google smear</a> </li>
-
-
- <li class="ol1">
- <a href="http://www.bbc.co.uk/news/world-europe-12321549"><span class="livestats-icon livestats-2">2: </span>Demjanjuk guilty of Nazi murders</a> </li>
-
-
- <li class="ol2">
- <a href="http://www.bbc.co.uk/news/world-europe-13371472"><span class="livestats-icon livestats-3">3: </span>Spain shocked by fatal earthquake</a> </li>
-
-
- <li class="ol3">
- <a href="http://www.bbc.co.uk/news/health-13362927"><span class="livestats-icon livestats-4">4: </span>Monkey HIV vaccine &#039;effective&#039;</a> </li>
-
-
- <li class="ol4">
- <a href="http://www.bbc.co.uk/news/world-europe-13368599"><span class="livestats-icon livestats-5">5: </span>Deadly quake rocks southern Spain</a> </li>
-
-
- </ul>
-
-
- </div>
-
-
- <h3 class="tab">
- <a id="mostPopular_tabset_read_link" href="#">Read</a>
- </h3>
-
-
- <div id="mostPopular_tabset_read" class="panel">
-
-
- <ul class="blq-clearfix">
-
-
- <li class="ol0 first-child">
- <a href="http://www.bbc.co.uk/news/world-us-canada-13379298"><span class="livestats-icon livestats-1">1: </span>Fears for Bin Laden US Navy Seals</a> </li>
-
-
- <li class="ol1">
- <a href="http://www.bbc.co.uk/news/technology-13374048"><span class="livestats-icon livestats-2">2: </span>Facebook exposed in Google smear</a> </li>
-
-
- <li class="ol2">
- <a href="http://www.bbc.co.uk/news/health-13381292"><span class="livestats-icon livestats-3">3: </span>Drugs &#039;reduce&#039; HIV transmission</a> </li>
-
-
- <li class="ol3">
- <a href="http://www.bbc.co.uk/news/world-europe-12321549"><span class="livestats-icon livestats-4">4: </span>Demjanjuk guilty of Nazi murders</a> </li>
-
-
- <li class="ol4">
- <a href="http://www.bbc.co.uk/news/world-13375779"><span class="livestats-icon livestats-5">5: </span>Day in pictures</a> </li>
-
-
- </ul>
-
-
- </div>
-
-
- <h3 class="selected tab">
- <a id="mostPopular_tabset_watched_link" href="#">Watched/Listened</a>
- </h3>
-
-
- <div id="mostPopular_tabset_watched" class="selected panel">
-
-
- <ul class="blq-clearfix">
-
-
- <li class="ol0 first-child">
- <a href="http://www.bbc.co.uk/news/entertainment-arts-13359824"><span class="livestats-icon livestats-1">1: </span>Jodie Foster stands by Mel Gibson</a> </li>
-
-
- <li class="ol1">
- <a href="http://www.bbc.co.uk/news/uk-13370546"><span class="livestats-icon livestats-2">2: </span>Queen Elizabeth II passes milestone</a> </li>
-
-
- <li class="ol2">
- <a href="http://www.bbc.co.uk/news/world-europe-13381243"><span class="livestats-icon livestats-3">3: </span>Footage shows Mount Etna eruption</a> </li>
-
-
- <li class="ol3">
- <a href="http://www.bbc.co.uk/news/world-europe-13381409"><span class="livestats-icon livestats-4">4: </span>Sobibor relative on Nazi verdict</a> </li>
-
-
- <li class="ol4">
- <a href="http://www.bbc.co.uk/news/uk-england-beds-bucks-herts-13362700"><span class="livestats-icon livestats-5">5: </span>Schoolboy wears a skirt to school</a> </li>
-
-
- </ul>
-
-
- </div>
-
- <script type="text/javascript">
- //<![CDATA[
- //require(["wwhomepage.ui.tabs"], function (tabs) {
- //require.ready(function(){
- //tabs.registerTabs('#mostPopular_tabset');
- //})
- //});
- //]]>
- </script>
-</div>
-
-
- <div id="mostPopular_from" class="contentBlock">
- <div id="bbccom_module_00029" class="bbccom_display_none bbccom_module">
- <div id="moduleAdvertContent_00029" class="moduleAdvertContent">
- <div id="bbccom_module_00029_adlabel" class="bbccom_text bbccom_module_adlabel">Functionality In Association With</div>
- <div id="bbccom_module_00029_image" class="bbccom_image bbccom_module_image">
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.write("module_00029",false, {});
- //]]>
- </script>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.show("module_00029");
- //]]>
- </script>
- </div>
- </div>
- </div>
-</div>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- //]]>
- </script>
- </div>
-<div id="weather" class="module">
- <h2 id="weather_title" class="draggable">
- <a id="weather_title_link" href="http://news.bbc.co.uk/weather/" title="Go to Weather">
- Weather <span class="raquo">&raquo;</span>
- </a>
- </h2>
- <div id="weather_from" class="contentBlock">
- <div id="bbccom_module_0001k" class="bbccom_display_none bbccom_module">
- <div id="moduleAdvertContent_0001k" class="moduleAdvertContent">
- <div id="bbccom_module_0001k_adlabel" class="bbccom_text bbccom_module_adlabel">In Association With</div>
- <div id="bbccom_module_0001k_image" class="bbccom_image bbccom_module_image">
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.write("module_0001k",false, {});
- //]]>
- </script>
- </div>
- <script type="text/javascript">
- //<![CDATA[
- BBC.adverts.show("module_0001k");
- //]]>
- </script>
- </div>
- </div>
- </div>
-
-
-
- <div id="weather_container" class="container">
- <div class="contentBlocks cbg0"><div id="weather_forecast" class="weatherforecast contentBlock">
- <h3>London</h3>
-
-<div id="weather_searchPanel" class="searchPanel editPanel contentBlock">
- <form id="weather_searchPanel_form" method="get" action="http://www.bbc.co.uk/customise">
- <fieldset>
-
- <input type="hidden" name="queue" value="searchPanel%7C0001k%7C0001q%7C0001m" />
- <input type="hidden" name="action" value="searchPanel" />
- <input type="hidden" name="module" value="0001k" />
- <input type="hidden" name="contentBlockGroup" value="0001q" />
- <input type="hidden" name="contentBlock" value="0001m" />
-
- <div class="field">
- <label>Enter city, town or region</label>
- <input type="text" size="25" maxlength="40" value="Enter city, town or region" class="searchPanelSearch" name="value" />
- <button type="submit" class="submit">Search</button>
- </div>
-
- </fieldset>
- </form>
- <div id="searchResults" class="results">
- <p>&nbsp;</p>
-
- <ul><li>&nbsp;</li></ul>
- </div>
-</div>
- <dl class="dayForecast">
- <dt>
- Friday </dt>
- <dd>
- <a href="http://news.bbc.co.uk/weather/forecast/8?area=London">
- <img width="62" height="62" alt="Sunny Intervals" src="../newsimg.bbc.co.uk/weather/img/symbols/57x57/3.gif" class="icon" />
- </a>
- <p class="forecast">Sunny Intervals</p>
- <p class="temp max">
- Max<span class="hide"> Temperature</span>:
- <span> 18&#176;C</span>
- <span class="hide">64&#176;F</span>
- </p>
- <p class="temp min">
- Min<span class="hide"> Temperature</span>:
- <span> 10&#176;C</span>
- <span class="hide">50&#176;F</span>
- </p>
- </dd>
- <dt>
- Saturday </dt>
- <dd>
- <a href="http://news.bbc.co.uk/weather/forecast/8?area=London">
- <img width="62" height="62" alt="Sunny Intervals" src="../newsimg.bbc.co.uk/weather/img/symbols/57x57/3.gif" class="icon" />
- </a>
- <p class="forecast">Sunny Intervals</p>
- <p class="temp max">
- Max<span class="hide"> Temperature</span>:
- <span> 16&#176;C</span>
- <span class="hide">61&#176;F</span>
- </p>
- <p class="temp min">
- Min<span class="hide"> Temperature</span>:
- <span> 8&#176;C</span>
- <span class="hide">46&#176;F</span>
- </p>
- </dd>
- <dt>
- Sunday </dt>
- <dd>
- <a href="http://news.bbc.co.uk/weather/forecast/8?area=London#Next4Days">
- <img width="62" height="62" alt="White Cloud" src="../newsimg.bbc.co.uk/weather/img/symbols/57x57/7.gif" class="icon" />
- </a>
- <p class="forecast">White Cloud</p>
- <p class="temp max">
- Max<span class="hide"> Temperature</span>:
- <span> 13&#176;C</span>
- <span class="hide">55&#176;F</span>
- </p>
- <p class="temp min">
- Min<span class="hide"> Temperature</span>:
- <span> 10&#176;C</span>
- <span class="hide">50&#176;F</span>
- </p>
- </dd>
- <dt class="last">
- Monday </dt>
- <dd class="last">
- <a href="http://news.bbc.co.uk/weather/forecast/8?area=London">
- <img width="62" height="62" alt="White Cloud" src="../newsimg.bbc.co.uk/weather/img/symbols/57x57/7.gif" class="icon" />
- </a>
- <p class="forecast">White Cloud</p>
- <p class="temp max">
- Max<span class="hide"> Temperature</span>:
- <span> 15&#176;C</span>
- <span class="hide">59&#176;F</span>
- </p>
- <p class="temp min">
- Min<span class="hide"> Temperature</span>:
- <span> 11&#176;C</span>
- <span class="hide">52&#176;F</span>
- </p>
- </dd>
-
- </dl>
-
- <p class="detail"><a href="http://news.bbc.co.uk/weather/forecast/8?area=London" title="Detailed forecast for London">Detailed forecast</a></p>
-</div>
-</div>
- </div>
- <script type="text/javascript">
- //<![CDATA[
-
- //require(["wwhomepage.ui.weather"], function (weather) {
- //require.ready(function(){
- //weather = new weather;weather.renderWeather('#weather',{"autocomplete":{"enabled":false,"minLength":4}});
- //});
- //});
- //]]>
- </script>
- </div>
-</div><div id="exploreTray">
- <h2>Explore the BBC</h2>
- <div class="directory">
- <div class="directoryColumn hpDir1">
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/head/t/-/news/1/hi/default.stm">News</a></h3>
- <h4 class="offscreen">World Regions</h4>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/africa/t/-/news/2/hi/africa/default.stm">Africa</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/asiapacific/t/-/news/2/hi/asia-pacific/default.stm">Asia-Pacific</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/europe/t/-/news/2/hi/europe/default.stm">Europe</a></li>
- <li><a href="http://www.bbc.co.uk/news/world/latin_america/">Latin America</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/middleeast/t/-/news/2/hi/middle_east/default.stm">Middle East</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/southasia/t/-/news/2/hi/south_asia/default.stm">South Asia</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/uknews/t/-/news/2/hi/uk_news/default.stm">UK</a></li>
- <li><a href="http://www.bbc.co.uk/news/world/us_and_canada/">US &amp; Canada</a></li>
- </ul>
- <h4 class="offscreen">Types of news</h4>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/business/t/-/news/2/hi/business/default.stm">Business</a></li>
- <li><a href="http://news.bbc.co.uk/go/homepage/i/int/br/ne/health/t/-/2/hi/health/default.stm">Health</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/sciencenature/t/-/news/2/hi/science/nature/default.stm">Science &amp; Environment</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/technology/t/-/news/2/hi/technology/default.stm">Technology</a></li>
- <li><a href="http://www.bbc.co.uk/news/entertainment_and_arts/">Entertainment &amp; Arts</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/alsointhenews/t/-/news/2/hi/also_in_the_news/">Also in the News</a></li>
- </ul>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ne/languages/t/-/worldservice/languages/">BBC in your language</a></li>
- </ul>
- </div>
- <div class="directoryColumn hpDir2">
-
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sp/head/t/-/news/sport2/hi/default.stm">Sport</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sp/football/t/-/news/sport2/hi/football/default.stm">Football</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sp/cricket/t/-/news/sport2/hi/cricket/default.stm">Cricket</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sp/tennis/t/-/news/sport2/hi/tennis/default.stm">Tennis</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sp/motorsport/t/-/news/sport2/hi/motorsport/default.stm">Motorsport</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/bus/head/t/-/news/2/hi/business/default.stm">Business</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/bus/markets/t/-/news/2/shared/fds/hi/business/market_data/overview/default.stm">Market Data</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/bus/economy/t/-/news/2/hi/business/economy/default.stm">Economy</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/bus/companies/t/-/news/2/hi/business/companies/default.stm">Companies</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/wea/head/t/-/news/weather">Weather</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/wea/news/t/-/news/weather/hi/news">Weather News</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/wea/head/t/-/http://www.bbc.com/travel">Travel</a></h3>
- </div>
- <div class="directoryColumn hpDir3">
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ent/head/t/-/entertainment/">Entertainment</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ent/head/t/-/news/2/hi/entertainment/default.stm">Entertainment News</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ent/comedy/t/-/comedy/">Comedy</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/ent/drama/t/-/drama/">Drama</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/mus/head/t/-/music/">Music</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/mus/genres/t/-/music/genres/">Genres</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/mus/reviews/t/-/music/reviews">Reviews</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/mus/artists/t/-/music/artists/">Artists</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/mus/news/t/-/music/news/">News</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/art/head/t/-/arts/">Arts &amp; Culture</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/art/film/t/-/film/">Film</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/food/head/t/-/food/">Food</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/food/techniques">Techniques</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/food/recipes/t/-/food/recipes/">Recipes</a></li>
- </ul>
- </div>
- <div class="directoryColumn hpDir4">
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sci/headscience/t/-/sn/">Science</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sci/human/t/-/science/humanbody/">Humans</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sci/space/t/-/space/">Space</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sci/headnature/t/-/nature/">Nature</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/sci/animals/t/-/nature/animals/">Animals</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/gard/head/t/-/gardening/">Gardening</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/gard/plantfind/t/-/gardening/plants/plant_finder/">Plant finder</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/gard/advice/t/-/gardening/advice/">Advice</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/rel/headreligion/t/-/religion/">Religion</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/rel/calendar/t/-/religion/tools/calendar/">Multifaith Calendar</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/rel/quiz/t/-/religion/tools/quizzes">Quizzes</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/rel/headethics/t/-/ethics/">Ethics</a></h3>
- </div>
- <div class="directoryColumn hpDir5">
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/learningenglish/t/-/worldservice/learningenglish/">Learning English</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/le/learningenglish/t/-/worldservice/learningenglish/">Online courses</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/le/grammar/t/-/worldservice/learningenglish/grammar/index.shtml">Vocabulary &amp; Grammar</a></li>
- <li class="clearLeft nosep"><a href="http://www.bbc.co.uk/go/homepage/i/int/br/le/quizzes/t/-/worldservice/learningenglish/quizzes/index.shtml">Quizzes</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/le/languages/t/-/languages/">Learning Other Languages</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/languages/french/t/-/languages/french/">French</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/languages/spanish/t/-/languages/spanish/">Spanish</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/languages/german/t/-/languages/german/">German</a></li>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/languages/italian/t/-/languages/italian/">Italian</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/tv/tv1/t/-/tv/i/">TV Channels</a></h3>
- <ul>
- <li><a href="http://www.bbcworldnews.com/">BBC World News</a></li>
- <li><a href="http://www.bbcamerica.com/">BBC America</a></li>
- </ul>
- <h3><a href="http://www.bbc.co.uk/go/homepage/i/int/br/rad/head/t/-/radio/">Radio</a></h3>
- <ul>
- <li><a href="http://www.bbc.co.uk/go/homepage/i/int/br/rad/ws/t/-/worldservice/">World Service</a></li>
- </ul>
- <p class="aToz">Can't find it? Try the <a href="http://www.bbc.co.uk/go/homepage/i/int/br/atoz/-/a-z/">A to Z</a></p>
- </div>
- </div>
-</div>
-
- </div> <div id="blq-mast" class="blq-rst blq-mast-dark blq-new-nav" xml:lang="en-GB"> <div id="blq-acc-mobile"><a href="http://www.bbc.co.uk/mobile/">Mobile</a></div> <form method="get" action="http://search.bbc.co.uk/search" accept-charset="utf-8"> <p> <input type="hidden" name="go" value="toolbar" /> <input type="hidden" name="uri" value="/" /> <label for="blq-search" class="blq-hide">Search term:</label> <input id="blq-search" type="text" name="q" value="" maxlength="128" /> <input id="blq-search-btn" type="submit" value="Search" /> </p> </form> <h2 class="blq-hide">bbc.co.uk navigation</h2> <ul id="blq-nav-main" class="blq-not-uk"> <li id="blq-nav-n"><a href="http://www.bbc.co.uk/news/" hreflang="en-GB">News</a></li> <li id="blq-nav-s"><a href="http://news.bbc.co.uk/sport/" hreflang="en-GB">Sport</a></li> <li id="blq-nav-w"><a href="http://news.bbc.co.uk/weather/" hreflang="en-GB">Weather</a></li> <li id="blq-nav-tr"> <a href="http://www.bbc.com/travel/" hreflang="en-GB">Travel</a> </li> <li id="blq-nav-t"><a href="http://www.bbc.co.uk/tv/" hreflang="en-GB">TV</a></li> <li id="blq-nav-r"><a href="http://www.bbc.co.uk/radio/" hreflang="en-GB">Radio</a></li> <li id="blq-nav-m"><a href="#blq-nav">More</a></li> </ul> </div> <div id="blq-nav" class="blq-blue blq-rst"> <div id="blq-nav-links" class="blq-clearfix" xml:lang="en-GB"> <div id="blq-nav-links-inner"> <ul class="blq-nav-sub blq-first"> <li><a href="http://www.bbc.co.uk/cbbc/">CBBC</a></li> <li><a href="http://www.bbc.co.uk/cbeebies/">CBeebies</a></li> <li><a href="http://www.bbc.co.uk/comedy/">Comedy</a></li> <li><a href="http://www.bbc.co.uk/food/">Food</a></li> <li><a href="http://www.bbc.co.uk/health/">Health</a></li> </ul> <ul class="blq-nav-sub"> <li><a href="http://www.bbc.co.uk/history/">History</a></li> <li><a href="http://www.bbc.co.uk/learning/">Learning</a></li> <li><a href="http://www.bbc.co.uk/music/">Music</a></li> <li><a href="http://www.bbc.co.uk/science/">Science</a></li> <li><a 
href="http://www.bbc.co.uk/nature/">Nature</a></li> </ul> <ul class="blq-nav-sub blq-last"> <li><a href="http://www.bbc.co.uk/local/">Local</a></li> <li><a href="http://www.bbc.co.uk/northernireland/">Northern Ireland</a></li> <li><a href="http://www.bbc.co.uk/scotland/">Scotland</a></li> <li><a href="http://www.bbc.co.uk/wales/">Wales</a></li> <li id="blq-az"><a href="http://www.bbc.co.uk/a-z/">Full A-Z<span class="blq-hide"> of BBC sites</span></a></li> </ul> </div> </div> </div> <!--[if IE 6]> <div id="blq-ie6-upgrade"> <p> <span>You're using the Internet Explorer 6 browser to view the BBC website. Our site will work much better if you change to a more modern browser. It's free, quick and easy.</span> <a href="http://www.browserchoice.eu/">Find out more <span>about upgrading your browser</span> here&hellip;</a> </p> </div> <![endif]--> <div id="blq-foot" xml:lang="en-GB" class="blq-rst blq-clearfix blq-foot-opaque"> <div id="blq-footlinks"> <h2 class="blq-hide">BBC links</h2> <ul id="blq-bbclinks"> <li> <a href="http://www.bbc.co.uk/aboutthebbc/">About the BBC</a> </li> <li> <a href="http://www.bbc.co.uk/help/">BBC Help</a> </li> <li> <a href="http://www.bbc.co.uk/feedback/">Contact Us</a> </li> <li> <a href="http://www.bbc.co.uk/accessibility/">Accessibility Help</a> </li> <li> <a href="http://www.bbc.co.uk/terms/">Terms of Use</a> </li> <li> <a href="http://www.bbc.co.uk/jobs/">Jobs</a> </li> <li> <a href="http://www.bbc.co.uk/privacy/">Privacy &amp; Cookies</a> </li> <li> <a href="http://www.bbc.co.uk/bbc.com/furtherinformation/">Advertise With Us</a> </li> </ul> </div> <p id="blq-logo" class="blq-footer-image-light"><img src="../static.bbc.co.uk/frameworks/barlesque/1.8.19_/desktop/3/img/blocks/light.png" width="84" height="24" alt="BBC" /></p> <div id="bbccom_bottom" class="bbccom_display_none" style="width:468px; text-align:right;">
- <script type="text/javascript">BBC.adverts.write("bottom",true);</script>
- </div>
- <script type="text/javascript">BBC.adverts.show("bottom");</script>
- <p id="blq-disclaim"><span id="blq-copy">BBC &copy; 2011</span> <a href="http://www.bbc.co.uk/help/web/links/">The BBC is not responsible for the content of external sites. Read more.</a></p> <div id="blq-obit"><p><strong>This page is best viewed in an up-to-date web browser with style sheets (CSS) enabled. While you will be able to view the content of this page in your current browser, you will not be able to get the full visual experience. Please consider upgrading your browser software or enabling style sheets (CSS) if you are able to do so.</strong></p></div> </div> </div> <script type="text/javascript">
- bbcdotcom.stats = {
- "adEnabled" : "yes",
- "contentType" : "HTML",
- "audience" : "us"
- };
- </script>
-<!--
- <script type="text/javascript" src="js/gw.js?csid=J08781"></script>
- <script type="text/javascript">
- DM_tag();
- </script>
--->
- <!-- Start Quantcast tag -->
- <script type="text/javascript">
- _qoptions={
- qacct:"p-ccrmZLtMqYB8w"
- };
- </script>
-<!--
- <script type="text/javascript" src="js/quant.js"></script>
--->
- <noscript>
- <div>
- <img src="../pixel.quantserve.com/pixel/p-ccrmZLtMqYB8w.gif" style="display: none;" height="1" width="1" alt="Quantcast"/>
- </div>
- </noscript>
- <!-- End Quantcast tag -->
-
- <!-- SiteCatalyst code version: H.21.
- Copyright 1996-2010 Adobe, Inc. All Rights Reserved
- More info available at http://www.omniture.com -->
-<!--
- <script type="text/javascript" src="js/s_code.js"></script>
--->
- <script type="text/javascript"><!--
- /* You may give each page an identifying name, server, and channel on
- the next lines. */
-
- /************* DO NOT ALTER ANYTHING BELOW THIS LINE ! **************/
- //var s_code=s.t();if(s_code)document.write(s_code)//--></script>
- <script type="text/javascript"><!--
- if(navigator.appVersion.indexOf('MSIE')>=0)document.write(unescape('%3C')+'\!-'+'-')
- //--></script><noscript><div><a href="http://www.omniture.com/" title="Web Analytics"><img
- src="http://bbc.112.2o7.net/b/ss/bbcwglobalprod/1/H.21--NS/0?AQB=1&amp;pccr=true&amp;g=none&amp;AQE=1" height="1" width="1" alt="" /></a></div></noscript><!--/DO NOT REMOVE/-->
- <!-- End SiteCatalyst code version: H.21. -->
-
- <!-- Begin comScore Tag -->
- <script type="text/javascript">
- document.write(unescape("%3Cscript src='" + "'js/beacon.js' %3E%3C/script%3E"));
- //document.write(unescape("%3Cscript src='" + (document.location.protocol == "https:" ? "https://sb" : "http://b") + ".scorecardresearch.com/beacon.js' %3E%3C/script%3E"));</script>
-<!--
- <script type="text/javascript">
- COMSCORE.beacon({
- c1:2,
- c2:"6035051",
- c3:"",
- c4:"www.bbc.co.uk/",
- c5:"",
- c6:"",
- c15:""
- });
- </script>
--->
- <noscript>
- <div>
-<!--
- <img src="http://b.scorecardresearch.com/b2?c1=2&amp;c2=6035051&amp;c3=&amp;c4=www.bbc.co.uk/&amp;c5=&amp;c6=&amp;c15=&amp;cv=1.3&amp;cj=1" style="display:none" width="0" height="0" alt="" />
--->
- </div>
- </noscript>
- <!-- End comScore Tag -->
-
-
- </div> </div> <script type="text/javascript"> pulse.init( 'homepage', false ); </script> <!--561-->
-<script type="text/javascript">
-//<![CDATA[
-var ref = encodeURIComponent(document.referrer);
-//var st_iu="http://su.sageanalyst.net/"+ (new Date()).getTime() +"/NS?ci=775&amp;di=d006&amp;pg=&amp;ai=hn:www.bbc.co.uk,uri:/home/u/index.shtml,qs:,rf:-,cgr:8,ver:1,eid:0,ump:0,prm:1,hp:0,uc:^+m00039|s0003a|s0003b|s0003c+m00026|s00027+m00001|s00003|s00004|s00005+m0000c|s0000f|s0000e+m00008|s0000a|s0000b+m0002l|s00010|s0002s|s00011|s00012|s0000o|s0000p|s0000q|s0000u|s0000v|s0000w|s00016|s00017|s00018+m0002y|s00030|s00031+m0001f|s0001g+m0001r|s0001s|s0002q|s0001t|s0001u|s0001w^+m0001c|s0001d+m0000i|s0000k|s0000l|s0000n+m00029|s0002a|s0002r+m0001k|s0001l^,uu:"+Homepage.fetchBbcCookie();
-if (ref.length) {
- st_iu = st_iu.replace(/rf:[^,]*/,'rf:'+ref);
-}
-st_iXz=new Image();
-//st_iXz.src=st_iu.replace(/\&amp;/g,'&');
-//]]>
-</script>
-<div style="display:none">
- <noscript>
- <p><img src="img/NS9dec.gif?ci=775&amp;di=d006&amp;pg=&amp;ai=hn:www.bbc.co.uk,uri:/home/u/index.shtml,qs:,rf:-,cgr:8,ver:1,eid:0,ump:0,prm:1,hp:0,uc:^+m00039|s0003a|s0003b|s0003c+m00026|s00027+m00001|s00003|s00004|s00005+m0000c|s0000f|s0000e+m00008|s0000a|s0000b+m0002l|s00010|s0002s|s00011|s00012|s0000o|s0000p|s0000q|s0000u|s0000v|s0000w|s00016|s00017|s00018+m0002y|s00030|s00031+m0001f|s0001g+m0001r|s0001s|s0002q|s0001t|s0001u|s0001w^+m0001c|s0001d+m0000i|s0000k|s0000l|s0000n+m00029|s0002a|s0002r+m0001k|s0001l^,uu:" alt="sage_tracking" /></p>
- </noscript>
-</div>
-</body>
-</html>
-<!-- BUILD 49, Released at 2011-04-15_17-49-01, TAG hudson-WWHomepage-3.5-US-49, Subversion revision 469827 -->
-<!-- [2011-05-12 22:04:20] -->
diff --git a/wlauto/workloads/bbench/patches/bbench.js b/wlauto/workloads/bbench/patches/bbench.js
deleted file mode 100644
index 05e2900f..00000000
--- a/wlauto/workloads/bbench/patches/bbench.js
+++ /dev/null
@@ -1,177 +0,0 @@
-//Author: Anthony Gutierrez
-
-var bb_site = [];
-var bb_results = [];
-var globalSiteIndex = 0;
-var numWebsites = 9;
-var bb_path = document.location.pathname;
-var bb_home = "file:///" + bb_path.substr(1, bb_path.lastIndexOf("bbench") + 5);
-var num_iters = 0;
-var init = false;
-
-function generateSiteArray(numTimesToExecute) {
- for (i = 0; i < numTimesToExecute * numWebsites; i += numWebsites) {
- bb_site[i+0] = bb_home + "/sites/amazon/www.amazon.com/index.html";
- bb_site[i+1] = bb_home + "/sites/bbc/www.bbc.co.uk/index.html";
- bb_site[i+2] = bb_home + "/sites/cnn/www.cnn.com/index.html";
- bb_site[i+3] = bb_home + "/sites/craigslist/newyork.craigslist.org/index.html";
- bb_site[i+4] = bb_home + "/sites/ebay/www.ebay.com/index.html";
- bb_site[i+5] = bb_home + "/sites/google/www.google.com/index.html";
-// bb_site[i+6] = bb_home + "/sites/youtube/www.youtube.com/index.html";
- bb_site[i+6] = bb_home + "/sites/msn/www.msn.com/index.html";
- bb_site[i+7] = bb_home + "/sites/slashdot/slashdot.org/index.html";
- bb_site[i+8] = bb_home + "/sites/twitter/twitter.com/index.html";
-// bb_site[i+10] = bb_home + "/sites/espn/espn.go.com/index.html";
- }
-
- bb_site[i] = bb_home + "/results.html";
-}
-
-
-/* gets the URL parameters and removes from window href */
-function getAndRemoveURLParams(windowURL, param) {
- var regex_string = "(.*)(\\?)" + param + "(=)([0-9]+)(&)(.*)";
- var regex = new RegExp(regex_string);
- var results = regex.exec(windowURL.value);
-
- if (results == null)
- return "";
- else {
- windowURL.value = results[1] + results[6];
- return results[4];
- }
-}
-
-/* gets the URL parameters */
-function getURLParams(param) {
- var regex_string = "(.*)(\\?)" + param + "(=)([0-9]+)(&)(.*)";
- var regex = new RegExp(regex_string);
- var results = regex.exec(window.location.href);
-
- if (results == null)
- return "";
- else
- return results[4];
-}
-
-/* gets all the parameters */
-function getAllParams() {
- var regex_string = "(\\?.*)(\\?siteIndex=)([0-9]+)(&)";
- var regex = new RegExp(regex_string);
- var results = regex.exec(window.location.href);
- /*alert(" Result is 1: " + results[1] + " 2: " + results[2] + " 3: " + results[3]);*/
-
- if (results == null)
- return "";
- else
- return results[1];
-}
-
-/* sets a cookie */
-function setCookie(c_name, value) {
- var c_value = escape(value) + ";";
- document.cookie = c_name + "=" + c_value + " path=/";
-}
-
-/* gets a cookie */
-function getCookie(c_name) {
- var cookies = document.cookie.split(";");
- var i, x, y;
-
- for (i = 0; i < cookies.length; ++i) {
- x = cookies[i].substr(0, cookies[i].indexOf("="));
- y = cookies[i].substr(cookies[i].indexOf("=") + 1);
- x = x.replace(/^\s+|\s+$/g,"");
-
- if (x == c_name)
- return unescape(y);
- }
-}
-
-/* start the test, simply go to site 1. */
-function startTest(n, del, y) {
- //var start_time = (new Date()).getTime();
- //setCookie("PreviousTime", start_time);
-
- init = true;
-
- generateSiteArray(n);
- siteTest(bb_site[0], globalSiteIndex, new Date().getTime(), "scrollSize=" + y + "&?scrollDelay=" + del + "&?iterations=" + n + "&?" + "StartPage");
- //siteTest(bb_site[0], globalSiteIndex, new Date().getTime(), "scrollDelay=" + del + "&?iterations=" + n + "&?" + "StartPage");
- //goToSite(bb_site[0], new Date().getTime());
-}
-
-/* jump to the next site */
-function goToSite(site) {
- curr_time = new Date().getTime();
- setCookie("CGTPreviousTime", curr_time);
- site+="?CGTPreviousTime="+curr_time+"&";
- window.location.href = site;
-}
-
-/*
- the test we want to run on the site.
- for now, simply scroll to the bottom
- and jump to the next site. in the
- future we will want to do some more
- realistic browsing tests.
-*/
-function siteTest(nextSite, siteIndex, startTime, siteName) {
- if (!init) {
- var iterations = getURLParams("iterations");
- var params = getAllParams();
- var delay = getURLParams("scrollDelay");
- var verticalScroll = getURLParams("scrollSize");
- generateSiteArray(iterations);
- nextSite = bb_site[siteIndex] + params;
- }
- else {
- var delay = 500;
- var verticalScroll = 500;
- }
- var cgtPreviousTime = getURLParams("CGTPreviousTime");
- var load_time = 0;
- siteIndex++;
- if (siteIndex > 1) {
- cur_time = new Date().getTime();
-// alert("previous " + cgtPreviousTime + " foo " + getCookie("CGTPreviousTime"));
- load_time = (cur_time - cgtPreviousTime);
- setCookie("CGTLoadTime", load_time);
-// diff = cur_time-startTime;
-// alert("starttime "+startTime+" currtime "+ cur_time + " diff " + diff + "load_time " + load_time );
- }
- setTimeout(function() {
- scrollToBottom(0, verticalScroll, delay,load_time,
- function(load_time_param){
- cur_time = new Date().getTime();
- load_time = (cur_time - startTime);
- //load_time = (cur_time - getCookie("PreviousTime"));
- // alert("Done with this site! " + window.cur_time + " " + startTime + " " + window.load_time);
- //alert("Done with this site! " + window.cur_time + " " + getCookie("PreviousTime") + " " + window.load_time);
- //goToSite(nextSite + "?iterations=" + iterations + "&?" + siteName + "=" + load_time + "&" + "?siteIndex=" + siteIndex + "&" );
-// alert("loadtime in cookie="+ getCookie("CGTLoadTime")+" loadtime in var="+load_time_param);
- goToSite(nextSite + "?" + siteName + "=" + load_time_param + "&" + "?siteIndex=" + siteIndex + "&" );
- }
- );},(siteIndex > 1) ? 1000 : 0);
-}
-
-/*
- scroll to the bottom of the page in
- num_y pixel increments. may want to
- do some horizontal scrolling in the
- future as well.
-*/
-function scrollToBottom(num_x, num_y, del, load_time, k) {
- ++num_iters;
- var diff = document.body.scrollHeight - num_y * num_iters;
- //var num_scrolls = 0;
-
- if (diff > num_y) {
- //self.scrollBy(num_x, num_y);
- //setTimeout(function(){self.scrollBy(num_x, num_y); /*diff -= 100;*/ scrollToBottom(num_x, num_y, k);}, 2);
- setTimeout(function(){self.scrollBy(num_x, num_y); /*diff -= 100;*/ scrollToBottom(num_x, num_y, del, load_time,k);}, del);
- }
- else{
- k(load_time);
- }
-}
diff --git a/wlauto/workloads/bbench/patches/cnn.html b/wlauto/workloads/bbench/patches/cnn.html
deleted file mode 100755
index 1205c70e..00000000
--- a/wlauto/workloads/bbench/patches/cnn.html
+++ /dev/null
@@ -1,1293 +0,0 @@
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN""http://www.w3.org/TR/html4/loose.dtd"><html lang="en">
-<!-- Mirrored from www.cnn.com/ by HTTrack Website Copier/3.x [XR&CO'2010], Fri, 11 Feb 2011 04:23:33 GMT -->
-<head>
-<!--
- added this for bbench
--->
-
-<script type="text/javascript" src="../../../bbench.js"></script>
-
-<script type="text/javascript">
- var bb_start_time = new Date().getTime();
- var newSiteIndex = getURLParams("siteIndex");
- var allParams = getAllParams();
-</script>
-
-<script type="text/javascript"> var bb_start_time = new Date().getTime() </script>
-<title>CNN.com - Breaking News, U.S., World, Weather, Entertainment &amp; Video News</title> <meta http-equiv="refresh" content="1800;url=http://www.cnn.com/?refresh=1">
-<meta name="Description" content="CNN.com delivers the latest breaking news and information on the latest top stories, weather, business, entertainment, politics, and more. For in-depth coverage, CNN.com provides special reports, video, audio, photo galleries, and interactive guides.">
-<meta name="Keywords" content="CNN, CNN news, CNN.com, CNN TV, news, news online, breaking news, U.S. news, world news, weather, business, CNN Money, sports, politics, law, technology, entertainment, education, travel, health, special reports, autos, developing story, news video, CNN Intl">
-
-<meta http-equiv="content-type" content="text/html; charset=iso-8859-1">
-<link rel="Shortcut Icon" type="image/x-icon" href="favicon.ie9.ico" />
-
-<meta name="application-name" content="CNN" />
-
-<meta name="msapplication-tooltip" content="Breaking News, U.S., World, Weather, Entertainment and Video News" />
-
-<meta name="msapplication-task" content="name=Election Center;action-uri=http://www.cnn.com/POLITICS/election.2010/the.basics/;icon-uri=favicon.ie9.ico" />
-
-<meta name="msapplication-task" content="name=News Pulse;action-uri=http://newspulse.cnn.com/;icon-uri=favicon.ie9.ico" />
-
-<meta name="msapplication-task" content="http://www.cnn.com/name=iReport;action-uri=http://ireport.cnn.com;icon-uri=http://ireport.cnn.com/favicon.ico" />
-<meta http-equiv="X-UA-Compatible" content="IE=EmulateIE8">
-<link rel="search" type="application/opensearchdescription+xml" href="tools/search/cnncom.xml" title="CNN.com">
-<link rel="search" type="application/opensearchdescription+xml" href="tools/search/cnncomvideo.xml" title="CNN.com Video">
-
-<link rel="canonical" href="index.html">
-<meta name="viewport" content="width=1024">
-<!--
-<link rel="apple-touch-icon" href="../i.cdn.turner.com/cnn/.element/img/3.0/global/misc/apple-touch-icon.png">
--->
-
-
-<link rel="alternate" type="application/rss+xml" title="CNN - Top Stories [RSS]" href="../rss.cnn.com/rss/cnn_topstories.rss">
-<link rel="alternate" type="application/rss+xml" title="CNN - Recent Stories [RSS]" href="../rss.cnn.com/rss/cnn_latest.rss">
-
-<link rel="stylesheet" type="text/css" href="../i.cdn.turner.com/cnn/.element/css/3.0/common.css">
-<link rel="stylesheet" type="text/css" href="../i.cdn.turner.com/cnn/.element/css/3.0/main.css">
-<link rel="stylesheet" type="text/css" href="../i.cdn.turner.com/cnn/.element/css/3.0/connect/overlay0af9.css?20100421">
-<style type="text/css">
-* html #hdr-auth,
-* html #pmUserPanel
-{display:none}
-.cnn_shdcamtt1 .cnn_mtlplnode { height:124px; }
-</style>
-<link rel="stylesheet" type="text/css" href="../i.cdn.turner.com/cnn/.element/css/3.0/personalization0af9.css?20100421">
-
-<meta name="fb_app_id" content="80401312489"/>
-<meta property="fb:page_id" content="129343697106537"/>
-
-
-<script src="js/protoaculous.1.8.2.min.js" type="text/javascript"></script>
-
-<script src="js/main.js" type="text/javascript"></script>
- <script src="js/swfobject-2.2.js" type="text/javascript"></script>
-
-<script src="js/csiManager.js" type="text/javascript"></script>
-
-<script src="js/StorageManager.js?20100728" type="text/javascript"></script>
-<!--
-<script type="text/javascript" src="js/connect-lite.js"></script>
--->
-
-<script src="js/local.js?20101109" type="text/javascript"></script>
-
-
-<!--include virtual="/.element/ssi/auto/3.0/sect/MAIN/videojs.html"-->
-<script src="js/cvp_suppl.js?id=20100816" type="text/javascript"></script>
-<script src="js/cvp.js" type="text/javascript"></script><script src="js/fwjslib_1.1.js?version=1.1" type="text/javascript"></script>
-<script type="text/javascript">
- var cnnIsHomePage = true;
-</script>
-
-
-
-<script language="JavaScript" type="text/javascript">var cnnCurrTime = new Date(1297398104614); var cnnCurrHour = 23; var cnnCurrMin = 21; var cnnCurrDay='Thu';</script>
-
-
-
-<style type="text/css">
-#cnn_mtt1rgtarea .cnn_bulletbin .cnnWOOL {margin-right:4px;}
-
-#cnn_mtt1lftarea ul { list-style-type:none; } /* Fixes non link bullets in T1 blurb. 20100227 PBB */
-</style>
-
-<script type="text/javascript" src="js/frame.js"></script>
-<script type="text/javascript" src="js/ad_head0.js"></script>
-<script type="text/javascript" src="js/cnn_adspaces.js"></script>
-
-</head>
-
-<!--
-<body id="cnnMainPage">
--->
-<body id="cnnMainPage" onload="siteTest(bb_site[newSiteIndex] + allParams, newSiteIndex, bb_start_time, 'cnn')">
-
-<div id="cnn_ipadappbanner"></div>
-
-
-<script>
-
-if(html5Check) {
-
- Event.observe(window, 'load', function() {
- $('cnn_ipadappbanner').update('<div class="cnn_ipadappbanner1"><a href="http://itunes.apple.com/us/app/cnn-app-for-ipad/id407824176?mt=8"><img src="img/CNN_iPad_banner_980x50_120910.png" width="980" height="50" alt="" border="0"></a></div>');
-
- });
-
-}
-
-</script>
-
-<style>
-
-.cnn_ipadappbanner1 { padding:10px;text-align:center; }
-
-</style>
-
-
-
-<!-- begin header -->
-<div id="cnn_hdr">
- <div id="cnn_hdr-prompt" style="display:none;">
- <div class="hdr-wrap" id="cnn_hdr-promptcntnt">
- </div>
- </div>
- <div id="cnn_hdr-main">
- <div class="hdr-wrap">
- <div id="hdr-banner">
-
-
- <a id="hdr-banner-title" href="http://www.cnn.com/" title="">
- <img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/header/hdr-main.gif" width="119" height="82" alt="CNN" usemap="#cnn_hdrimap"/>
- </a>
-
- </div>
- <div id="hdr-editions">
- <ul>
- <li class="no-pad-left"><span>EDITION: &nbsp;U.S.</span></li>
- <li><a id="cnn_switchEdition_intl" href="http://edition.cnn.com/" title="CNN INTERNATIONAL">INTERNATIONAL</a></li>
- <li class="no-border"><a href="http://www.cnnmexico.com/" title="CNN M&Eacute;XICO">M&Eacute;XICO</a></li>
- </ul>
- <div id="cnn_hdr-editionS"><a href="javascript:cnn_initeditionhtml(3);">Set edition preference</a></div>
- </div>
- <div id="hdr-auth">
- <ul>
- <li><a href="javascript:void(0)" onclick="showOverlay('profile_signup_overlay');return false;" title="">Sign up</a></li>
- <li class="no-border no-pad-right"><a href="javascript:void(0)" onclick="showOverlay('profile_signin_overlay');return false;" title="">Log in</a></li>
- </ul>
- </div>
- <div id="hdr-search">
- <form method="get" action="http://www.cnn.com/search/" onsubmit="return cnnSearch(this);">
- <div class="ftr-search-datacntr">
-<div class="ftr-search-tfield"><input type="text" name="query" size="12" maxlength="40" value="" id="hdr-search-box"></div>
-<div class="ftr-search-sicon"><input type="image" src="img/btn_search_hp_text.gif" width="55" height="21" alt=""></div>
-</div>
- <input type="hidden" name="primaryType" id="cnnHeadSrchType" value="mixed">
- </form>
-
-<script>
-
-//Event.observe(window, 'load', function() {
- // $('hdr-search-box').focus();
- //});
-
-</script>
-
-<style>
-
-#hdr-editions a { text-decoration:none; }
-#cnn_hdr-editionS { text-align:left;clear:both; }
-#cnn_hdr-editionS a { text-decoration:none;font-size:10px;top:7px;line-height:12px;font-weight:bold; }
-#hdr-prompt-text b { display:inline-block;margin:0 0 0 20px; }
-#hdr-editions li { padding:0 10px; }
-
-#hdr-editions ul li.no-pad-left span { font-size:12px; }
-.hdr-arrow-intl, .hdr-arrow-us, .hdr-arrow-us2 { left:148px; }
-.hdr-arrow-us2 { left:180px; }
-
-</style>
-
- </div>
- </div>
- </div>
- <div id="cnn_hdr-nav">
- <ul id="us-menu">
- <li class="no-border"><a id="nav-home" class="nav-media no-border nav-on" href="http://www.cnn.com/" title="Breaking News, U.S., World Weather Entertainment and Video News from CNN.com">Home</a></li>
- <li class="no-border"><a id="nav-video" class="nav-media no-border" href="http://www.cnn.com/video/" title="Video Breaking News Videos from CNN.com">Video</a></li>
- <li class="no-border"><a id="nav-newspulse" class="nav-media" href="http://newspulse.cnn.com/" title="NewsPulse from CNN.com">NewsPulse<img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/header/nav-beta.gif" width="21" height="9" alt="" /></a></li>
- <li><a id="nav-us" href="http://www.cnn.com/US/" title="U.S. News Headlines Stories and Video from CNN.com">U.S.</a></li>
- <li><a id="nav-world" href="http://www.cnn.com/WORLD/" title="World News International Headlines Stories and Video from CNN.com">World</a></li>
- <li><a id="nav-politics" href="http://www.cnn.com/POLITICS/" title="Election and Politics News from CNN.com">Politics</a></li>
- <li><a id="nav-justice" href="http://www.cnn.com/JUSTICE/" title="Justice News Courts Celebrity Docket and Law News from CNN.com">Justice</a></li>
- <li><a id="nav-entertainment" href="http://www.cnn.com/SHOWBIZ/" title="Entertainment News Celebrities Movies and TV from CNN.com">Entertainment</a></li>
- <li><a id="nav-tech" href="http://www.cnn.com/TECH/" title="Technology Computers Internet and Personal Tech News from CNN.com">Tech</a></li>
- <li><a id="nav-health" href="http://www.cnn.com/HEALTH/" title="Health News Medicine Diet Fitness and Parenting from CNN.com">Health</a></li>
- <li><a id="nav-living" href="http://www.cnn.com/LIVING/" title="Living News Personal Work and Home from CNN.com">Living</a></li>
- <li><a id="nav-travel" href="http://www.cnn.com/TRAVEL/" title="Travel News Vacations Destinations and Video from CNN.com">Travel</a></li>
- <li><a id="nav-opinion" href="http://www.cnn.com/OPINION/" title="Opinion Editorial Analysis and Insight from CNN.com">Opinion</a></li>
- <li><a id="nav-ireport" href="http://ireport.cnn.com/" title="CNN iReport &ndash; Share your story, discuss the issues with CNN.com">iReport</a></li>
- <li><a id="nav-money" href="http://money.cnn.com/" title="Business financial personal finance news from CNNMoney"><span>Money</span><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/header/nav-arrow.gif" width="3" height="5" alt="" /></a></li>
- <li><a id="nav-sports" href="http://sportsillustrated.cnn.com/?xid=cnnnav" title="Breaking news real-time scores and daily analysis from Sports Illustrated SI.com"><span>Sports</span><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/header/nav-arrow.gif" width="3" height="5" alt="" /></a></li>
- </ul>
- </div>
-</div>
-<!-- end header -->
-
-<script language="javascript" type="text/javascript">
-<!--
-var cnnPageName = "CNN Home Page";
-var cnnSectionName = "CNN Home Page";
-var selectedEdition = allCookies['SelectedEdition'];
-//--></script>
-
-
-<!-- Tracking values -->
-
-<div align="center">
- <!-- this is where the breaking news CSI code will go -->
-<div id="cnnBannerContainer"></div>
-<!--
-<script type="text/javascript">
-CSIManager.getInstance().call('http://www.cnn.com/.element/ssi/www/breaking_news/3.0/banner.html','','cnnBannerContainer',cnnRenderDomesticBanner);
-</script>
--->
-<div id="cnnSetEditionContainer"></div>
-<div id="cnnMakeHPContainer"></div>
- </div>
-
-<script>
-cnnad_newTileIDGroup(new Array('970x66_top', 'custom_rgt'));
-</script>
-
-<div class="cnn_adunit970x66" align="center">
- <!-- ADSPACE: homepage/main/top.970x66 -->
-
-
-<!-- CALLOUT|http://ads.cnn.com/html.ng/site=cnn&cnn_pagetype=main&cnn_position=970x66_top&cnn_rollup=homepage&page.allowcompete=yes&params.styles=fs|CALLOUT -->
-<div id="ad-236573" align="center" style="padding: 0; margin: 0; border: 0;"></div>
-<!--
-<script type="text/javascript">
-cnnad_createAd("236573","http://ads.cnn.com/html.ng/site=cnn&amp;cnn_pagetype=main&amp;cnn_position=970x66_top&amp;cnn_rollup=homepage&amp;page.allowcompete=yes&amp;params.styles=fs","66","970");
-cnnad_registerSpace(236573,970,66);
-</script>
--->
-
-
-
-
-
-
-
-
-
-
-
-
-
-</div>
-
-
-<div align="center"><div id="cnn_maincntnr"> <div class="cnn_contentarea cnn_shdcamtt1"><div id="cnn_toptstmparea"><span>updated 11:17 p.m.EST, Thu February 10, 2011</span></div><div class="cnn_shdcaheader"></div><div id="cnn_maintopt1"><div id="cnn_maint1lftf"><div id="cnn_maintt1imgbul"> <div class="cnn_relpostn"><div class="cnn_mtt1img"><a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.protests/index.html?hpt=T1"><img src="../i2.cdn.turner.com/cnn/2011/WORLD/africa/02/10/egypt.protests/t1main.mubarak.suleiman.nile.jpg" width="250" height="250" alt="Mubarak speech leaves world wondering who's in charge" border="0" hspace="0" vspace="0"></a></div><div class="cnn_mtt1imgh"><div class="cnn_mtt1imghtitle"><h1><span><a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.protests/index.html?hpt=T1">Mubarak speech leaves world wondering who's in charge</a></span></h1></div></div></div><div class="cnn_mtt1content"><div id="cnn_mtt1lftarea"><p>&#8226; Mubarak addresses Egypt without resigning <br /> &#8226; <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypts.protests.suleiman/index.html">Powers delegated to vice president</a><br /> &#8226; <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.protests.us.reax/index.html">Obama urges Egypt to explain changes</a><br /> &#8226; Protests planned for Friday after prayers<br /> <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.protests/index.html">FULL STORY</a> | <a href="http://news.blogs.cnn.com/2011/02/10/egypt-unrest-mubarak-may-address-nation-party-chief-says/">BLOG</a> <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.protests/index.html?hpt=T1"> </a></p></div><div id="cnn_mtt1rgtarea"><ul class="cnn_bulletbin"> <li> <a href="http://news.blogs.cnn.com/2011/02/10/zakaria-mubarak-baiting-protestors/">Zakaria: Mubarak 'baiting' protesters</a></li> <li> <a href="http://www.cnn.com/video/#/video/world/2011/02/10/sot.mubarak.stepping.down.niletv">Mubarak: I won't depart</a> <a 
href="http://www.cnn.com/video/#/video/world/2011/02/10/sot.mubarak.stepping.down.niletv"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Video" border="0" width="16" height="10" class="cnnVideoIcon"></a> | <a href="http://www.cnn.com/2011/WORLD/meast/02/10/egypt.mubarak.address/index.html">Highlights</a></li> <li> <a href="http://www.cnn.com/2011/OPINION/02/10/opinion.roundup.egypt/index.html">Opinion: No one satisfied</a> | <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.cairo.scene/index.html">The scene</a></li> <li> <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.pivotal.moments/index.html">Pivotal moments in Egyptian uprising</a></li> <li> <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.protests.reax/index.html">Mohamed ElBaradei: 'Egypt will explode'</a></li> <li> <a href="http://www.cnn.com/2011/WORLD/africa/01/25/photos.egypt.protest/index.html">Photos</a> | <a href="http://ireport.cnn.com/ir-topic-stories.jspa?topicId=544395">Share images</a> | <span class="cnnLiveWOOL">LIVE:</span> <a href="javascript:cnnLiveVideo('/2');">Nile TV</a> <a href="javascript:cnnLiveVideo('/2');"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Live Video" border="0" width="16" height="10" class="cnnVideoIcon"></a></li> </ul></div><div class="cnn_clear"></div><div class="cnn_divline"></div></div> </div><div class="cnn_maint1dline"></div><div id="cnn_maintt2bul"><div class="cnn_mtt1content"> <!--include virtual="/.element/ssi/www/sect/3.0/MAIN/belowT2Boxes/afghanistan.html"--><h4>Latest news</h4><!--include virtual="/editionssi/sect/3.0/MAIN/elex/belowT2Title.html"--><ul class="cnn_bulletbin"><li><a href="http://politicalticker.blogs.cnn.com/2011/02/10/kyl-will-retire/?hpt=T2">No. 
2 GOP senator won't seek new term</a> </li><li><a href="http://money.cnn.com/2011/02/10/markets/dollar/index.htm?hpt=T2">IMF calls for dollar alternative</a> </li><li><a href="http://tech.fortune.cnn.com/2011/02/10/snapshot-of-an-apple-flash-crash/?hpt=T2">Apple's stock flash crash</a>&nbsp;<span class="cnnWOOL">Fortune</span></li><li><a href="http://religion.blogs.cnn.com/2011/02/10/three-philadelphia-priests-teacher-charged-with-sexually-abusing-boys/?hpt=T2">3 Philly priests charged with sex abuse</a> </li><li><a href="http://www.cnn.com/linkto/ticker.html?hpt=T2">Ticker: Cheney gets booed</a> </li><li><a href="http://www.cnn.com/2011/HEALTH/02/10/ohio.veterams/index.html?hpt=T2">VA clinic may have exposed vets to HIV</a> </li><li><a href="http://www.cnn.com/2011/US/02/10/california.einstein.granddaughter/index.html?hpt=T2">Einstein granddaughter wants a share</a> </li><li><a href="http://sportsillustrated.cnn.com/2011/writers/ian_thomsen/02/10/jerry.sloan.resigns/index.html?hpt=T2">NBA coach resigns after 23 years</a> &nbsp;<span class="cnnWOOL">SI</span></li> <li><a href="http://www.cnn.com/video/#/video/us/2011/02/10/dnt.prostitute.turned.teacher.WCBS?hpt=T2">Teacher outed over prostitute past</a><a href="http://www.cnn.com/video/#/video/us/2011/02/10/dnt.prostitute.turned.teacher.WCBS?hpt=T2"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Video" border="0" width="16" height="10" class="cnn_vidicon"></a> </li><li><a href="http://www.cnn.com/2011/SHOWBIZ/celebrity.news.gossip/02/10/lindsay.lohan.defense/index.html?hpt=T2">Lohan would consider a no-jail plea deal</a> </li> <li><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.sbt.lohan.dress.hln?hpt=T2">Hot mess over Lohan's dress</a><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.sbt.lohan.dress.hln?hpt=T2"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Video" border="0" width="16" height="10" 
class="cnn_vidicon"></a> </li><li><a href="http://www.people.com/people/package/article/0,,20395222_20465100,00.html" target="new">Wedding red for Prince William?</a> &nbsp;<span class="cnnWOOL">People</span></li><li><a href="http://insidetv.ew.com/2011/02/10/charlies-angels-has-its-bosley/" target="new">'Charlie's Angels' has its Bosley</a> &nbsp;<span class="cnnWOOL">EW</span></li><li><a href="http://news.blogs.cnn.com/?hpt=T2">This Just In: CNN's news blog</a> </li> </ul>
-
-<div class="cnn_mtt1more"><a href="http://newspulse.cnn.com/" class="cnn_mtpvmsbtn"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div>
-
-
-</div></div></div><div id="cnn_maintoplive"><!-- CVP START HTML --><script type="text/javascript">var playerOverRide = {headline : "What\'s next for Egyptian uprising?",headlineUrl : "\/2011\/WORLD\/africa\/02\/10\/egypt.whats.next\/index.html?hpt=C1",images : [{image : { width : "416", height : "234", text : "http:\/\/i2.cdn.turner.com\/cnn\/2011\/images\/02\/10\/c1main.egypt.protesters.afp.gi.jpg" }}]};</script><div class="cnn_relpostn"><div id="cnnCVP1"><div class="cnn_mtt1img"><img src="../i2.cdn.turner.com/cnn/2011/images/02/10/c1main.egypt.protesters.afp.gi.jpg" width="416" height="234" alt="" border="0"></div><div class="cnn_mtt1imgh"><div class="cnn_mtt1imghtitle"><h1><span><a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.whats.next/index.html?hpt=C1">What's next for Egyptian uprising?</a></span></h1></div></div><div id="cnnCVP2" class="cnn_mtt1plybttn"><div id="play_button"><a href="http://www.cnn.com/video/?/video/world/2011/02/10/ac.mubarak.stays.egypt.cnn" onclick="s_objectID='http://www.cnn.com/video/?/video/world/2011/02/10/ac.mubarak.stays.egypt.cnn';return false;"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div></div></div><script>$('cnnCVP2').onclick = function() { cnnLoadPLayer('world/2011/02/10/ac.mubarak.stays.egypt.cnn', 'cnnCVP1', '416x250_start', playerOverRide ); };$('cnnCVP2').onmouseover = function() { $('cnnCVP2').className = 'cnn_mtt1plybttn cnn_mtt1plybttnon'; };$('cnnCVP2').onmouseout = function() { $('cnnCVP2').className = 'cnn_mtt1plybttn'; };</script><!-- /CVP START HTML --><p>After morning prayers Friday, more Egyptian protesters are expected to arrive for what could be the largest demonstration yet. 
Some protesters say they fear &quot;a bloodbath.&quot; <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.whats.next/index.html">FULL STORY</a> | <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.protest.military/index.html">MILITARY NUDGE?</a> | <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.what.changed/index.html">WHAT CHANGED</a> <a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.whats.next/index.html?hpt=C1"> </a></p><div class="cnn_mc2hdrcntr cnn_mc2numhdr1"><div class="cnn_mc2header cnn_mc2hdr1"><h4><span> Don't miss</span></h4> <div><div class="cnn_divline"></div></div></div><div class="cnn_mc2header cnn_mc2hdr2"><h4><span></span></h4> <div><div class="cnn_divline"></div></div></div><div class="cnn_mc2header cnn_mc2hdr3"><h4><span></span></h4> <div><div class="cnn_divline"></div></div></div><div class="cnn_clear"></div></div><!-- /cnn_mc2hdrcntr --><div id="cnn_mtlplaylist" > <div class="cnn_mtlplnode "><div class="cnn_mtlplnimg"> <a href="http://www.cnn.com/2011/TRAVEL/02/10/ryanair.packing.tips/index.html?hpt=C2"><img src="../i2.cdn.turner.com/cnn/2011/TRAVEL/02/10/ryanair.packing.tips/tzvids.oleary.afp.gi.jpg" width="120" height="68" alt="Ryanair strikes back at 'mutiny' students" border="0"></a></div><a href="http://www.cnn.com/2011/TRAVEL/02/10/ryanair.packing.tips/index.html?hpt=C2">Ryanair strikes back at 'mutiny' students</a> </div> <div class="cnn_mtlplnode "><div class="cnn_mtlplnimg"> <a href="http://www.cnn.com/2011/TECH/mobile/02/10/cell.phone.perks/index.html?hpt=C2"><img src="../i2.cdn.turner.com/cnn/2011/TECH/mobile/02/10/cell.phone.perks/tzvids.verizon.iphone.gi.jpg" width="120" height="68" alt="Rivals' deals combat Verizon iPhone" border="0"></a></div><a href="http://www.cnn.com/2011/TECH/mobile/02/10/cell.phone.perks/index.html?hpt=C2">Rivals' deals combat Verizon iPhone</a> </div> <div class="cnn_mtlplnode "><div class="cnn_mtlplnimg"> <a 
href="http://ireport.cnn.com/ir-topic-stories.jspa?topicId=542414&amp;hpt=C2"><img src="../i2.cdn.turner.com/cnn/2011/images/02/10/tzvids.hallpass.jpg" width="120" height="68" alt="Got a question for 'Hall Pass' cast?" border="0"></a></div><a href="http://ireport.cnn.com/ir-topic-stories.jspa?topicId=542414&amp;hpt=C2">Got a question for 'Hall Pass' cast?</a> </div> <div class="cnn_mtlplnode "><div class="cnn_mtlplnimg"> <a href="http://www.cnn.com/2011/OPINION/02/10/borger.unruly.republicans/index.html?hpt=C2"><img src="../i2.cdn.turner.com/cnn/2011/OPINION/02/10/borger.unruly.republicans/tzvids.gloria.borger.cnn.jpg" width="120" height="68" alt="Borger: Beauty of unruly Republicans" border="0"></a></div><a href="http://www.cnn.com/2011/OPINION/02/10/borger.unruly.republicans/index.html?hpt=C2">Borger: Beauty of unruly Republicans</a> </div> <div class="cnn_mtlplnode "><div class="cnn_mtlplnimg"> <a href="http://www.cnn.com/video/#/video/showbiz/2011/02/10/sbt.jennifer.hudson.oprah.HLN?hpt=C2"><img src="../i2.cdn.turner.com/cnn/2011/images/02/10/hudson219da.jpg?hpt=C2" width="120" height="68" alt="Jennifer Hudson: 'I've lost 80 pounds'" border="0"></a><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/showbiz/2011/02/10/sbt.jennifer.hudson.oprah.HLN?hpt=C2"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div></div> <a href="http://www.cnn.com/video/#/video/showbiz/2011/02/10/sbt.jennifer.hudson.oprah.HLN?hpt=C2">Jennifer Hudson: 'I've lost 80 pounds'</a> <span>1:13</span> </div> <div class="cnn_mtlplnode "><div class="cnn_mtlplnimg"> <a href="http://www.cnn.com/2011/SHOWBIZ/Movies/02/09/iconic.movie.kisses.instyle/index.html?hpt=C2"><img src="../i2.cdn.turner.com/cnn/2011/SHOWBIZ/Movies/02/09/iconic.movie.kisses.instyle/tzvids.twilight.kiss.jpg" width="120" height="68" alt="The 10 most iconic movie kisses" border="0"></a></div><a 
href="http://www.cnn.com/2011/SHOWBIZ/Movies/02/09/iconic.movie.kisses.instyle/index.html?hpt=C2">The 10 most iconic movie kisses</a> </div> <div class="cnn_mtlplnode "><div class="cnn_mtlplnimg"> <a href="http://www.cnn.com/2011/SHOWBIZ/02/10/spiderman.ff.go/index.html?hpt=C2"><img src="../i2.cdn.turner.com/cnn/2011/SHOWBIZ/02/10/spiderman.ff.go/tzvids.ff.jpg" width="120" height="68" alt="Spider-Man replacing Human Torch" border="0"></a></div><a href="http://www.cnn.com/2011/SHOWBIZ/02/10/spiderman.ff.go/index.html?hpt=C2">Spider-Man replacing Human Torch</a> </div> <div class="cnn_mtlplnode "><div class="cnn_mtlplnimg"> <a href="http://www.cnn.com/video/#/video/showbiz/2011/02/10/behar.jets.qb.alleged.hookup.hln?hpt=C2"><img src="../i2.cdn.turner.com/cnn/2011/images/02/10/jets19da.jpg?hpt=C2" width="120" height="68" alt="Did Jets player 'hook up' with teen?" border="0"></a><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/showbiz/2011/02/10/behar.jets.qb.alleged.hookup.hln?hpt=C2"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div></div> <a href="http://www.cnn.com/video/#/video/showbiz/2011/02/10/behar.jets.qb.alleged.hookup.hln?hpt=C2">Did Jets player 'hook up' with teen?</a> <span>2:20</span> </div> <div class="cnn_mtlplnode "><div class="cnn_mtlplnimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.pn.cyber.marriage.hln?hpt=C2"><img src="../i2.cdn.turner.com/cnn/2011/images/02/10/married19da.jpg?hpt=C2" width="120" height="68" alt="Man accused of sex with cyber 'wife', 13" border="0"></a><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.pn.cyber.marriage.hln?hpt=C2"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div></div> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.pn.cyber.marriage.hln?hpt=C2">Man accused of sex with cyber 'wife', 13</a> <span>7:29</span> </div><!-- closing c2 wrapper 
--><div class="cnn_clear"></div></div><div class="cnn_pad18top"></div><!-- C2 Layout: 1a --><!--include virtual="/.element/ssi/sect/3.0/MAIN/elections/belowC2.html"--></div><div class="cnn_clearmt1t2"></div></div><div id="cnn_maintopprofile"><div class="cnn_shdcontent"> <!-include virtual="/editionssi/sect/3.0/MAIN/feedback.html" -->
-<div>
- <span id="medium_rectangle" class="_fwph">
- <form id="_fw_form_medium_rectangle" style="display:none">
- <input type="hidden" name="_fw_input_medium_rectangle" id="_fw_input_medium_rectangle" value="w=300&h=250&envp=g_js&sflg=-nrpl;">
- </form>
- <span id="_fw_container_medium_rectangle" class="_fwac">
- <!-- LEAVE THIS AREA EMPTY --><!-- ADSPACE: homepage/main/custom_rgt -->
-<!-- CALLOUT|http://ads.cnn.com/html.ng/site=cnn&cnn_pagetype=main&cnn_position=custom_rgt&cnn_rollup=homepage&page.allowcompete=yes&params.styles=fs|CALLOUT -->
-<div id="ad-125242" align="center" style="padding: 0; margin: 0; border: 0;"></div>
-<!--
-<script type="text/javascript">
-cnnad_createAd("125242","http://ads.cnn.com/html.ng/site=cnn&amp;cnn_pagetype=main&amp;cnn_position=custom_rgt&amp;cnn_rollup=homepage&amp;page.allowcompete=yes&amp;params.styles=fs","0","0");
-cnnad_registerSpace(125242,0,0);
-</script>
--->
- </span>
- </span>
-</div>
-
-<div class="cnn_adtitle"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/misc/advertisement.gif" width="58" height="5" alt="" border="0"></div>
- <div class="cnn_divline"></div>
-
-<div id="pmContainer">
-
- <!-- ### USER PANEL ### -->
- <div id="pmUserPanel">
- <img id="avatarImg" src="../i.cdn.turner.com/cnn/.element/img/3.0/personalization/35x35_generic_avatar.gif" alt="" height="35" width="35">
- <div id="pmLoggedIn" style="display: none;">
- <span id="pmUserName"></span>
- <span id="pmUserOpts"><a id="pmProfile" href="http://www.cnn.com/profile/?setTab=mysettings">settings</a></span>
- <div id="cnnUseFB" style="display:none"><a href="javascript:FB.login(fbSessionHandler);">Connect your CNN &amp; Facebook accounts</a></div>
- </div>
- <div id="pmLoggedOff">
- <span>Hi! <a href="javascript:CNN_handleOverlay('profile_signin_overlay')" >Log in</a> or <a href="javascript:CNN_handleOverlay('profile_signup_overlay')">sign up</a> to personalize!</span>
- </div>
- <div id="pmFBLoggedIn" style="display:none";>
- <span>Hi there!<div><a href="javascript:CNN_handleOverlay('profile_signin_overlay')" >Log in</a> | <a href="javascript:CNN_handleOverlay('profile_signup_overlay')">sign up</a></div></span>
- </div>
-
- </div>
-
-
- <!-- ### ACCORDION START ### -->
- <div id="pmSlidebox" class="accordion">
-
- <!-- startof "facebook activity" slide -->
- <!--[if lte IE 6]>
-<style>
-#cnn_maintopprofile #pmFacebook,
-#cnn_maintopprofile #pmFacebookToggle {
- display: none;
-}
-</style>
-<![endif]-->
-<div id="pmFacebookToggle" class="accordion-toggle">
- <span class="accTitle" id="pmFbTitle">Popular on Facebook</span>
- <span class="accArrow">&nbsp;</span>
-</div>
-<!-- startof "activity" slide -->
-<div id="pmFacebook" class="accordion-content">
-
- <div id="cnn_FBawOLY" class="cnn_dynone">
- <div class="cnn_FBawo1"><a href="javascript:CNN_FBpsnlzdOC();"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/personalization/close_bt.gif" width="11" height="11" alt="" border="0"></a></div>
- <div id="cnn_FBawOLYM" class="cnn_FBawo2"></div>
- </div>
-
- <div id="cnn_FBawDIM" class="cnn_dynone"></div>
-
- <div id="facebookUpsell" style="display:none;font-size: 9px; color: #666; background: #f4f4f4;">
- <p><a href="javascript: void(0);" onclick="FB.login(fbSessionHandler); return false;">Log in with Facebook</a> to see your friends' activity | <a href="javascript:CNN_FBpsnlzdOO();">What's this?</a></p>
- </div>
- <div id="facebookConnected" style="display:none;font-size: 9px; color: #666; background: #f4f4f4;">
- <p><a href="http://www.cnn.com/profile/?setTab=fbfriends">View more friends' activity</a> | <a href="javascript:CNN_FBpsnlzdOO();">What's this?</a></p>
- </div>
- <div id="pmTargetContainer">
- <div id="pmFacebookTab" class="pmOn">
-
- </div>
- <div id="pmActivityTab" class="pmOff">
- <div id="pmActList" class="makeScroll">
- <!-- class="makeScroll" -->
- <ul><li></li></ul>
- </div>
- <!-- scroll bar -->
- <div id="pmActListTrack">
- <div id="trackThumb">
- &nbsp;
- </div>
- </div>
- <!-- end scroll bar -->
- </div>
- </div>
-</div>
-<!-- endof "activity" slide -->
-
- <!-- end of "facebook" slide -->
-
- <!-- startof "newspulse" slide -->
- <div id="pmNewspulseToggle" class="accordion-toggle">
- <span class="accTitle">Newspulse</span>
- <span class="accArrow">&nbsp;</span>
-</div>
-<!-- startof "activity" slide -->
-<div id="pmNewspulse" class="accordion-content" style="height: 194px; display: none;">
- <!--div class="pmSectHead">
- <a id="nsNewsBtn" class="togOn"></a>
- <a id="nsActBtn" class="togOff"></a>
- </div-->
- <div id="pmTargetContainer">
- <div id="pmNewsPulseTab" class="pmOn">
- <h4 class="pmNplsSubhead">Most popular stories right now</h4>
- <ul>
- <li class="pmNsStory">
- <div class="pmNsHeadline"><a href="http://www.cnn.com/2011/US/02/10/california.einstein.granddaughter/index.html">Granddaughter wants share of Einstein profits</a></div>
- <div class="pmNsPopularity"><div style="width: 99.469%" class="pmNsPopImage"></div></div>
-</li>
-<li class="pmNsStory">
- <div class="pmNsHeadline"><a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.protests/index.html">Egypt's Mubarak refuses to stand down</a></div>
- <div class="pmNsPopularity"><div style="width: 59.177%" class="pmNsPopImage"></div></div>
-</li>
-<li class="pmNsStory">
- <div class="pmNsHeadline"><a href="http://www.cnn.com/2011/SHOWBIZ/Movies/02/09/iconic.movie.kisses.instyle/index.html">The 10 most iconic movie kisses</a></div>
- <div class="pmNsPopularity"><div style="width: 58.944%" class="pmNsPopImage"></div></div>
-</li>
-<li class="pmNsStory">
- <div class="pmNsHeadline"><a href="http://www.cnn.com/2011/TRAVEL/02/10/ryanair.packing.tips/index.html">Ryanair strikes back at 'mutiny' students</a></div>
- <div class="pmNsPopularity"><div style="width: 55.245%" class="pmNsPopImage"></div></div>
-</li>
-
- </ul>
- </div>
- <div id="pmActivityTab" class="pmOff">
- <div id="pmActList" class="makeScroll">
- <!-- class="makeScroll" -->
- <ul><li></li></ul>
- </div>
- <!-- scroll bar -->
- <div id="pmActListTrack">
- <div id="trackThumb">
- &nbsp;
- </div>
- </div>
- <!-- end scroll bar -->
- </div>
- </div>
- <div id="pmExploreNS">
- <a href="http://newspulse.cnn.com/">Explore the news with NewsPulse<span>&#187;</span></a>
- </div>
-</div>
-<!-- endof "activity" slide -->
-
- <!-- end of "newspulse" slide -->
-
- <!-- startof "weather" slide -->
- <div id="pmWeatherToggle" class="accordion-toggle">
- <script type="text/javascript">
- var weatherTitle = (location.pathname.indexOf('profile') > -1) ? 'News' : 'Weather &amp; News';
- </script>
- <span class="accTitle">Local&nbsp;
- <script type="text/javascript">document.write(weatherTitle);</script>
- </span>
- <span class="accArrow">&nbsp;</span>
- </div>
- <div id="pmWeather" class="accordion-content" style="height: 194px; display: none;">
-
- <!-- Default Search Box -->
- <div id="cnnGetLocalBox" style="display: none;">
- <form onsubmit="MainLocalObj.Weather.checkInput('weather',this.inputField.value);return false;" id="pmWeatherLocation" name="localAllLookupForm" action="#">
- <fieldset>
- <div class="pmWrapper">
- <label for="weatherLoc">
- Edit location
- </label>
- <input type="text" id="weatherLoc" class="pmWeatherHollow" name="inputField" value="Enter a U.S. Zip or Intl city" onfocus="MainLocalObj.Weather.inputFocus(this);" onblur="MainLocalObj.Weather.inputBlur(this);"><a id="weatherLocBtn" href="javascript:MainLocalObj.Weather.checkInput('weather', document.localAllLookupForm.inputField.value);"><span>Go</span></a>
- </div>
- <div id="pmLocResultsContainer"></div>
- </fieldset>
- </form>
- </div>
- <!-- End Default Search Box -->
-
- <!-- Weather Content -->
- <div class="pmWrapper" id="pmWeatherTab"></div>
- <!-- End Weather Content -->
-
- <!-- News Details -->
- <div id="pmWeatherHeadlines">
- <ul id="pmWeatherHeadlinesList"><li></li></ul>
- <div id="pmInfoSource"></div>
- </div>
- <!-- End News Details -->
- </div>
-
- <!-- endof "weather" slide -->
-
- <!-- startof "sports" slide -->
- <div id="pmSportsToggle" class="accordion-toggle">
- <span class="accTitle">Sports</span>
- <span class="accArrow">&nbsp;</span>
- </div>
- <!-- startof "sports" slide -->
- <div id="pmSports" class="accordion-content" style="height: 194px; display: none;">
- <div id="sectHead">
- See the latest Scoreboard for:
- </div>
- <div id="sportBtns" class="pmWrapper"></div>
- <div id="pmScoreHead" class="pmWrapper">
- <p class="pmScoreboard">
- Scoreboard
- </p>
- <p class="pmChooseTeam">
- <a id="pmSportsChooseBtn" class="pmOverlayChooseEdit"></a>&nbsp;my <span class="leagueChoose"></span>&nbsp;teams
- </p>
- </div>
- <ul id="pmScores"><li></li></ul>
- <ul id="pmNoGames">
- <li class="top">
- <p>No scheduled games in the next 24 hours. Catch up on the latest headlines:</p>
- </li>
- <li id="pmSportsHeadlines">
- <ul id="pmNoGamesHeadlines"><li></li></ul>
- </li>
- </ul>
- <div id="pmSportsMore">
- <p>
- <a id="moreLeagueLink" href="#"></a>&nbsp;&nbsp;|&nbsp;&nbsp;<a href="http://si.com/?xid=cnnwidget"><span>SI.com</span></a>
- </p>
- </div>
-
- <div id="choseTeamsOverlay" style="display: none;"></div>
- <div id="choseTeamsOverlayBox" style="display: none;">
- <a href="javascript:MainLocalObj.Sports.toggleSportsOverlay();" class="pmOverlayClose">&nbsp;</a>
- <div class="pmWrapper">
- <div class="pmOverlayHeader">
- <p class="pmOverlayTitle">
- <span class="pmOverlayChooseEdit"></span>&nbsp;my&nbsp;<span class="leagueChoose"></span>&nbsp;teams:
- </p>
- </div>
- <div class="pmTeamList">
- <div class="pmTeam">
- <div class="pmTeamNumTitle">First team</div>
- <div class="pmTeamInfo"></div>
- </div>
- <div class="pmTeam">
- <div class="pmTeamNumTitle">Second team</div>
- <div class="pmTeamInfo"></div>
- </div>
- </div>
- <div class="pmSaveTeams">
- <a class="pmCancelSave" href="javascript:MainLocalObj.Sports.toggleSportsOverlay();">No, cancel</a>
- <a class="pmSaveBtn" href="#"><span>Save</span></a>
- </div>
- </div>
- </div>
-
- </div>
-
- <!-- endof "sports" slide -->
-
- <!-- startof "market" slide -->
- <div id="pmMarketsToggle" class="accordion-toggle">
- <span class="accTitle">Markets</span>
- <span class="accArrow">&nbsp;</span>
- </div>
- <div id="pmMarkets" class="accordion-content" style="height: 194px; display: none;">
-
-<div class="pmSectHead">
- <span id="marketStatus">Markets Closed</span>
- <a id="cnnMoney" href="http://money.cnn.com/data/markets">CNNMoney.com &#187;</a>
-</div>
-<div id="pmIndecies">
- <div class="pmWrapper">
- <div class="marketTime">
- <span>Updated&nbsp;</span>
- <span id="indexUpdated">5:16 pm ET Feb 10</span>
- </div>
- <a id="myQuotesBtn" class="toggle togOff"><span>My quotes</span></a>
-
- <a id="defIndexBtn" class="toggle togOn"><span>Indexes</span></a>
- </div>
- <div id="pmDefaultIndecies" class="tab pmOn">
- <ul>
-
-
- <li class="market-0">
- <div class="marketInfo-left"> <!-- left side -->
- <span class="marketName">
- <a href="http://money.cnn.com/data/markets/dow/">Dow</a>
- </span>
- <span class="marketIndex">12,229.29</span>
- </div>
- <div class="marketNums-right down">
- <div class="percentDiff">
- <span>(<span class="plusMinus">-</span>0.09 &#37;)</span>
- </div>
-
- <div class="numDiff">
- <span><span class="plusMinus">-</span>10.6</span>
- </div>
- </div>
- </li>
-
- <li class="market-1">
- <div class="marketInfo-left"> <!-- left side -->
- <span class="marketName">
- <a href="http://money.cnn.com/data/markets/nasdaq/">Nasdaq</a>
- </span>
- <span class="marketIndex">2,790.45</span>
- </div>
- <div class="marketNums-right up">
- <div class="percentDiff">
- <span>(<span class="plusMinus">+</span>0.05 &#37;)</span>
- </div>
-
- <div class="numDiff">
- <span><span class="plusMinus">+</span>1.38</span>
- </div>
- </div>
- </li>
-
- <li class="market-2">
- <div class="marketInfo-left"> <!-- left side -->
- <span class="marketName">
- <a href="http://money.cnn.com/data/markets/sandp/">S&amp;P</a>
- </span>
- <span class="marketIndex">1,321.87</span>
- </div>
- <div class="marketNums-right up">
- <div class="percentDiff">
- <span>(<span class="plusMinus">+</span>0.07 &#37;)</span>
- </div>
-
- <div class="numDiff">
- <span><span class="plusMinus">+</span>0.99</span>
- </div>
- </div>
- </li>
-
- </ul>
- </div>
- <div id="pmMyQuotes" class="tab pmOff"></div>
-
-
- <form id="symbolSearch" action="#" onsubmit="MainLocalObj.Markets.lookupStockSymbol(); return false">
- <div id="searchLine">
- <input name="symb" id="searchQuote" type="text" class="formInput" AUTOCOMPLETE="OFF" /><a id="getSymbolBtn" href="javascript: MainLocalObj.Markets.lookupStockSymbol();"><span>Get Quotes</span></a>
- <div id="myContainer"></div>
- </div>
- </form>
- <div id="moneySponsor"></div>
-
-</div>
- </div>
-
- <!-- end of "market" slide -->
-
- </div>
- <!-- ### ACCORDION END ### -->
-
-</div>
-<!--include virtual="/editionssi/sect/3.0/MAIN/feedback.html" -->
-
-<!-- shared javascript -->
-<!--
-<script type="text/javascript">
- Event.observe(window, 'load', function() {
- MainLocalObj.init();
- });
-</script>
--->
-
-
-
-<div class="cnn_pad18top"></div>
- </div></div><!-- /cnn_maintopprofile --><div class="cnn_clear"></div><div class="cnn_shdcafooter"></div></div> <div class="cnn_pad2top cnn_shdspc"></div><div class="cnn_contentarea cnn_filterareabox"><div class="cnn_sdbx"><div class="cnn_sdbx1"><div class="cnn_sdbx2"><div class="cnn_sdbx3"><div class="cnn_sdbx4"><div class="cnn_sdbx5"><div class="cnn_sdbxcntnt"><div style="width:980px;"><div class="cnn_fabheader"><div class="cnn_fabh1">From our shows</div><div id="cnnGalleryTabs" class="cnn_fabh2"></div><div class="cnn_clear"></div></div><div id="cnn_fabcontent" class="cnn_fabcontentarea"><div id="cnn_fabcprev" class="cnn_fabcprv_off"></div><div class="cnn_fabcaholder"><div class="cnn_fabcawindow"><div class="cnn_fabcaslide"><div id="cnn_GalleryViewPort"><div id="cnn_GallerySliderContainer"><div class="cnn_fabcaslab"> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/crime/2011/02/10/ng.americas.missing.kleeschulte.hln?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="America's Missing: Scott Kleeschulte, 9" border="0" style="background:transparent url('img/ng.americas.missing.kleeschulte.hln.120x68.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/crime/2011/02/10/ng.americas.missing.kleeschulte.hln?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/crime/2011/02/10/ng.americas.missing.kleeschulte.hln?hpt=Mid"><span>America's Missing: Scott Kleeschulte, 9</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.am.intv.holmes.torres.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" 
alt="American Morning: Dara ready for 2012" border="0" style="background:transparent url('img/exp.am.intv.holmes.torres.cnn.120x68.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.am.intv.holmes.torres.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.am.intv.holmes.torres.cnn?hpt=Mid"><span>American Morning: Dara ready for 2012</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/health/2011/02/10/nr.vets.hiv.exposure.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="Newsroom: Vets exposed to HIV" border="0" style="background:transparent url('img/dentist.jpg?hpt=Mid') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/health/2011/02/10/nr.vets.hiv.exposure.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/health/2011/02/10/nr.vets.hiv.exposure.cnn?hpt=Mid"><span>Newsroom: Vets exposed to HIV</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/showbiz/2011/02/09/behar.internet.porn.hln?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="Behar: Porn makes men 'fake it'?" 
border="0" style="background:transparent url('img/porn.jpg?hpt=Mid') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/showbiz/2011/02/09/behar.internet.porn.hln?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/showbiz/2011/02/09/behar.internet.porn.hln?hpt=Mid"><span>Behar: Porn makes men 'fake it'?</span></a></div></div></div><div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg" id="gallery_0_ad_0"></div></div><div id="gallery_0_ad_0_adgif" class="cnn_fabcatad"></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.tsr.todd.egypt.constitution.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="Situation Room: Egypt constitution" border="0" style="background:transparent url('img/exp.tsr.todd.egypt.constitution.cnn.120x68.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.tsr.todd.egypt.constitution.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.tsr.todd.egypt.constitution.cnn?hpt=Mid"><span>Situation Room: Egypt constitution</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.nr.lee.new.midlife.crisis.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="Newsroom: Rep. 
Lee apologizes" border="0" style="background:transparent url('img/exp.nr.lee.new.midlife.crisis.cnn.120x68.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.nr.lee.new.midlife.crisis.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.nr.lee.new.midlife.crisis.cnn?hpt=Mid"><span>Newsroom: Rep. Lee apologizes</span></a></div></div></div></div> <div class="cnn_fabcaslab"> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.am.intv.chetry.jordan.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="American Morning: Coach gives kidney" border="0" style="background:transparent url('img/exp.am.intv.chetry.jordan.cnn.120x68.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.am.intv.chetry.jordan.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/10/exp.am.intv.chetry.jordan.cnn?hpt=Mid"><span>American Morning: Coach gives kidney</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.ac.muslim.brotherhood.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="90" alt="AC360: The Muslim Brotherhood" border="0" style="background:transparent url('img/exp.ac.muslim.brotherhood.cnn.120x90.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a 
href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.ac.muslim.brotherhood.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.ac.muslim.brotherhood.cnn?hpt=Mid"><span>AC360: The Muslim Brotherhood</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.piers.trump.on.women.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="Piers Morgan: Trump on women" border="0" style="background:transparent url('img/tzvids.trump.piers.set.2.jpg?hpt=Mid') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.piers.trump.on.women.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.piers.trump.on.women.cnn?hpt=Mid"><span>Piers Morgan: Trump on women</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/world/2011/02/09/bs.farc.release.hostage.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="BackStory: FARC releases hostage" border="0" style="background:transparent url('img/bs.farc.release.hostage.cnn.120x68.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/world/2011/02/09/bs.farc.release.hostage.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a 
href="http://www.cnn.com/video/#/video/world/2011/02/09/bs.farc.release.hostage.cnn?hpt=Mid"><span>BackStory: FARC releases hostage</span></a></div></div></div><div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg" id="gallery_0_ad_1"></div></div><div id="gallery_0_ad_1_adgif" class="cnn_fabcatad"></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/crime/2011/02/09/ng.americas.missing.skelton.hln?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="America's Missing: Skelton brothers" border="0" style="background:transparent url('img/ng.americas.missing.skelton.hln.120x68.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/crime/2011/02/09/ng.americas.missing.skelton.hln?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/crime/2011/02/09/ng.americas.missing.skelton.hln?hpt=Mid"><span>America's Missing: Skelton brothers</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/showbiz/2011/02/09/behar.brian.dunkleman.int.hln?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="Behar: 'Idol's' one season co-host" border="0" style="background:transparent url('img/behar.brian.dunkleman.int.hln.120x68.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/showbiz/2011/02/09/behar.brian.dunkleman.int.hln?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a 
href="http://www.cnn.com/video/#/video/showbiz/2011/02/09/behar.brian.dunkleman.int.hln?hpt=Mid"><span>Behar: 'Idol's' one season co-host</span></a></div></div></div></div> <div class="cnn_fabcaslab"> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.jk.christopher.lee.resigns.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="JKUSA: 'Craigslist' scandal" border="0" style="background:transparent url('img/exp.jk.christopher.lee.resigns.cnn.120x68.jpg') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.jk.christopher.lee.resigns.cnn?hpt=Mid"><img border="0" src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.jk.christopher.lee.resigns.cnn?hpt=Mid"><span>JKUSA: 'Craigslist' scandal</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/world/2011/02/08/tsr.blitzer.korea.preview.tues.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="Situation Room: Inside North Korea" border="0" style="background:transparent url('img/blitzer.new.120x68.jpg?hpt=Mid') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/world/2011/02/08/tsr.blitzer.korea.preview.tues.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/world/2011/02/08/tsr.blitzer.korea.preview.tues.cnn?hpt=Mid"><span>Situation Room: Inside North Korea</span></a></div></div></div> <div class="cnn_fabcatz"><div 
class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.cho.bath.salt.crackdown.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="American Morning: 'Bath salt' drugs" border="0" style="background:transparent url('img/metrics.gif') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.cho.bath.salt.crackdown.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.cho.bath.salt.crackdown.cnn?hpt=Mid"><span>American Morning: 'Bath salt' drugs</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.jvm.relabel.rape.victims.hln?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="Issues: Rape victims relabeled?" 
border="0" style="background:transparent url('img/rapelabel120x68.jpg?hpt=Mid') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.jvm.relabel.rape.victims.hln?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.jvm.relabel.rape.victims.hln?hpt=Mid"><span>Issues: Rape victims relabeled?</span></a></div></div></div><div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg" id="gallery_0_ad_2"></div></div><div id="gallery_0_ad_2_adgif" class="cnn_fabcatad"></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/living/2011/02/09/howard.movie.rental.mess.hln?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="Clark Howard: Movie rental mess" border="0" style="background:transparent url('img/rental120x68.jpg?hpt=Mid') 0 0 no-repeat;display:none;" class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/living/2011/02/09/howard.movie.rental.mess.hln?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/living/2011/02/09/howard.movie.rental.mess.hln?hpt=Mid"><span>Clark Howard: Movie rental mess</span></a></div></div></div> <div class="cnn_fabcatz"><div class="cnn_relpostn"><div class="cnn_fabcatimg"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.am.hippo.chase.cnn?hpt=Mid"><img src="../i2.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="120" height="68" alt="American Morning: Hippo chases boat" border="0" style="background:transparent url('img/metrics.gif') 0 0 no-repeat;display:none;" 
class="cnnContentImg"></a></div><div class="cnn_vidplyb38x38"><a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.am.hippo.chase.cnn?hpt=Mid"><img border="0" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif"/></a></div><div class="cnn_fabcattxt"> <a href="http://www.cnn.com/video/#/video/bestoftv/2011/02/09/exp.am.hippo.chase.cnn?hpt=Mid"><span>American Morning: Hippo chases boat</span></a></div></div></div></div> </div></div></div></div></div><div id="cnn_fabcnext" class="cnn_fabcnxt"></div><div class="cnn_clear"></div></div></div></div></div></div></div></div></div></div></div> <script type="text/javascript">
- var CNN_gallery_0_ad_0 = "/cnn_adspaces/3.0/homepage/main/bot1.120x90.ad"; //path to ad file
- var CNN_gallery_0_ad_1 = "/cnn_adspaces/3.0/homepage/main/bot2.120x90.ad"; //path to ad file
- var CNN_gallery_0_ad_2 = "/cnn_adspaces/3.0/homepage/main/bot3.120x90.ad"; //path to ad file
-
- var CNN_gallery_1_ad_0 = "/cnn_adspaces/3.0/homepage/main/bot4.120x90.ad"; //path to ad file
- var CNN_gallery_1_ad_1 = "/cnn_adspaces/3.0/homepage/main/bot5.120x90.ad"; //path to ad file
- var CNN_gallery_1_ad_2 = "/cnn_adspaces/3.0/homepage/main/bot6.120x90.ad"; //path to ad file
-
- var CNN_gallery_2_ad_0 = "/cnn_adspaces/3.0/homepage/main/bot7.120x90.ad"; //path to ad file
- var CNN_gallery_2_ad_1 = "/cnn_adspaces/3.0/homepage/main/bot8.120x90.ad"; //path to ad file
- var CNN_gallery_2_ad_2 = "/cnn_adspaces/3.0/homepage/main/bot9.120x90.ad"; //path to ad file
-
- if ( $('cnn_GallerySliderContainer') ) {
- cnn_SectionGallery = new cnn_GallerySlider();
- }
-
-</script>
- <div class="cnn_pad2top cnn_shdspc"></div>
-<div class="cnn_contentarea cnn_shdsectbin">
- <div class="cnn_shdcaheader"></div>
-
-<div class="cnn_sectbin1">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.cnn.com/US/?hpt=Sbin">U.S.</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.cnn.com/2011/US/02/10/pennsylvania.explosion/index.html?hpt=Sbin">4 dead in Pennsylvania explosion</a></li><li><a href="http://www.cnn.com/2011/US/02/10/winter.storm/index.html?hpt=Sbin">Snowstorm sweeps across Southeast</a></li><li><a href="http://ireport.cnn.com/docs/DOC-553055?hpt=Sbin">Salt, tarp, roll up that snow</a></li><li><a href="http://ireport.cnn.com/docs/DOC-552979?hpt=Sbin">Running shirtless in Okla. snow</a></li><li><a href="http://www.cnn.com/2011/US/02/10/mobile.medical.marijuana/index.html?hpt=Sbin">Trailers bring legal pot to masses</a></li><li><a href="http://news.blogs.cnn.com/2011/02/10/man-vs-computer-a-gaming-history/?hpt=Sbin">Man vs. computer: gaming history</a></li><li><a href="http://ireport.cnn.com/docs/DOC-552830?hpt=Sbin">NYC potholes make 'lunar landscapes'</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.cnn.com/US/?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div>
-</div><!-- /cnn_sectbin1 -->
-
-
-<div class="cnn_sectbin2">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.cnn.com/WORLD/?hpt=Sbin">World</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.cnn.com/video/#/video/world/2011/02/10/holmes.understanding.mubarak.cnn?hpt=Sbin">Understanding Hosni Mubarak</a><a href="http://www.cnn.com/video/#/video/world/2011/02/10/holmes.understanding.mubarak.cnn?hpt=Sbin"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Video" border="0" width="16" height="10" class="cnn_vidicon"></a></li><li><a href="http://www.cnn.com/2011/WORLD/africa/02/10/egypt.protests/index.html?hpt=Sbin">Protesters enraged as Mubarak to stay</a></li><li><a href="http://www.cnn.com/2011/WORLD/asiapcf/02/10/pakistan.violence/index.html?hpt=Sbin">Teenage suicide bomber kills at least 27 in Pakistan</a></li><li><a href="http://www.cnn.com/2011/WORLD/europe/02/09/italy.berlusconi/index.html?hpt=Sbin">Prosecutors seek trial of Berlusconi</a></li><li><a href="http://www.cnn.com/video/#/video/world/2011/02/10/sot.mubarak.stepping.down.niletv?hpt=Sbin">Mubarak: I am not leaving Egypt</a><a href="http://www.cnn.com/video/#/video/world/2011/02/10/sot.mubarak.stepping.down.niletv?hpt=Sbin"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Video" border="0" width="16" height="10" class="cnn_vidicon"></a></li><li><a href="http://www.cnn.com/2011/WORLD/asiapcf/02/09/koreas.talks.end/index.html?hpt=Sbin">Korea talks stall as North walks out</a></li><li><a href="http://www.cnn.com/2011/WORLD/americas/02/09/colombia.hostages/index.html?hpt=Sbin">FARC frees first of 5 hostages</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.cnn.com/WORLD/?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div> <div class="cnn_adspc155x31"><!--include 
virtual="/cnn_adspaces/3.0/homepage/spon1.126x31.ad" --></div>
-
-</div><!-- /cnn_sectbin2 -->
-
-
-<div class="cnn_sectbin3">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://money.cnn.com/?hpt=Sbin">Business</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"> <li><a href="http://money.cnn.com/2011/02/10/markets/dollar/index.htm?source=cnn_bin">IMF calls for dollar alternative</a></li><li><a href="http://money.cnn.com/2011/02/10/real_estate/mortgages_break_barrier/index.htm?source=cnn_bin">Mortgage rates break 5%</a></li><li><a href="http://money.cnn.com/2011/02/09/smallbusiness/black_owned_businesses_census/index.htm?source=cnn_bin">Fast growth for black-owned firms</a></li><li><a href="http://money.cnn.com/2011/02/10/markets/markets_newyork/index.htm?source=cnn_bin">Stocks end mixed as Egypt, tech weigh on market</a></li><li><a href="http://money.cnn.com/video/technology/2011/02/10/t-tt-verizon-iphone-release.cnnmoney/?source=cnn_bin">Verizon iPhone shoppers at dawn</a><a href="http://money.cnn.com/video/technology/2011/02/10/t-tt-verizon-iphone-release.cnnmoney/?source=cnn_bin"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Video" border="0" width="16" height="10" class="cnn_vidicon"></a></li><li><a href="http://money.cnn.com/video/smallbusiness/2011/02/09/smb_tips_dinner_in_the_sky.cnnmoney/?source=cnn_bin">Romantic dinner 180 ft. 
in the air</a><a href="http://money.cnn.com/video/smallbusiness/2011/02/09/smb_tips_dinner_in_the_sky.cnnmoney/?source=cnn_bin"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Video" border="0" width="16" height="10" class="cnn_vidicon"></a></li><li><a href="http://money.cnn.com/galleries/2011/pf/1102/gallery.valentines_day_extreme_gifts?source=cnn_bin">$40,000 Valentine's Day dinner</a></li></ul></div><div class="cnn_mtpmore"><a href="http://money.cnn.com/?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div> <div class="cnn_adspc155x31">
-
-
-
-
-</div>
-
-</div><!-- /cnn_sectbin3 -->
-
-
-<div class="cnn_sectbin4 cnn_quickvotebin">
- <div class="cnn_sectbincntnt">
- <h4>Quick vote</h4><div class="cnn_clear"></div><div class="cnn_divline"></div><script> var qvq_count = 0; </script>
-
-<div id="cnnQV_Content_54919">
-<form id="qv_poll_54919" method="post" action="http://polls.cnn.com/poll" target="qv_iframe_54919">
-<INPUT TYPE=HIDDEN NAME="poll_id" VALUE="54919">
-<div class="cnn_qvbv3">
-<h5 id="cnnQV_quesTxT">Would you step in to stop a street crime?</h5>
-</div>
-<div class="cnn_qvbv4">
-<ul>
-<li><input type="radio" name="question_1" value="1" id="cnnPollA1"> <label for="cnnPollA1" id="cnnPoll_Q1L1">Yes</label></li>
-<script> qvq_count++; </script>
-<li><input type="radio" name="question_1" value="2" id="cnnPollA2"> <label for="cnnPollA2" id="cnnPoll_Q1L2">No</label></li>
-<script> qvq_count++; </script>
-</ul>
-<div class="cnn_clear"></div>
-</div>
-<!-- /end Question 1 -->
-<div class="cnn_qvbvote">
-<div class="cnn_qvbv1"><a href="javascript:qvSubmitVote_54919();" class="cnn_frmqvtbtn"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div>
-<div class="cnn_qvbv2"><span>or </span><a href="javascript:qvGetResults_54919();">view results</a></div>
-<div class="cnn_clear"></div>
-</div>
-</form>
-</div>
-<iframe width="1" height="1" frameborder="0" style="visibility:hidden;height:1px;border:0px" id="qv_iframe_54919" name="qv_iframe_54919"></iframe>
-<script type="text/javascript">
-
- var qv_submitted_54919;
- function qvSubmitVote_54919() { qv_submitted_54919 = 1;$('qv_poll_54919').submit();qvGetResults_54919(); }
- function qvGetResults_54919() {
-
- CSIManager.getInstance().call('http://www.cnn.com/\/www.cnn.com/POLLSERVER/results/54919.content.html', '', 'cnnQV_Content_54919', cnn_qvBPHTML_54919, true);
-
- }
-
- function cnn_qvBPHTML_54919(obj, id, configObj) {
-
- var qvtemp_arr = new Array();
- for(i=1;i <= qvq_count;i++) { qvtemp_arr[(i - 1)] = { 'a_txt' : $('cnnPoll_Q1L' + i).innerHTML, 'vote_c' : parseInt(obj.poll_values[i].vote_count), 'vote_p' : parseInt(obj.poll_values[i].vote_percent) }; }
- qvtemp_arr.sort(cnn_qvCompRes);
- return cnn_qvBResHTML(obj, qvtemp_arr, '54919');
-
- }
-
-
- function cnn_qvBResHTML(obj, qv_arr, poll_id) {
-
- var qvtemp_html = '<div class="cnn_qvbv14">';
- qvtemp_html += '<div class="cnn_qvbv16">';
- qvtemp_html += '<h5>' + $('cnnQV_quesTxT').innerHTML + '</h5>';
- if(obj.related_story) { qvtemp_html += '<div class="cnn_qvbv5"><a href="' + obj.related_story + '">Read Related Articles</a></div>'; }
- qvtemp_html += '</div>';
- qvtemp_html += '<div class="cnn_qvbv15">This is not a scientific poll</div>';
- qvtemp_html += '</div>';
-
- qvtemp_html += '<div class="cnn_qvbv6">';
-
-
- for(i=0;i < qv_arr.length;i++) {
-
- qvtemp_html += '<div class="cnn_qvbv7';
- if(i == 0) { qvtemp_html += ' cnn_qvbvactv'; }
- qvtemp_html += '">';
- qvtemp_html += '<div class="cnn_qvbv8">' + qv_arr[i].a_txt + '</div>';
- qvtemp_html += '<div class="cnn_qvbv9"><div style="';
- if(qv_arr[i].vote_p > 0) { qvtemp_html += 'width:' + qv_arr[i].vote_p + 'px;'; }
- qvtemp_html += '"></div></div>';
- qvtemp_html += '<div class="cnn_qvbv10">' + qv_arr[i].vote_p + '%</div>';
- qvtemp_html += '<div class="cnn_qvbv11">' + qv_arr[i].vote_c + '</div>';
- qvtemp_html += '<div class="cnn_clear"></div>';
- qvtemp_html += '</div>';
-
- }
-
- qvtemp_html += '<div class="cnn_qvbv12">Total votes: ' + obj.total_vote_count + '</div>';
- qvtemp_html += '<div class="cnn_qvbv13">This is not a scientific poll</div>';
-
- qvtemp_html += '</div>';
-
- return qvtemp_html;
-
- }
-
- function cnn_qvCompRes(a, b) { return b.vote_c - a.vote_c; }
-
-</script>
-
- </div>
- <div class="cnn_adspc155x31"><!-- ADSPACE: homepage/spon9.126x31 -->
-
-
-<!-- CALLOUT|http://ads.cnn.com/html.ng/site=cnn&cnn_pagetype=main&cnn_position=126x31_spon9&cnn_rollup=homepage&page.allowcompete=yes&params.styles=fs|CALLOUT -->
-<div id="ad-210633" align="center" style="padding: 0; margin: 0; border: 0;"></div>
-<script type="text/javascript">
-//cnnad_createAd("210633","http://ads.cnn.com/html.ng/site=cnn&amp;cnn_pagetype=main&amp;cnn_position=126x31_spon9&amp;cnn_rollup=homepage&amp;page.allowcompete=yes&amp;params.styles=fs","31","126");
-cnnad_registerSpace(210633,126,31);
-</script>
-
-</div>
-</div><!-- /cnn_quickvotebin -->
-
-<div class="cnn_clear"></div>
-
-<div class="cnn_shdcafooter"></div>
-
-<div class="cnn_pad2top cnn_shdspc"></div>
-
-<div class="cnn_shdcaheader"></div>
-
-<div class="cnn_sectbin1">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.cnn.com/POLITICS/?hpt=Sbin">Politics</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.cnn.com/2011/POLITICS/02/10/trump.presidency/index.html?hpt=Sbin">Trump 'seriously considering' 2012 </a></li><li><a href="http://www.cnn.com/2011/OPINION/02/10/rollins.republicans.2012/index.html?hpt=Sbin">Obama be beaten in 2012?</a></li><li><a href="http://stylenews.peoplestylewatch.com/2011/02/10/michelle-obama-35-dollar-hm-dress/" target="new">First Lady rocks $35 dress</a></li><li><a href="http://www.cnn.com/2011/HEALTH/02/09/giffords.recovery/index.html?hpt=Sbin">Aide: Giffords can speak</a></li><li><a href="http://www.cnn.com/2011/POLITICS/02/09/pol.virginia.webb/index.html?hpt=Sbin">Sen. Webb to step down in 2012</a></li><li><a href="http://www.cnn.com/2011/POLITICS/02/08/republican.contenders/index.html?hpt=Sbin">Familiar GOP faces coy on run</a></li><li><a href="http://politicalticker.blogs.cnn.com/2011/02/08/paul-strongly-considering-another-presidential-bid/?hpt=Sbin">Another go for Ron Paul?</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.cnn.com/POLITICS/?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div> <div class="cnn_adspc155x31"></div>
-
-</div><!-- /cnn_sectbin1 -->
-
-
-<div class="cnn_sectbin2">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.cnn.com/SHOWBIZ/?hpt=Sbin">Entertainment</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.cnn.com/2011/SHOWBIZ/Movies/02/10/cedar.rapids.ew/index.html?hpt=Sbin">Review: 'Cedar Rapids' is truly original </a></li><li><a href="http://www.cnn.com/2011/SHOWBIZ/Movies/02/09/iconic.movie.kisses.instyle/index.html?hpt=Sbin">The 10 most iconic movie kisses</a></li><li><a href="http://marquee.blogs.cnn.com/2011/02/10/did-i-say-that/?hpt=Sbin">Did I say that?! Regrettable quotables</a></li><li><a href="http://marquee.blogs.cnn.com/2011/02/10/look-like-lilo-for-575/?hpt=Sbin">Look like LiLo for $575</a></li><li><a href="http://marquee.blogs.cnn.com/2011/02/10/jennifer-hudson-to-oprah-ive-lost-80-pounds/?hpt=Sbin">Jennifer Hudson: I've lost 80 pounds!</a></li><li><a href="http://marquee.blogs.cnn.com/2011/02/10/%E2%80%98top-chef%E2%80%99-too-good-for-its-own-good/?hpt=Sbin">TV recap: 'Top Chef'</a></li><li><a href="http://www.cnn.com/2011/SHOWBIZ/Music/02/09/g.love.soundcheck/index.html?hpt=Sbin">Musician G. Love on 'Fixin' to Die'</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.cnn.com/SHOWBIZ/?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div>
-</div><!-- /cnn_sectbin2 -->
-
-
-<div class="cnn_sectbin3">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.cnn.com/HEALTH/?hpt=Sbin">Health</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.cnn.com/2011/HEALTH/02/10/ep.conception.truths.myths/index.html?hpt=Sbin">Conception facts and myths</a></li><li><a href="http://pagingdrgupta.blogs.cnn.com/2011/02/10/pediatrician-tris-to-practice-what-he-preaches/?hpt=Sbin">Pediatrician 'tris' to set example</a></li><li><a href="http://www.cnn.com/video/#/video/health/2011/02/10/nr.vets.hiv.exposure.cnn?hpt=Sbin">Vets possibly exposed to HIV</a><a href="http://www.cnn.com/video/#/video/health/2011/02/10/nr.vets.hiv.exposure.cnn?hpt=Sbin"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Video" border="0" width="16" height="10" class="cnn_vidicon"></a></li><li><a href="http://pagingdrgupta.blogs.cnn.com/2011/02/10/diet-soda-and-stroke-is-there-a-link/?hpt=Sbin">Are diet soda and stroke linked?</a></li><li><a href="http://www.cnn.com/video/#/video/health/2011/02/10/hm.healthy.conflict.cnn?hpt=Sbin">Ground rules for healthy conflict</a><a href="http://www.cnn.com/video/#/video/health/2011/02/10/hm.healthy.conflict.cnn?hpt=Sbin"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/icons/video_icon.gif" alt="Video" border="0" width="16" height="10" class="cnn_vidicon"></a></li><li><a href="http://pagingdrgupta.blogs.cnn.com/2011/02/10/valentines-day-skip-the-fancy-meal-and-go-straight-to-the-sex/?hpt=Sbin">Valentine's Day: Tips for great sex</a></li><li><a href="http://www.cnn.com/2011/HEALTH/02/09/surgery.spina.bifada/index.html?hpt=Sbin">Spina bifida surgery in the womb</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.cnn.com/HEALTH/?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div>
-</div><!-- /cnn_sectbin3 -->
-
-
-<div class="cnn_sectbin4 cnn_adbygbin">
-<!-- ADSPACE: homepage/main/adlinks.230x250 -->
-
-
-<!-- CALLOUT|http://ads.cnn.com/html.ng/site=cnn&cnn_pagetype=main&cnn_position=230x250_adlinks&cnn_rollup=homepage&page.allowcompete=yes&params.styles=fs|CALLOUT -->
-<div id="ad-825207" align="center" style="padding: 0; margin: 0; border: 0;"></div>
-<script type="text/javascript">
-//cnnad_createAd("825207","http://ads.cnn.com/html.ng/site=cnn&amp;cnn_pagetype=main&amp;cnn_position=230x250_adlinks&amp;cnn_rollup=homepage&amp;page.allowcompete=yes&amp;params.styles=fs","250","230");
-cnnad_registerSpace(825207,230,250);
-</script>
-
-
-
-
-
-
-
-
-
-
-
-
-
-</div><!-- /cnn_adbygbin -->
-
-<div class="cnn_clear"></div>
-
-<div class="cnn_shdcafooter"></div>
-
-<div class="cnn_pad2top cnn_shdspc"></div>
-
-<div class="cnn_shdcaheader"></div>
-
-<div class="cnn_sectbin1">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.cnn.com/TECH/?hpt=Sbin">Tech</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.cnn.com/2011/TECH/mobile/02/10/verizon.iphone/index.html?hpt=Sbin">Few lining up for Verizon iPhone</a></li><li><a href="http://www.cnn.com/2011/TECH/gaming.gadgets/02/10/guitar.hero.went.wrong.mashable/index.html?hpt=Sbin">Guitar Hero: What went wrong?</a></li><li><a href="http://www.cnn.com/2011/TECH/mobile/02/09/ipad.2.production.mashable/index.html?hpt=Sbin">Apple iPad 2 now in production</a></li><li><a href="http://www.cnn.com/2011/TECH/mobile/02/09/hp.webos/index.html?hpt=Sbin">HP unveils TouchPad tablet</a></li><li><a href="http://www.cnn.com/2011/CRIME/02/09/california.facebook.stalker/index.html?hpt=Sbin">A TRO for Facebook CEO</a></li><li><a href="http://www.cnn.com/2011/TECH/mobile/02/07/dual.band.iphone/index.html?hpt=Sbin">Apple to make 'universal' iPhone?</a></li><li><a href="http://www.cnn.com/2011/TECH/mobile/02/08/verizon.volte/index.html?hpt=Sbin">Verizon plans 4G calling service</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.cnn.com/TECH/?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div>
-</div><!-- /cnn_sectbin1 -->
-
-
-<div class="cnn_sectbin2">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.cnn.com/LIVING/?hpt=Sbin">Living and Eatocracy</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.cnn.com/2011/LIVING/02/09/ornaments.christmas.future/index.html?hpt=Sbin">Who plans your next Christmas?</a></li><li><a href="http://religion.blogs.cnn.com/2011/02/10/my-take-why-egypts-christians-are-excited-but-nervous/?hpt=Sbin">Egypt's Christians are hopeful </a></li><li><a href="http://eatocracy.cnn.com/2011/02/10/hello-my-name-is-chiggy/?hpt=Sbin">What's your Starbucks alter ego?</a></li><li><a href="http://eatocracy.cnn.com/2011/02/10/heston-blumenthal/?hpt=Sbin">A look inside The Fat Duck</a></li><li><a href="http://money.cnn.com/galleries/2011/pf/1102/gallery.valentines_day_extreme_gifts/?hpt=Sbin">Extreme Valentine's Day gifts</a></li><li><a href="http://www.kdvr.com/news/kdvr-voodoo-mends-a-broken-heart-this-valentines-day-20110209,0,4880651.story" target="new">Voodoo dolls help mend hearts</a>&nbsp;<span class="cnnWOOL">KDVR</span></li><li><a href="http://www.cnn.com/2011/LIVING/02/09/cop.to.teacher/index.html?hpt=Sbin">From cop to Buddhist teacher</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.cnn.com/LIVING/?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div>
-</div><!-- /cnn_sectbin2 -->
-
-
-<div class="cnn_sectbin3">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.cnn.com/JUSTICE/?hpt=Sbin">Justice</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.cnn.com/2011/CRIME/02/10/virginia.granddad.bandit/index.html?hpt=Sbin">'Granddad Bandit,' feds talking plea</a></li><li><a href="http://www.cnn.com/2011/CRIME/02/10/georgia.killings/index.html?hpt=Sbin">Brothers killed in attack on home</a></li><li><a href="http://www.cnn.com/2011/CRIME/02/09/pennsylvania.kensington.strangler/index.html?hpt=Sbin">DA to seek death in Philly attacks</a></li><li><a href="http://www.cnn.com/2011/CRIME/02/09/california.facebook.stalker/index.html?hpt=Sbin">A TRO for Facebook CEO</a></li><li><a href="http://www.cnn.com/2011/CRIME/02/08/california.bell.plea.offer/index.html?hpt=Sbin">No plea deals in city pay scandal</a></li><li><a href="http://www.cnn.com/2011/CRIME/02/09/missouri.death.busch/index.html?hpt=Sbin">Busch death ruled accidental</a></li><li><a href="http://www.cnn.com/2011/CRIME/02/09/pennsylvania.abortion.doctor/index.html?hpt=Sbin">No prelim for Philly abortion doc</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.cnn.com/JUSTICE/?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div>
-</div><!-- /cnn_sectbin3 -->
-
-<style>
-
-.cnn_hppolbreakn { margin:0 0 0 7px;width:217px; }
-.cnn_hppolbreakn1 { padding:13px 0 10px 0; }
-.cnn_hppolbreakn p { padding:7px 0 0 0;font-size:11px;line-height:14px; }
-
-</style>
-
-<div class="cnn_sectbin4 cnn_hppolbreakn">
-
-<div class="cnn_hppolbreakn1"><a href="http://www.cnn.com/profile/#my_alerts"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/sect/politics/political_alert_singup.gif" width="217" height="18" alt="" border="0"></a></div>
-
-<div class="cnn_divline"></div>
-
-<div><a href="http://www.cnn.com/profile/#my_alerts"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/sect/politics/217x103_graph.jpg" width="217" height="103" alt="" border="0"></a></div>
-
-<p><a href="http://www.cnn.com/profile/#my_alerts">Sign up</a> for breaking political news alerts from CNN and be the first to know when news happens inside the Beltway and across the country.</p>
-
-</div><!-- / -->
-
-<div class="cnn_clear"></div>
-
-<div class="cnn_shdcafooter"></div>
-
-<div class="cnn_pad2top cnn_shdspc"></div>
-
-<div class="cnn_shdcaheader"></div>
-
-<div class="cnn_sbalternate">
-
-<div class="cnn_sectbin1">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://sportsillustrated.cnn.com/?xid=cnnbin&amp;hpt=Sbin">Sports</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin">
-<li><a href="http://sportsillustrated.cnn.com/2011/writers/stewart_mandel/02/10/unc.duke/index.html?xid=cnnbin">Behind Duke's comeback over UNC</a></li>
-<li><a href="http://joeposnanski.si.com/2011/02/06/looking-at-hall-of-fame-pitchers/?xid=cnnbin">Here's what makes a HOF pitcher</a></li>
-<li><a href="http://sportsillustrated.cnn.com/2011/writers/jon_wertheim/02/08/rafael.nadal.justine.henin/index.html?xid=cnnbin">Should Nadal take year off to heal?</a></li>
-<li><a href="http://www.golf.com/golf/tours_news/article/0,28136,2047146,00.html?xid=cnnbin">Tiger still searching for old magic</a></li>
-<li><a href="http://sportsillustrated.cnn.com/2011/writers/andy_staples/02/08/2008-recruiting-classes/index.html?xid=cnnbin">Re-ranking 2008 recruiting class</a></li>
-<li><a href="http://www.fannation.com/truth_and_rumors/view/265871-now-playing-second-fiddle-dwyane-wade?xid=cnnbin">LeBron now overshadowing Wade</a></li>
-<li><a href="http://sportsillustrated.cnn.com/multimedia/photo_gallery/1102/duke.unc.classic.photos/content.1.html?xid=cnnbin">Classic pix: Duke-North Carolina</a></li>
-</ul>
-
-</div><div class="cnn_mtpmore"><a href="http://sportsillustrated.cnn.com/?xid=cnnbin&amp;hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div>
-</div><!-- /cnn_sectbin1 -->
-
-
-<div class="cnn_sectbin2">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.rollingstone.com/" target="new">Rolling Stone</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.rollingstone.com/culture/blogs/gear-up/guitar-hero-dead-20110209" target="new">'Guitar Hero' Dead</a></li> <li><a href="http://www.rollingstone.com/culture/news/inside-scientology-20110208" target="new">Inside Scientology</a></li> <li><a href="http://www.rollingstone.com/culture/blogs/gear-up/the-new-celebrity-music-economy-20110210" target="new">The New Celebrity Music Economy</a></li> <li><a href="http://www.rollingstone.com/politics/news/matt-taibbi-on-the-tea-party-20100928" target="new">The Truth about the Tea Party</a></li> <li><a href="http://www.rollingstone.com/music/photos/rolling-stone-readers-pick-best-drummers-of-all-time-20110208" target="new">Best Drummers of all Time</a></li> <li><a href="http://www.rollingstone.com/movies/news/jennifer-lawrence-enters-oscar-country-20110208" target="new">Jen Lawrence enters Oscar country</a></li> <li><a href="http://www.rollingstone.com/music/photos/lady-gagas-wild-looks-00040401" target="new">Lady Gaga's best looks</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.rollingstone.com/" class="cnn_mtpmorebtn" target="new"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div>
-</div><!-- /cnn_sectbin2 -->
-
-
-<div class="cnn_sectbin3">
- <div class="cnn_sectbincntnt"><div class="cnn_sectbincntnt2"> <h4><a href="http://www.cnn.com/TRAVEL?hpt=Sbin">Travel</a></h4><div class="cnn_clear"></div><div class="cnn_divline"></div> <ul class="cnn_bulletbin"><li><a href="http://www.cnn.com/2011/TRAVEL/02/10/romantic.destinations/index.html?hpt=Sbin">Best places for a Valentine proposal</a></li><li><a href="http://www.cnn.com/2011/TRAVEL/02/10/increase.airfare.abroad/index.html?hpt=Sbin">Travel abroad (without going broke)</a></li><li><a href="http://www.cnn.com/2011/TRAVEL/02/10/ryanair.packing.tips/index.html?hpt=Sbin">Ryanair offers tips to 'mutiny' students</a></li><li><a href="http://www.cnn.com/2011/TRAVEL/02/10/tsa.airport.screeners/index.html?hpt=Sbin">TSA chief 'willing to' fire screeners</a></li><li><a href="http://www.cnn.com/2011/US/02/09/district.of.columbia.person.cargo.hold/index.html?hpt=Sbin">Baggage handler locked in cargo hold</a></li><li><a href="http://www.cnn.com/2011/TRAVEL/02/09/egypt.tourism/index.html?hpt=Sbin">Long road ahead for Egypt tourism</a></li><li><a href="http://www.cnn.com/2011/TRAVEL/02/09/cockpit.view.videos.youtube/index.html?hpt=Sbin">Five unforgettable cockpit-view videos</a></li></ul></div><div class="cnn_mtpmore"><a href="http://www.cnn.com/TRAVEL?hpt=Sbin" class="cnn_mtpmorebtn"><img alt="More" src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div></div>
-</div><!-- /cnn_sectbin3 -->
-
-
-<div class="cnn_clear"></div>
-
-
-<div class="cnn_shd755pxfooter"></div>
-
-<div class="cnn_shd755px2header"></div>
-
-<div class="cnn_tvsectionbin">
- <div class="cnn_tsbnav">
- <form>
- <a href="http://www.cnn.com/Programs/?hpt=tv">CNN TV Schedule</a> | <a href="http://www.cnn.com/HLN/?hpt=tv">HLN</a> | <a href="javascript:CNN_openPopup('http://www.cnn.com/audio/radio/cnntv.html?hpt=tv','radioplayer','toolbar=no,location=no,directories=no,status=no,menubar=no,scrollbars=no,resizable=no,width=360,height=573');">Listen to CNN</a> |
- <select onchange="if (this.options[selectedIndex].value != '') location.href = this.options[selectedIndex].value;">
- <option value="/Programs/?hpt=tvdrop">TV Programs</option>
- <option value="" disabled="disabled" class="disabledProgram">----------------------------------------</option>
- <option value="/CNN/Programs/?hpt=tvdrop">Today's schedule</option>
- <option value="" disabled="disabled" class="disabledProgram">----- NEWS -----</option>
- <option value="/CNN/Programs/american.morning/?hpt=tvdrop">American Morning</option>
- <option value="/CNN/Programs/anderson.cooper.360/?hpt=tvdrop">Anderson Cooper 360</option>
- <option value="/CNN/Programs/cnn.newsroom/?hpt=tvdrop">CNN Newsroom</option>
- <option value="/CNN/Programs/presents/?hpt=tvdrop">CNN Presents</option>
- <option value="/CNN/Programs/prime.news/?hpt=tvdrop">Prime News</option>
- <option value="/CNN/Programs/morning.express/?hpt=tvdrop">Morning Express with Robin Meade</option>
- <option value="/CNN/Programs/siu/?hpt=tvdrop">Spec. Investigations Unit</option>
- <option value="/CNN/Programs/student.news/?hpt=tvdrop">CNN Student News</option>
- <option value="/CNN/Programs/showbiz.tonight/?hpt=tvdrop">Showbiz Tonight</option>
- <option value="/CNN/Programs/situation.room/?hpt=tvdrop">The Situation Room</option>
- <option value="/CNN/Programs/state.of.the.union/?hpt=tvdrop">State of the Union</option>
- <option value="/CNN/Programs/this.week.at.war/?hpt=tvdrop">This Week at War</option>
- <option value="" disabled="disabled" class="disabledProgram">----- INTERVIEW and DEBATE -----</option>
- <option value="/CNN/Programs/fareed.zakaria.gps/?hpt=tvdrop">Fareed Zakaria GPS</option>
- <option value="/CNN/Programs/issues.with.jane/?hpt=tvdrop">ISSUES with Jane Velez-Mitchell</option>
- <option value="/CNN/Programs/larry.king.live/?hpt=tvdrop">Larry King Live</option>
- <option value="/CNN/Programs/nancy.grace/?hpt=tvdrop">Nancy Grace</option>
- <option value="http://parkerspitzer.blogs.cnn.com/">Parker Spitzer</option>
- <option value="http://piersmorgan.blogs.cnn.com/">Piers Morgan Tonight</option>
- <option value="/CNN/Programs/reliable.sources/?hpt=tvdrop">Reliable Sources</option>
- <option value="http://joybehar.blogs.cnn.com/?hpt=tvdrop">The Joy Behar Show</option>
- <option value="" disabled="disabled" class="disabledProgram">----- NEWS for LIVING -----</option>
- <option value="/CNN/Programs/clark.howard/?hpt=tvdrop">Clark Howard</option>
- <option value="/CNN/Programs/sanjay.gupta.md/?hpt=tvdrop">Sanjay Gupta MD</option>
- <option value="/CNN/Programs/your.bottom.line/?hpt=tvdrop">Your Bottom Line</option>
- <option value="/CNN/Programs/yourmoney/?hpt=tvdrop">Your $$$$$</option>
- </select>
-
- </form>
- </div>
-
- <h4><a href="http://www.cnn.com/CNN/Programs/?hpt=tv">CNN TV</a></h4>
-
- <div class="cnn_clear"></div>
- <div class="cnn_divline"></div>
- <!-- /cnn TV Promo tool include -->
-<div class="cnn_tsbtz">
- <div class="cnn_tsbtimg"><a href="http://piersmorgan.blogs.cnn.com/?hpt=tv"><img src="../i2.cdn.turner.com/cnn/2010/images/12/17/piers.120x68.2.jpg" width="120" height="68" alt="" border="0"></a></div>
- <div class="cnn_tsbttxt">
- <div>
- <a href="http://piersmorgan.blogs.cnn.com/?hpt=tv"><b>The very latest from Egypt</b></a><br>
- <span>Tonight, 9 ET on CNN</span>
- </div>
- </div>
-</div>
-
-<div class="cnn_tsbtz">
- <div class="cnn_tsbtimg"><a href="http://ac360.blogs.cnn.com/?hpt=tv"><img src="../i2.cdn.turner.com/cnn/2009/images/10/27/ac120x68.jpg" width="120" height="68" alt="" border="0"></a></div>
- <div class="cnn_tsbttxt">
- <div><a href="http://ac360.blogs.cnn.com/?hpt=tv"><b>Egypt Uprising</b></a><br><span>Tonight, 10 ET on CNN</span></div>
- </div>
-</div>
-
-<div class="cnn_tsbtz">
- <div class="cnn_tsbtimg"><a href="http://amfix.blogs.cnn.com/?hpt=tv"><img src="../i2.cdn.turner.com/cnn/2010/images/12/29/kiran.120x68.jpg" width="120" height="68" alt="" border="0"></a></div>
- <div class="cnn_tsbttxt">
- <div><a href="http://amfix.blogs.cnn.com/?hpt=tv"><b>Egypt in Crisis</b></a><br><span>'AM,' 6 a.m. ET on CNN</span></div>
- </div>
-</div>
-<!-- /cnn TV Promo tool include -->
-
-
- <div class="cnn_clear"></div>
-
-</div><!-- /cnn_tvsectionbin -->
-
-
-</div><!-- /cnn_sbalternate -->
-
-<div class="cnn_sectbin4 cnn_sbhottopics">
-<!-- hot topic include -->
-<h4><a href="http://www.cnn.com/SPECIALS/?hpt=htopic">Hot topics</a></h4>
-<div class="cnn_clear"></div>
-<div class="cnn_divline"></div>
-
-
-<div class="cnn_sbhttopiccntr">
-
- <a href="http://topics.cnn.com/topics/egypt?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl1"><span>Egypt</span></a><div class="cnn_divline"></div> <a href="http://topics.cnn.com/topics/hosni_mubarak?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl2"><span>Hosni Mubarak</span></a><div class="cnn_divline"></div> <a href="http://topics.cnn.com/topics/winter_weather?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl3"><span>Winter Weather</span></a><div class="cnn_divline"></div> <a href="http://topics.cnn.com/topics/pakistan?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl4"><span>Pakistan</span></a><div class="cnn_divline"></div> <a href="http://topics.cnn.com/topics/mexico?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl5"><span>Mexico</span></a><div class="cnn_divline"></div> <a href="http://topics.cnn.com/topics/donald_trump?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl6"><span>Donald Trump</span></a><div class="cnn_divline"></div> <a href="http://topics.cnn.com/topics/sarah_palin?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl7"><span>Sarah Palin</span></a><div class="cnn_divline"></div> <a href="http://topics.cnn.com/topics/lindsay_lohan?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl8"><span>Lindsay Lohan</span></a><div class="cnn_divline"></div> <a href="http://www.cnn.com/SPECIALS/2011/academy.awards/?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl9"><span>Academy Awards</span></a><div class="cnn_divline"></div> <a href="http://topics.cnn.com/topics/miley_cyrus?hpt=htopic" class="cnn_sbhttopicl cnn_sbhttl10"><span>Miley Cyrus</span></a><div class="cnn_divline"></div>
-
-</div><!-- /cnn_sbhttopiccntr -->
-
-<div class="cnn_pad11top"></div>
-<div class="cnn_pad9top"></div>
-
-<div><a href="http://www.cnn.com/SPECIALS/?hpt=htopic" class="cnn_mtpvatbtn"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" border="0"></a></div>
-
-<div class="cnn_pad7top"></div>
-
-<div class="cnn_adspc155x31"><!-- ADSPACE: homepage/spon8.126x31 --></div>
-<div class="cnn_clear"></div>
-
-<!-- /hot topic include -->
-
-</div><!-- /cnn_sbhottopics -->
-
-
-<div class="cnn_shdmnfooter"></div>
-
-
-
-</div><!-- /cnn_shdsectbin -->
-
-<div class="cnn_pad2top cnn_shdspc"></div>
-
-<div class="cnn_contentarea cnn_filterareabox">
- <div class="cnn_sdbx">
- <div class="cnn_sdbx1">
- <div class="cnn_sdbx2">
- <div class="cnn_sdbx3">
- <div class="cnn_sdbx4">
- <div class="cnn_sdbx5">
- <div class="cnn_sdbxcntnt">
-
-<script type="text/javascript">
- cnnad_newTileIDGroup(new Array('300x100_bot1', '300x100_bot2', '300x100_bot3'));
-</script>
- <!-- ad snippet include -->
-<div class="cnn_adcntr300x100">
-
-<div class="cnn_adspc300x100">
-<div><!-- ADSPACE: homepage/main/bot1.300x100 -->
-
-
-<!-- CALLOUT|http://ads.cnn.com/html.ng/site=cnn&cnn_pagetype=main&cnn_position=300x100_bot1&cnn_rollup=homepage&page.allowcompete=no&params.styles=fs|CALLOUT -->
-<div id="ad-384575" align="center" style="padding: 0; margin: 0; border: 0;"></div>
-<script type="text/javascript">
-//cnnad_createAd("384575","http://ads.cnn.com/html.ng/site=cnn&amp;cnn_pagetype=main&amp;cnn_position=300x100_bot1&amp;cnn_rollup=homepage&amp;page.allowcompete=no&amp;params.styles=fs","100","300");
-cnnad_registerSpace(384575,300,100);
-</script>
-
-
-
-
-
-
-</div>
-<div class="cnn_adspctimg"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/misc/advertisement.gif" width="58" height="5" alt="" border="0"></div>
-</div>
-
-<div class="cnn_adspc300x100 cnn_adspcmid">
-<div><!-- ADSPACE: homepage/main/bot2.300x100 -->
-
-
-<!-- CALLOUT|http://ads.cnn.com/html.ng/site=cnn&cnn_pagetype=main&cnn_position=300x100_bot2&cnn_rollup=homepage&page.allowcompete=no&params.styles=fs|CALLOUT -->
-<div id="ad-965361" align="center" style="padding: 0; margin: 0; border: 0;"></div>
-<script type="text/javascript">
-//cnnad_createAd("965361","http://ads.cnn.com/html.ng/site=cnn&amp;cnn_pagetype=main&amp;cnn_position=300x100_bot2&amp;cnn_rollup=homepage&amp;page.allowcompete=no&amp;params.styles=fs","100","300");
-cnnad_registerSpace(965361,300,100);
-</script>
-
-
-
-
-
-
-</div>
-<div class="cnn_adspctimg"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/misc/advertisement.gif" width="58" height="5" alt="" border="0"></div>
-</div>
-
-<div class="cnn_adspc300x100">
-<div><!-- ADSPACE: homepage/main/bot3.300x100 -->
-
-
-<!-- CALLOUT|http://ads.cnn.com/html.ng/site=cnn&cnn_pagetype=main&cnn_position=300x100_bot3&cnn_rollup=homepage&page.allowcompete=no&params.styles=fs|CALLOUT -->
-<div id="ad-201357" align="center" style="padding: 0; margin: 0; border: 0;"></div>
-<script type="text/javascript">
-//cnnad_createAd("201357","http://ads.cnn.com/html.ng/site=cnn&amp;cnn_pagetype=main&amp;cnn_position=300x100_bot3&amp;cnn_rollup=homepage&amp;page.allowcompete=no&amp;params.styles=fs","100","300");
-cnnad_registerSpace(201357,300,100);
-</script>
-
-
-
-
-
-
-</div>
-<div class="cnn_adspctimg"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/misc/advertisement.gif" width="58" height="5" alt="" border="0"></div>
-</div>
-
-<div class="cnn_clear"></div>
-
-</div>
- <!--/ad snippet include -->
-
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
- </div>
-</div><!-- /cnn_contentarea -->
-
-
-<div id="cnn_ftrcntnt">
-<div id="cnn_ftrcntntinner" class="clearfix">
-<div class="cnn_ftrdivl1"></div>
-<div id="cnnLWPWeather" style="visibility: hidden">
- <p><b>Loading weather data ...</b></p>
-</div>
-
-<div id="ftr-search">
- <form method="get" action="http://www.cnn.com/search/" onsubmit="return cnnFootSearch(this);">
-
-
-<div class="ftr-search-datacntr">
-<div class="ftr-search-tfield"><input type="text" name="query" size="12" maxlength="40" value="" id="ftr-search-box"></div>
-<div class="ftr-search-sicon"><input type="image" src="img/search_btn_footer.gif" alt=""></div>
-</div>
-
- <input type="hidden" name="cnnFtrSrchType" id="cnnFtrSrchType" value="mixed">
-
-<div class="cnn_ftrggle"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/footer/pngs/footer_google.png" width="88" height="13" alt="" border="0" class="cnn_ie6png"></div>
-
- </form>
-</div>
-
-<div class="cnn_clear"></div>
-
-<div class="cnn_divline" style="background-color:#EBEBEB; margin-top:3px"></div>
-
-<div class="cnn_ftrnvlnks">
-
-<div><a href="http://www.cnn.com/">Home</a> | <a href="http://www.cnn.com/video/">Video</a> | <a href="http://newspulse.cnn.com/">NewsPulse</a> | <a href="http://www.cnn.com/US/">U.S.</a> | <a href="http://www.cnn.com/WORLD/">World</a> | <a href="http://www.cnn.com/POLITICS/">Politics</a> | <a href="http://www.cnn.com/JUSTICE/">Justice</a> | <a href="http://www.cnn.com/SHOWBIZ/">Entertainment</a> | <a href="http://www.cnn.com/TECH/">Tech</a> | <a href="http://www.cnn.com/HEALTH/">Health</a> | <a href="http://www.cnn.com/LIVING/">Living</a> | <a href="http://www.cnn.com/TRAVEL/">Travel</a> | <a href="http://www.cnn.com/OPINION/">Opinion</a> | <a href="http://ireport.cnn.com/?cnn=yes">iReport</a> | <a href="http://money.cnn.com/?cnn=yes">Money</a> | <a href="http://sportsillustrated.cnn.com/?xid=cnnfoot">Sports</a></div>
-
-<div><a href="http://www.cnn.com/tools/index.html" title="">Tools &amp; widgets</a> | <a href="http://www.cnn.com/services/rss/" title="">RSS</a> | <a href="http://www.cnn.com/services/podcasting/" title="">Podcasts</a> | <a href="http://www.cnn.com/exchange/blogs/index.html" title="">Blogs</a> | <a href="http://www.cnn.com/mobile/" title="">CNN mobile</a> | <a href="http://www.cnn.com/profile/" title="">My profile</a> | <a href="http://www.cnn.com/profile/" title="">E-mail alerts</a> <!--| <a href="/audio/radio/winmedia.html" title="">CNN Radio</a>--> | <a href="http://www.turnerstoreonline.com/" title="">CNN shop</a> | <a href="http://www.cnn.com/sitemap/" title="">Site map</a></div>
-
-</div>
-
-<div class="cnn_ftrdivl2"></div>
-
-<div class="cnn_ftrlnggcntr">
-<div><a href="http://www.cnn.com/espanol/" title="">CNN en ESPA&Ntilde;OL</a> | <a href="http://www.cnnchile.com/" title="">CNN Chile</a> | <a href="http://www.cnnexpansion.com/" title="">CNN Expansion</a> | <span class="cnn_ftrlarabic"><a href="http://arabic.cnn.com/" class="cnn_ie6png"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="32" height="11" alt="" border="0"></a></span> | <span class="cnn_ftrlkorean"><a href="http://cnn.joins.com/" class="cnn_ie6png"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="32" height="11" alt="" border="0"></a></span> | <span class="cnn_ftrljapan"><a href="http://www.cnn.co.jp/" class="cnn_ie6png"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="32" height="11" alt="" border="0"></a></span> | <span class="cnn_ftrlturkish"><a href="http://www.cnnturk.com/" class="cnn_ie6png"><img src="../i.cdn.turner.com/cnn/.element/img/3.0/1px.gif" width="32" height="11" alt="" border="0"></a></span></div>
-<div><a href="http://www.cnn.com/CNN/Programs/" title="">CNN TV</a> | <a href="http://www.cnn.com/HLN/" title="">HLN</a> | <a href="http://transcripts.cnn.com/TRANSCRIPTS/" title="">Transcripts</a></div>
-</div>
-
-<div class="cnn_ftrlgcpy">
-<div><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/footer/pngs/footer_cnn_logo.png" width="23" height="11" alt="" border="0" class="cnn_ie6png">&copy; 2011 Cable News Network. <a href="http://www.turner.com/" class="cnn_ftrtbslink" title="">Turner Broadcasting System, Inc.</a> All Rights Reserved.</div>
-<div class="cnn_ftrlgcpy1"><a href="http://www.cnn.com/interactive_legal.html" title="" rel="nofollow">Terms of service</a> | <a href="http://www.cnn.com/privacy.html" title="" rel="nofollow">Privacy guidelines</a> | <a href="http://www.cnn.com/services/ad.choices/" title="">Ad choices</a><img src="../i.cdn.turner.com/cnn/.element/img/3.0/global/misc/logo_ad_choices_footer.png" width="12" height="12" alt="" border="0" style="margin:0 0 0 4px;" class="cnn_ie6png"> | <a href="http://www.cnn.com/services/advertise/main.html" title="" rel="nofollow">Advertise with us</a> | <a href="http://www.cnn.com/about/" title="">About us</a> | <a href="http://www.cnn.com/feedback/" title="" rel="nofollow">Contact us</a> | <a href="http://www.turner.com/careers/" title="" rel="nofollow">Work for us</a> | <a href="http://www.cnn.com/help/" title="" rel="nofollow">Help</a></div>
-</div>
-
-<div class="cnn_clear"></div>
-</div><!-- /cnn_ftrcntntinner -->
-</div><!-- /cnn_ftrcntnt -->
-<!--
-<script type="text/javascript" src="js/weather.footer.js?20100802"></script>
--->
-<script type="text/javascript">
- var cnn_edtnswtchver = "www";
-</script>
-<script type="text/javascript" src="js/edition.vars.js"></script>
-
-<!--
-<script type="text/javascript">
-//if(ms_isLoggedIn()) {
- //CNN_setCookie('CNN_member', true, 854400, 'http://www.cnn.com/', document.domain);
-//}
-</script>
--->
-
-
-
-<!--
-<script src="js/s_code.js" type="text/javascript"></script>
--->
-<script src="js/jsmd.min.js" type="text/javascript"></script>
-<script language="JavaScript">
-<!--
-/************* DO NOT ALTER ANYTHING BELOW THIS LINE ! **************/
-if (typeof(cnnOmniSearchPage) == "undefined"){
-//var s_code=s.t();if(s_code)document.write(s_code)
-}
-//-->
-
-try{window.onload = cnnCheckElement; }catch(err){}
-</script>
-
-<script language="JavaScript"><!--
-if(navigator.appVersion.indexOf('MSIE')>=0)document.write(unescape('%3C')+'\!-'+'-')
-//--></script>
-<noscript><img
-src="img/metrics.gif"
-height="1" width="1" border="0" alt="" /></noscript><!--/DO NOT REMOVE/-->
-
-<!-- End SiteCatalyst code -->
-
-<script src="js/nodetag.js"></script>
-<script language="javascript" src="js/97.js"></script>
-
-
-<!-- ADSPACE: homepage/bot.1x1 -->
-
-
-<!-- CALLOUT|http://ads.cnn.com/html.ng/site=cnn&cnn_pagetype=main&cnn_position=1x1_bot&cnn_rollup=homepage&page.allowcompete=yes&params.styles=fs|CALLOUT -->
-<div id="ad-638159" align="center" style="padding: 0; margin: 0; border: 0;"></div>
-<script type="text/javascript">
-//cnnad_createAd("638159","http://ads.cnn.com/html.ng/site=cnn&amp;cnn_pagetype=main&amp;cnn_position=1x1_bot&amp;cnn_rollup=homepage&amp;page.allowcompete=yes&amp;params.styles=fs","1","1");
-cnnad_registerSpace(638159,1,1);
-</script>
-
-
-
-
-
-
-
-
-
-
-
-
-
-<script src="js/hpsectiontracking.js?date=20101102" type="text/javascript"></script>
-
-
-
-
-<span style="display: none" id="dn_category_slice" class="hslice">
-
- <div id="slice" class="hslice">
-
- <div class="entry-title">Top Stories</div>
-
- <div class="entry-content">CONTENT</div>
-
- <a rel="entry-content" href="http://ie8.smoothfusion.com/cnn/view.aspx" style="display: none;">LINKS</a>
-
- <a rel="bookmark" target="_blank" href="http://www.cnn.com/" style="display: none;" />
-
- <span class="ttl" style="display: none;">15</span>
-
- </div>
-
-</span>
-<img src="../i.cdn.turner.com/cnn/images/1.gif" alt="" id="TargetImage" name="TargetImage" width="1" height="1" onLoad="getAdHeadCookie(this)"><img src="../i.cdn.turner.com/cnn/images/1.gif" alt="" id="TargetImageDE" name="TargetImageDE" width="1" height="1" onLoad="getDEAdHeadCookie(this)">
-
-<script src="js/gw.js?csid=A09801"></script>
-<script language="JavaScript" type="text/javascript">
- //ADM
- cnnad_sendADMData();
-</script>
-
-<noscript>
- <style type="text/css">
- #cnnTxtCmpnt {display:block;}
- #cnnTabNav {display:none;}
- </style>
-</noscript>
-<div class="cnn_pad18top"></div></div></div><script type="text/javascript">Event.observe(window, 'load', function() {MainLocalObj.init(false);}); </script></body>
-<!-- Mirrored from www.cnn.com/ by HTTrack Website Copier/3.x [XR&CO'2010], Fri, 11 Feb 2011 04:23:50 GMT -->
-</html>
diff --git a/wlauto/workloads/bbench/patches/index_noinput.html b/wlauto/workloads/bbench/patches/index_noinput.html
deleted file mode 100644
index 072c9ad8..00000000
--- a/wlauto/workloads/bbench/patches/index_noinput.html
+++ /dev/null
@@ -1,56 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1.dtd">
-<!--
- Author: Anthony Gutierrez
--->
-
-<html xmlns="http://www.w3.org/1999/xhtml">
-
-<head>
- <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
- <title>University of Michigan - BBench 2.0</title>
- <script type="text/javascript" src="bbench.js"></script>
- <script type="text/javascript" src="forms.js"></script>
-</head>
-
-<body onload="startTest(2,2000,500)">
-<!--
-<body>
- <img src="mich_engin.png" width="35%"/>
- <h2>University of Michigan BBench version 2.0</h2>
-
- <form name="config_form">
- <b>Number of iterations:</b> <input type="text" name="numIterations" value="5" size="4" onchange="setIters();">
- <input type="button" value="-" name="iterPlusButton" onClick="document.config_form.numIterations.value=numItersDec(); return true;">
- <input type="button" value="+" name="iterMinusButton" onClick="document.config_form.numIterations.value=numItersInc(); return true;">
- (Number of times the page set is iterated through.)
- <br/><br/>
-
- <b>Scroll Delay (ms):</b> <input type="text" name="scrollDelay" value="0" size="8" onchange="setScrollDelay();">
- <input type="button" value="-" name="scrollDelayPlusButton" onClick="document.config_form.scrollDelay.value=scrollDelayDec(); return true;">
- <input type="button" value="+" name="scrollDelayMinusButton" onClick="document.config_form.scrollDelay.value=scrollDelayInc(); return true;">
- (Number of milliseconds to pause before scrolling.)
- <br/><br/>
-
- <b>Scroll Size:</b> <input type="text" name="scrollSize" value="500" size="8" onchange="setScrollSize();">
- <input type="button" value="-" name="scrollSizePlusButton" onClick="document.config_form.scrollSize.value=scrollSizeDec(); return true;">
- <input type="button" value="+" name="scrollSizeMinusButton" onClick="document.config_form.scrollSize.value=scrollSizeInc(); return true;">
- (Number of pixel to scroll.)
- <br/><br/>
- </form>
-
- <p>
- <b>Click on the start button to begin the benchmark.</b>
- </p>
- <button onclick="startTest(numIters, scrollDelay, scrollSize)">start</button>
-
- <p>
- If you use BBench in your work please cite our <a href="http://www.eecs.umich.edu/~atgutier/iiswc_2011.pdf">2011 IISWC paper</a>:<br/><br/>
-
- A. Gutierrez, R.G. Dreslinksi, T.F. Wenisch, T. Mudge, A. Saidi, C. Emmons, and N. Paver. Full-System Analysis and Characterization
- of Interactive Smartphone Applications. <i>IEEE International Symposium on Workload Characterization</i>, 2011.
- </p>
---!>
-</body>
-
-</html>
-
diff --git a/wlauto/workloads/bbench/patches/results.html b/wlauto/workloads/bbench/patches/results.html
deleted file mode 100644
index a7eb2e33..00000000
--- a/wlauto/workloads/bbench/patches/results.html
+++ /dev/null
@@ -1,158 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.1//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1.dtd">
-<!--
- Author: Anthony Gutierrez
--->
-
-<html xmlns="http://www.w3.org/1999/xhtml">
-
-<head>
- <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
- <title>University of Michigan - BBench 2.0</title>
- <script type="text/javascript" src="bbench.js"></script>
-
- <script type="text/javascript">
- var numTimesToExecute = getURLParams("iterations");
-
- function closeWindow() {
- window.open('','_self','');
- window.close();
- }
-
- function averageWarm(siteTimes) {
- var sum = 0;
-
- if (numTimesToExecute == 1)
- return siteTimes[0];
-
- for (i = 0; i < numTimesToExecute - 1; ++i)
- sum = eval(sum + siteTimes[i]);
-
- return (sum / (numTimesToExecute - 1));
- }
-
- function stdDevWarm(siteTimes) {
- var avg = averageWarm(siteTimes)
- var tmpArray = [];
-
- if (numTimesToExecute == 1)
- return 0;
-
- for (i = 0; i < numTimesToExecute - 1; ++i)
- tmpArray[i] = Math.pow((siteTimes[i] - avg), 2);
-
- avg = averageWarm(tmpArray);
-
- return Math.sqrt(avg);
- }
-
- function geoMean(avgTimes) {
- var prod = 1;
-
- for (i = 0; i < numWebsites; ++i)
- prod = eval(prod * avgTimes[i]);
-
- return Math.pow(prod, (1/numWebsites));
- }
- </script>
-</head>
-
-<body>
- <img src="mich_engin.png" width="35%"/>
- <h2>University of Michigan BBench version 2.0</h2>
- <h3>Results</h3>
-
- <script type="text/javascript">
- var bbSiteColdTimes = [];
- var bbSiteTimes = [];
- var bbSiteAvgRunTime = [];
- var bbSiteStdDev = [];
- var bbSiteCoeffVar = [];
- var bbSiteNames = ["amazon",
- "bbc",
- "cnn",
- "craigslist",
- "ebay",
-// "espn",
- "google",
- "msn",
- "slashdot",
- "twitter"];
-// "youtube"];
-
- var windowURL = new Object();
- var windowURL2 = new Object();
- windowURL.value = window.location.href;
- windowURL2.value = window.location.href;
-
- for (j = 0; j < numWebsites; ++j) {
-
- for (i = 0; i < numTimesToExecute; ++i) {
- var site_time = getAndRemoveURLParams(windowURL, bbSiteNames[j]) - 0;
- bbSiteTimes[i] = site_time;
- }
-
- bbSiteColdTimes[j] = bbSiteTimes[i - 1];
- bbSiteAvgRunTime[j] = averageWarm(bbSiteTimes);
- bbSiteStdDev[j] = stdDevWarm(bbSiteTimes);
- bbSiteCoeffVar[j] = (bbSiteStdDev[j] / bbSiteAvgRunTime[j]) * 100;
- }
-
- var bbSiteAvgGeoMean = geoMean(bbSiteAvgRunTime);
- </script>
-
- <table border="1">
- <script type="text/javascript">
- document.write("<tr align=\"right\"><td>Site Name</td><td>Cold Start Time</td><td>Avg Warm Page Rendering Time (ms)</td><td>Std Dev of Warm Runs</td><td>%Coeff Var of Warm Runs</td>");
- for (i = 0; i < numWebsites; ++i) {
- document.write("<tr align=\"right\">");
- document.write("<td>" + bbSiteNames[i] + "</td>");
- document.write("<td>" + bbSiteColdTimes[i] + "</td>");
- document.write("<td>" + bbSiteAvgRunTime[i].toFixed(2) + "</td>");
- document.write("<td>" + bbSiteStdDev[i].toFixed(2) + "</td>");
- document.write("<td>" + bbSiteCoeffVar[i].toFixed(2) + "</td>");
- document.write("</tr>");
- }
- </script>
- </table>
-
- <br />
-
- <table border="1">
- <script type="text/javascript">
- document.write("<tr><td>Geometric Mean of Average Warm Runs</td><td>" + bbSiteAvgGeoMean.toFixed(2) + "</td></tr>");
- console.log("metrics:" + "Mean = " + bbSiteAvgGeoMean.toFixed(2) + ":")
- </script>
- </table>
-
- <h3>CSV version of the table:</h3>
-
- <script type="text/javascript">
- document.write("Site Name,Cold Start Time, Avg Warm Page Rendering Time (ms),Std Dev of Warm Runs,%Coeff Var of Warm Runs<br />");
- for (i = 0; i < numWebsites; ++i) {
- document.write(bbSiteNames[i] + ",");
- document.write(bbSiteColdTimes[i] + ",");
- document.write(bbSiteAvgRunTime[i].toFixed(2) + ",");
- document.write(bbSiteStdDev[i].toFixed(2) + ",");
- document.write(bbSiteCoeffVar[i].toFixed(2) + "<br />");
- console.log("metrics:" + bbSiteNames[i] + "," + bbSiteColdTimes[i] + "," + bbSiteAvgRunTime[i].toFixed(2) + "," + bbSiteStdDev[i].toFixed(2) + "," + bbSiteCoeffVar[i].toFixed(2) + ":");
- }
-
- document.write("<h3>Individual Site Times:</h3>");
- for (j = 0; j < numWebsites; ++j) {
- for (i = 0; i < numTimesToExecute; ++i) {
- var site_time = getAndRemoveURLParams(windowURL2, bbSiteNames[j]) - 0;
- bbSiteTimes[i] = site_time;
- document.write(bbSiteNames[j] + " load time: " + site_time + "<br />");
- }
- document.write("<br />");
- }
- setTimeout("window.location.href='http://localhost:3030/'", 1);
- </script>
-
- <p>
- <b>Click the return button to go to the start page.</b>
- </p>
- <button onclick="window.location.href='index.html'">return</button>
-</body>
-
-</html>
diff --git a/wlauto/workloads/bbench/patches/twitter.html b/wlauto/workloads/bbench/patches/twitter.html
deleted file mode 100755
index 1df318da..00000000
--- a/wlauto/workloads/bbench/patches/twitter.html
+++ /dev/null
@@ -1,1215 +0,0 @@
-<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
-<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
- <!-- Added by HTTrack --><meta http-equiv="content-type" content="text/html;charset=utf-8"><!-- /Added by HTTrack -->
-<head>
-<!--
- added this for bbench
--->
-
-<script type="text/javascript" src="../../../bbench.js"></script>
-
-<script type="text/javascript">
- var bb_start_time = new Date().getTime();
- var newSiteIndex = getURLParams("siteIndex");
- var allParams = getAllParams();
-</script>
-
- <meta http-equiv="X-UA-Compatible" content="IE=edge">
-
- <script type="text/javascript">
-//<![CDATA[
-//window.location.replace('http://twitter.com/#!/THE_REAL_SHAQ');
-//]]>
-</script>
- <script type="text/javascript">
-//<![CDATA[
-(function(g){var c=g.location.href.split("#!");if(c[1]){g.location.replace(g.HBR = (c[0].replace(/\/*$/, "") + "/" + c[1].replace(/^\/*/, "")));}else return true})(window);
-//]]>
-</script>
- <script type="text/javascript" charset="utf-8">
- if (!twttr) {
- var twttr = {}
- }
-
- // Benchmarking load time.
- // twttr.timeTillReadyUnique = '1305156259-61696-28955';
- // twttr.timeTillReadyStart = new Date().getTime();
- </script>
-
- <script type="text/javascript">
-//<![CDATA[
-var page={};var onCondition=function(D,C,A,B){D=D;A=A?Math.min(A,5):5;B=B||100;if(D()){C()}else{if(A>1){setTimeout(function(){onCondition(D,C,A-1,B)},B)}}};
-//]]>
-</script>
- <meta content="text/html; charset=utf-8" http-equiv="Content-Type" />
-<meta content="en-us" http-equiv="Content-Language" />
-<meta content="THE_REAL_SHAQ (THE_REAL_SHAQ) is on Twitter. Sign up for Twitter to follow THE_REAL_SHAQ (THE_REAL_SHAQ) and get their latest updates" name="description" />
-<meta content="no" http-equiv="imagetoolbar" />
-<meta content="width = 780" name="viewport" />
-<meta content="4FTTxY4uvo0RZTMQqIyhh18HsepyJOctQ+XTOu1zsfE=" name="verify-v1" />
-<meta content="1" name="page" />
-<meta content="NOODP" name="robots" />
-<meta content="n" name="session-loggedin" />
-<meta content="THE_REAL_SHAQ" name="page-user-screen_name" />
- <title id="page_title">THE_REAL_SHAQ (THE_REAL_SHAQ) on Twitter</title>
- <!--
- <link href="../a1.twimg.com/a/1305153939/images/twitter_57.png" rel="apple-touch-icon" />
- -->
-<link href="oexchange.xrd" rel="http://oexchange.org/spec/0.8/rel/related-target" type="application/xrd+xml" />
-<link href="../a3.twimg.com/a/1305153939/images/favicon.ico" rel="shortcut icon" type="image/x-icon" />
- <link rel="alternate" href="statuses/user_timeline/17461978.rss" title="THE_REAL_SHAQ's Tweets" type="application/rss+xml" />
- <link rel="alternate" href="favorites/17461978.rss" title="THE_REAL_SHAQ's Favorites" type="application/rss+xml" />
-
-
- <link href="../a2.twimg.com/a/1305153939/stylesheets/twitter38c1.css?1304530346" media="screen" rel="stylesheet" type="text/css" />
-<link href="../a2.twimg.com/a/1305153939/stylesheets/geo38c1.css?1304530346" media="screen" rel="stylesheet" type="text/css" />
-<link href="../a3.twimg.com/a/1305153939/stylesheets/buttons_new38c1.css?1304530346" media="screen" rel="stylesheet" type="text/css" />
- <style type="text/css">
-
- body {
- background: #080203 url('img/Shaq_Twitpic_back_BW.jpg') fixed no-repeat;
-
-}
-
-body#show #content .meta a.screen-name,
-#content .shared-content .screen-name,
-#content .meta .byline a {
- color: #2FC2EF;
-}
-
-/* Link Color */
-a,
-#content tr.hentry:hover a,
-body#profile #content div.hentry:hover a,
-#side .stats a:hover span.stats_count,
-#side div.user_icon a:hover,
-li.verified-profile a:hover,
-#side .promotion .definition strong,
-p.list-numbers a:hover,
-#side div.user_icon a:hover span,
-#content .tabMenu li a,
-.translator-profile a:hover,
-#local_trend_locations li a,
-.modal-content .list-slug,
-.tweet-label a:hover,
-ol.statuses li.garuda-tweet:hover .actions-hover li span a,
-ol.statuses li.garuda-tweet .actions-hover li span a:hover {
- color: #2FC2EF;
-}
-
-body,
-ul#tabMenu li a, #side .section h1,
-#side .stat a,
-#side .stats a span.stats_count,
-#side div.section-header h1,
-#side div.user_icon a,
-#side div.user_icon a:hover,
-#side div.section-header h3.faq-header,
-ul.sidebar-menu li.active a,
-li.verified-profile a,
-#side .promotion a,
-body #content .list-header h2,
-p.list-numbers a,
-.bar h3 label,
-body.timeline #content h1,
-.list-header h2 a span,
-#content .tabMenu li.active a,
-body#direct_messages #content .tabMenu #inbox_tab a,
-body#inbox #content .tabMenu #inbox_tab a,
-body#sent #content .tabMenu #sent_tab a,
-body#direct_messages #content .tabMenu #inbox_tab a,
-body#retweets_by_others #content .tabMenu #retweets_by_others_tab a,
-body#retweets #content .tabMenu #retweets_tab a,
-body#retweeted_by_others #content .tabMenu #retweeted_by_others_tab a,
-body#retweeted_of_mine #content .tabMenu #retweeted_of_mine_tab a,
-.translator-profile a,
-#owners_lists h2 a {
- color: #666666;
-}
-
-.email-address-nag-banner {
- border-bottom: solid 1px #181A1E;
-}
-#side_base {
- border-left:1px solid #181A1E;
- background-color: #252429;
-}
-
-ul.sidebar-menu li.active a,
-ul.sidebar-menu li a:hover,
-#side div#custom_search.active,
-#side .promotion,
-.notify div {
- background-color: #3D3C41;
-}
-
-.list-header,
-.list-controls,
-ul.sidebar-list li.active a,
-ul.sidebar-list li a:hover,
-.list-header-inner {
- background-color: #252429 !important;
-}
-
-#side .actions,
-#side .promo,
-#design .side-section {
- border: 1px solid #181A1E;
-}
-
-#side div.section-header h3 {
- border-bottom: 1px solid #181A1E;
-}
-
-#side p.sidebar-location {
- border-bottom: 1px dotted #181A1E;
-}
-
-#side hr {
- background: #181A1E;
- color: #181A1E;
-}
-
-ul.sidebar-menu li.loading a {
- background: #3D3C41 url('http://a3.twimg.com/a/1305153939/images/spinner.gif') no-repeat 171px 0.5em !important;
-}
-
-#side .collapsible h2.sidebar-title {
- background: transparent url('http://a0.twimg.com/a/1305153939/images/toggle_up_light.png') no-repeat center right !important;
-}
-
-#side .collapsible.collapsed h2.sidebar-title {
- background: transparent url('http://a3.twimg.com/a/1305153939/images/toggle_down_light.png') no-repeat center right !important;
-}
-
-#side ul.lists-links li a em {
- background: url('http://a2.twimg.com/a/1305153939/images/arrow_right_light.png') no-repeat left top;
-}
-
-#side span.pipe {
- border-left:1px solid #181A1E;
-}
-
-#list_subscriptions span.view-all,
-#list_memberships span.view-all,
-#profile span.view-all,
-#profile_favorites span.view-all,
-#following span.view-all,
-#followers span.view-all {
- border-left: 0;
-}
-
-a.edit-list {
- border-right: 1px solid #181A1E !important;
-}
-
-
-
- </style>
- <link href="../a0.twimg.com/a/1305153939/stylesheets/following38c1.css?1304530346" media="screen, projection" rel="stylesheet" type="text/css" />
-
- </head>
-
-<!--
-<body class="account signin-island" id="profile">
--->
-<body class="account signin-island" id="profile" onload="siteTest(bb_site[newSiteIndex] + allParams, newSiteIndex, bb_start_time, 'twitter')">
-
-<div class="fixed-banners">
-
-
- </div>
- <script type="text/javascript">
-//<![CDATA[
-//document.domain = 'twitter.com';function fn(){document.write = "";window.top.location = window.self.location;setTimeout(function(){document.body.innerHTML = '';},0);window.self.onload = function(evt){document.body.innerHTML = '';};}if(window.top !== window.self){try{if(window.top.location.host){}else{fn();}}catch(e){fn();}}
-//]]>
-</script>
- <div id="dim-screen"></div>
- <ul id="accessibility" class="offscreen">
- <li><a href="#content" accesskey="0">Skip past navigation</a></li>
- <li>On a mobile phone? Check out <a href="http://m.twitter.com/">m.twitter.com</a>!</li>
- <li><a href="#footer" accesskey="2">Skip to navigation</a></li>
- <li><a href="#signin">Skip to sign in form</a></li>
-</ul>
-
-
-
-
-
-
- <div id="container" class="subpage">
- <span id="loader" style="display:none"><img alt="Loader" src="../a3.twimg.com/a/1305153939/images/loader.gif" /></span>
-
- <div class="clearfix" id="header">
- <a href="http://twitter.com/" title="Twitter / Home" accesskey="1" id="logo">
- <img alt="Twitter.com" src="../a2.twimg.com/a/1305153939/images/twitter_logo_header.png" />
- </a>
- <form method="post" id="sign_out_form" action="http://twitter.com/sessions/destroy" style="display:none;">
- <input name="authenticity_token" value="378c4e6964e0c855348d0cc3567c9d8b94040241" type="hidden"/>
- </form>
-
-
- <div id="signin_controls">
- <span id="have_an_account">
- Have an account?<a href="http://twitter.com/login" class="signin" tabindex="3"><span>Sign in</span></a></span>
- <div id="signin_menu" class="common-form standard-form offscreen">
-
- <form method="post" id="signin" action="https://twitter.com/sessions">
-
- <input id="authenticity_token" name="authenticity_token" type="hidden" value="378c4e6964e0c855348d0cc3567c9d8b94040241" /> <input id="return_to_ssl" name="return_to_ssl" type="hidden" value="false" />
- <input id="redirect_after_login" name="redirect_after_login" type="hidden" value="/THE_REAL_SHAQ" /> <p class="textbox">
- <label for="username">Username or email</label>
- <input type="text" id="username" name="session[username_or_email]" value="" title="username" tabindex="4"/>
- </p>
-
- <p class="textbox">
- <label for="password">Password</label>
- <input type="password" id="password" name="session[password]" value="" title="password" tabindex="5"/>
- </p>
-
- <p class="remember">
- <input type="submit" id="signin_submit" value="Sign in" tabindex="7"/>
- <input type="checkbox" id="remember" name="remember_me" value="1" tabindex="6"/>
- <label for="remember">Remember me</label>
- </p>
-
- <p class="forgot">
- <a href="http://twitter.com/account/resend_password" id="resend_password_link">Forgot password?</a>
- </p>
-
- <p class="forgot-username">
- <a href="http://twitter.com/account/resend_password" id="forgot_username_link" title="If you remember your password, try logging in with your email">Forgot username?</a>
- </p>
- <p class="complete">
- <a href="http://twitter.com/account/complete" id="account_complete_link">Already using Twitter on your phone?</a>
- </p>
- <input type="hidden" name="q" id="signin_q" value=""/>
- </form>
-</div>
-
-</div>
-
-
-
-
- </div>
-
-
- <div id="profilebox_outer" class="home_page_new_home_page">
- <div id="profilebox" class="clearfix">
- <div id="profiletext">
- <h1>
- <span>Get short, timely messages from THE_REAL_SHAQ.</span>
- </h1>
-
- <h2>Twitter is a rich source of instantly updated information. It's easy to stay updated on an incredibly wide variety of topics. <strong><a href='http://twitter.com/signup?follow=THE_REAL_SHAQ'>Join today</a></strong> and <strong>follow @THE_REAL_SHAQ</strong>.</h2>
- </div>
- <div id="profilebutton">
- <form action="http://twitter.com/signup" id="account_signup_form" method="get" name="account_signup_form"> <input id="follow" name="follow" type="hidden" value="THE_REAL_SHAQ" />
- <input class="profilesubmit" id="profile_submit" name="commit" type="submit" value="Sign Up &rsaquo;" />
- </form>
- <p id="profilebox-mobile">
- <span class="sms-follow-instructions">Get updates via SMS by texting <strong>follow THE_REAL_SHAQ</strong> to <strong>40404</strong> in the United States</span><br/>
- <a id="sms_codes_link">
- <span>Codes for other countries</span>
- </a>
- <div id="sms_codes">
- <table celspacing="0" celpadding="0">
- <thead>
- <tr class="title">
- <td colspan="3">Two-way (sending and receiving) short codes:</td>
- </tr>
- </thead>
- <tbody>
- <tr>
- <th class="sms-country">Country</th>
- <th class="sms-code">Code</th>
- <th class="sms-network">For customers of</th>
- </tr>
- <tr>
- <td class="sms-country">Australia</td>
- <td colspan="2" class="sms-code-network">
- <ul>
-
- <li>
- <span class="sms-code">0198089488</span>
- <span class="sms-network">Telstra</span>
- </li>
-
- </ul>
- </td>
-</tr><tr>
- <td class="sms-country">Canada</td>
- <td colspan="2" class="sms-code-network">
- <ul>
-
- <li>
- <span class="sms-code">21212</span>
- <span class="sms-network">(any)</span>
- </li>
-
- </ul>
- </td>
-</tr><tr>
- <td class="sms-country">United Kingdom</td>
- <td colspan="2" class="sms-code-network">
- <ul>
-
- <li>
- <span class="sms-code">86444</span>
- <span class="sms-network">Vodafone, Orange, 3, O2</span>
- </li>
-
- </ul>
- </td>
-</tr><tr>
- <td class="sms-country">Indonesia</td>
- <td colspan="2" class="sms-code-network">
- <ul>
-
- <li>
- <span class="sms-code">89887</span>
- <span class="sms-network">AXIS, 3, Telkomsel</span>
- </li>
-
- </ul>
- </td>
-</tr><tr>
- <td class="sms-country">Ireland</td>
- <td colspan="2" class="sms-code-network">
- <ul>
-
- <li>
- <span class="sms-code">51210</span>
- <span class="sms-network">O2</span>
- </li>
-
- </ul>
- </td>
-</tr><tr>
- <td class="sms-country">India</td>
- <td colspan="2" class="sms-code-network">
- <ul>
-
- <li>
- <span class="sms-code">53000</span>
- <span class="sms-network">Bharti Airtel, Videocon</span>
- </li>
-
- </ul>
- </td>
-</tr><tr>
- <td class="sms-country">Jordan</td>
- <td colspan="2" class="sms-code-network">
- <ul>
-
- <li>
- <span class="sms-code">90903</span>
- <span class="sms-network">Zain</span>
- </li>
-
- </ul>
- </td>
-</tr><tr>
- <td class="sms-country">New Zealand</td>
- <td colspan="2" class="sms-code-network">
- <ul>
-
- <li>
- <span class="sms-code">8987</span>
- <span class="sms-network">Vodafone, Telecom NZ</span>
- </li>
-
- </ul>
- </td>
-</tr><tr>
- <td class="sms-country">United States</td>
- <td colspan="2" class="sms-code-network">
- <ul>
-
- <li>
- <span class="sms-code">40404</span>
- <span class="sms-network">(any)</span>
- </li>
-
- </ul>
- </td>
-</tr>
- </tbody>
- </table>
-</div>
-
- </p>
- </div>
- </div>
- </div>
-
-
-
-
-
- <div class="content-bubble-arrow"></div>
-
-
-
- <table cellspacing="0" class="columns">
- <tbody>
- <tr>
- <td id="content" class="round-left column">
- <div class="wrapper">
-
-
-
-
-
-
-
-
- <div class="profile-user">
- <div id="user_17461978" class="user ">
- <h2 class="thumb clearfix">
- <a href="http://twitter.com/account/profile_image/THE_REAL_SHAQ?hreflang=en"><img alt="" border="0" height="73" id="profile-image" src="../a2.twimg.com/profile_images/282574177/Shaq_avatar_bigger.jpg" valign="middle" width="73" /></a>
- <div class="screen-name">THE_REAL_SHAQ</div>
- </h2>
- </div>
- </div>
-
-
-
- <div id="similar_to_followed"></div>
-
-<div class="section">
-
- <div id="timeline_heading" style="display: none;">
- <h1 id="heading"></h1>
- </div>
- <ol id='timeline' class='statuses'>
- <li class="hentry u-THE_REAL_SHAQ status latest-status" id="status_63631192263634946"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">I need all of my people to Watch my Nephew Cory Gunz New MTV Show &quot;Son Of A Gunz&quot; tonight at 11pm Eastern time. LET'S GO!!!</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/63631192263634946">
- <span class="published timestamp" data="http://twitter.com/{time:'Thu Apr 28 15:50:33 +0000 2011'}">8:50 AM Apr 28th</span></a>
- <span>via <a href="http://www.tweetdeck.com/" rel="nofollow">TweetDeck</a></span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_61934819298390016"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/Christleen" rel="nofollow">Christleen</a> not tru</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/61934819298390016">
- <span class="published timestamp" data="http://twitter.com/{time:'Sat Apr 23 23:29:46 +0000 2011'}">4:29 PM Apr 23rd</span></a>
- <span>via web</span>
-
- <a href="http://twitter.com/Christleen/status/61926615202021376">in reply to Christleen</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_61934347124617216"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/im_pappag" rel="nofollow">im_pappag</a> copyshaq</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/61934347124617216">
- <span class="published timestamp" data="http://twitter.com/{time:'Sat Apr 23 23:27:53 +0000 2011'}">4:27 PM Apr 23rd</span></a>
- <span>via web</span>
-
- <a href="http://twitter.com/im_pappag/status/61926652556488704">in reply to im_pappag</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_61933762203758592"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/flocker289" rel="nofollow">flocker289</a> yup</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/61933762203758592">
- <span class="published timestamp" data="http://twitter.com/{time:'Sat Apr 23 23:25:34 +0000 2011'}">4:25 PM Apr 23rd</span></a>
- <span>via web</span>
-
- <a href="http://twitter.com/flocker289/status/61931312084291585">in reply to flocker289</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_61925835795472384"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">gsp ftw, dats all <a href="http://bit.ly/eo177i" class="tweet-url web" rel="nofollow" target="_blank">http://bit.ly/eo177i</a></span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/61925835795472384">
- <span class="published timestamp" data="http://twitter.com/{time:'Sat Apr 23 22:54:04 +0000 2011'}">3:54 PM Apr 23rd</span></a>
- <span>via web</span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_57926696896167936"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/yogi4change" rel="nofollow">yogi4change</a> i like dat</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/57926696896167936">
- <span class="published timestamp" data="http://twitter.com/{time:'Tue Apr 12 22:02:55 +0000 2011'}">3:02 PM Apr 12th</span></a>
- <span>via web</span>
-
- <a href="http://twitter.com/yogi4change/status/57926458106060800">in reply to yogi4change</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_57926550313631744"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/jvhugg" rel="nofollow">jvhugg</a> thanks man, hope u can help da kids</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/57926550313631744">
- <span class="published timestamp" data="http://twitter.com/{time:'Tue Apr 12 22:02:20 +0000 2011'}">3:02 PM Apr 12th</span></a>
- <span>via web</span>
-
- <a href="http://twitter.com/jvhugg/status/57926439135227904">in reply to jvhugg</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_57926443421810688"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/TrickyTre" rel="nofollow">TrickyTre</a> dats true</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/57926443421810688">
- <span class="published timestamp" data="http://twitter.com/{time:'Tue Apr 12 22:01:54 +0000 2011'}">3:01 PM Apr 12th</span></a>
- <span>via web</span>
-
- <a href="http://twitter.com/TrickyTre/status/57926256188071936">in reply to TrickyTre</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_57924961456762880"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">todays random act of shaqness, help out my little friends n be a childhelper <a href="http://bit.ly/hKFdFZ" class="tweet-url web" rel="nofollow" target="_blank">http://bit.ly/hKFdFZ</a></span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/57924961456762880">
- <span class="published timestamp" data="http://twitter.com/{time:'Tue Apr 12 21:56:01 +0000 2011'}">2:56 PM Apr 12th</span></a>
- <span>via web</span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_55075318645530624"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">did u already bust ur shaqet or u got <a href="http://twitter.com/search?q=%23butler" title="#butler" class="tweet-url hashtag" rel="nofollow">#butler</a> or <a href="http://twitter.com/search?q=%23uconn" title="#uconn" class="tweet-url hashtag" rel="nofollow">#uconn</a> winning it all? chat wit me during da <a href="http://twitter.com/search?q=%23ncaa" title="#ncaa" class="tweet-url hashtag" rel="nofollow">#ncaa</a> finals <a href="http://phi.tv/idOlGZ" class="tweet-url web" rel="nofollow" target="_blank">http://phi.tv/idOlGZ</a></span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/55075318645530624">
- <span class="published timestamp" data="http://twitter.com/{time:'Tue Apr 05 01:12:33 +0000 2011'}">6:12 PM Apr 4th</span></a>
- <span>via web</span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_53584327879041024"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">u chattin when the @<a class="tweet-url username" href="http://twitter.com/celtics" rel="nofollow">celtics</a> beat da spurs tonight? best comment wins 2 free tickets from urs truly <a href="http://phi.tv/eVJWBo" class="tweet-url web" rel="nofollow" target="_blank">http://phi.tv/eVJWBo</a></span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/53584327879041024">
- <span class="published timestamp" data="http://twitter.com/{time:'Thu Mar 31 22:27:53 +0000 2011'}">2:27 PM Mar 31st</span></a>
- <span>via web</span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_52815239217033216"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">twitter mirror twittrr mirror on da wall whos da best twitterer of dem all? <a href="http://ti.me/g85EUt" class="tweet-url web" rel="nofollow" target="_blank">http://ti.me/g85EUt</a></span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/52815239217033216">
- <span class="published timestamp" data="http://twitter.com/{time:'Tue Mar 29 19:31:48 +0000 2011'}">11:31 AM Mar 29th</span></a>
- <span>via web</span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_52047863525539840"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">RT @<a class="tweet-url username" href="http://twitter.com/realgranthill33" rel="nofollow">realgranthill33</a>: Me, @<a class="tweet-url username" href="http://twitter.com/THE_REAL_SHAQ" rel="nofollow">THE_REAL_SHAQ</a> and James Brown in 1996 <a href="http://t.co/qOMoz0Y" class="tweet-url web" rel="nofollow" target="_blank">http://t.co/qOMoz0Y</a></span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/52047863525539840">
- <span class="published timestamp" data="http://twitter.com/{time:'Sun Mar 27 16:42:32 +0000 2011'}">8:42 AM Mar 27th</span></a>
- <span>via <a href="http://twitter.com/#!/download/iphone" rel="nofollow">Twitter for iPhone</a></span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_51099620096491521"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">im chatting live during da <a href="http://twitter.com/search?q=%23duke" title="#duke" class="tweet-url hashtag" rel="nofollow">#duke</a> vs <a href="http://twitter.com/search?q=%23arizona" title="#arizona" class="tweet-url hashtag" rel="nofollow">#arizona</a> game, come join march shaqness lol <a href="http://phi.tv/fZsSxo" class="tweet-url web" rel="nofollow" target="_blank">http://phi.tv/fZsSxo</a> <a href="http://twitter.com/search?q=%23marchmadness" title="#marchmadness" class="tweet-url hashtag" rel="nofollow">#marchmadness</a></span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/51099620096491521">
- <span class="published timestamp" data="http://twitter.com/{time:'Fri Mar 25 01:54:33 +0000 2011'}">5:54 PM Mar 24th</span></a>
- <span>via web</span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_51099168864862209"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">check out @<a class="tweet-url username" href="http://twitter.com/jonnybones" rel="nofollow">jonnybones</a> on da @<a class="tweet-url username" href="http://twitter.com/jayleno" rel="nofollow">jayleno</a> sho tonite 11:35pm ET <a href="http://bit.ly/enCsFO" class="tweet-url web" rel="nofollow" target="_blank">http://bit.ly/enCsFO</a></span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/51099168864862209">
- <span class="published timestamp" data="http://twitter.com/{time:'Fri Mar 25 01:52:45 +0000 2011'}">5:52 PM Mar 24th</span></a>
- <span>via web</span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_50031332071452672"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">u complete me twitter, lol <a href="http://t.co/Riq93H4" class="tweet-url web" rel="nofollow" target="_blank">http://t.co/Riq93H4</a></span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/50031332071452672">
- <span class="published timestamp" data="http://twitter.com/{time:'Tue Mar 22 03:09:33 +0000 2011'}">7:09 PM Mar 21st</span></a>
- <span>via <a href="http://twitter.com/#!/download/iphone" rel="nofollow">Twitter for iPhone</a></span>
-
- </span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_49688912150536193"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/peterfitz" rel="nofollow">peterfitz</a> I agree</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/49688912150536193">
- <span class="published timestamp" data="http://twitter.com/{time:'Mon Mar 21 04:28:54 +0000 2011'}">8:28 PM Mar 20th</span></a>
- <span>via <a href="http://ubersocial.com/" rel="nofollow">ÜberSocial</a></span>
-
- <a href="http://twitter.com/peterfitz/status/49639347435212800">in reply to peterfitz</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_49688790377316353"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/SamTecle" rel="nofollow">SamTecle</a> hell yeah</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/49688790377316353">
- <span class="published timestamp" data="http://twitter.com/{time:'Mon Mar 21 04:28:25 +0000 2011'}">8:28 PM Mar 20th</span></a>
- <span>via <a href="http://ubersocial.com/" rel="nofollow">ÜberSocial</a></span>
-
- <a href="http://twitter.com/SamTecle/status/49653253255872512">in reply to SamTecle</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_49688697314086912"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/JOHELCD" rel="nofollow">JOHELCD</a> yes sir</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/49688697314086912">
- <span class="published timestamp" data="http://twitter.com/{time:'Mon Mar 21 04:28:03 +0000 2011'}">8:28 PM Mar 20th</span></a>
- <span>via <a href="http://ubersocial.com/" rel="nofollow">ÜberSocial</a></span>
-
- <a href="http://twitter.com/JOHELCD/status/49668129806225408">in reply to JOHELCD</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- <li class="hentry u-THE_REAL_SHAQ status" id="status_49687984865415168"
->
- <span class="status-body">
- <span class="status-content">
- <span class="entry-content">@<a class="tweet-url username" href="http://twitter.com/tamaAK87" rel="nofollow">tamaAK87</a> I hope so bro</span>
- </span>
- <span class="meta entry-meta" data='http://twitter.com/{}'>
- <a class="entry-date" rel="bookmark" href="http://twitter.com/THE_REAL_SHAQ/status/49687984865415168">
- <span class="published timestamp" data="http://twitter.com/{time:'Mon Mar 21 04:25:13 +0000 2011'}">8:25 PM Mar 20th</span></a>
- <span>via <a href="http://ubersocial.com/" rel="nofollow">ÜberSocial</a></span>
-
- <a href="http://twitter.com/tamaAK87/status/49672019486318592">in reply to tamaAK87</a></span>
-
- <ul class="meta-data clearfix">
-</ul>
- </span>
-</li>
- </ol>
-
- <div id="pagination">
- <a href="http://twitter.com/THE_REAL_SHAQ?max_id=63631192263634946&amp;page=2&amp;twttr=true" class="round more" id="more" rel="next">more</a> </div>
-
-</div>
-
-
-
-
-
- </div>
- </td>
-
- <td id="side_base" class="column round-right">
-
- <div id="side">
-
-<div id="profile" class="section profile-side">
- <span class="section-links">
- </span>
- <address>
- <ul class="about vcard entry-author">
-
- <li class="verified-profile"><a href="http://twitter.com/help/verified"><em>Verified Account</em></a></li>
-
-
-
- <li><span class="label">Name</span> <span class="fn">THE_REAL_SHAQ</span></li>
- <li><span class="label">Location</span> <span class="adr">BOSTON/EVERYWHERE</span></li>
- <li><span class="label">Web</span> <a href="http://www.facebook.com/Shaq" class="url" rel="me nofollow" target="_blank">http://www.Facebo...</a></li>
- <li id="bio"><span class="label">Bio</span> <span class="bio">VERY QUOTATIOUS, I PERFORM RANDOM ACTS OF SHAQNESS</span></li>
-
- </ul>
- </address>
-
-
-
-<div class="stats">
- <table>
- <tr>
- <td>
-
-
-
-<a href="http://twitter.com/THE_REAL_SHAQ/following" id="following_count_link" class="link-following_page" rel="me" title="See who THE_REAL_SHAQ is following">
- <span id="following_count" class="stats_count numeric">632 </span>
- <span class="label">Following</span>
-</a>
-
-
- </td>
- <td>
-
-<a href="http://twitter.com/THE_REAL_SHAQ/followers" id="follower_count_link" class="link-followers_page" rel="me" title="See who's following THE_REAL_SHAQ">
- <span id="follower_count" class="stats_count numeric">3,773,066 </span>
- <span class="label">Followers</span>
-</a>
-
-</td>
- <td>
-
-<a href="http://twitter.com/THE_REAL_SHAQ/lists/memberships" id="lists_count_link" class="link-lists_page" rel="me" title="See which lists THE_REAL_SHAQ is on">
- <span id="lists_count" class="stats_count numeric">37,759 </span>
- <span class="label">Listed</span>
-</a>
-
-</td>
- </tr>
- </table>
-
-</div>
-
-</div>
-
- <ul id="primary_nav" class="sidebar-menu">
- <li id="profile_tab"><a href="http://twitter.com/THE_REAL_SHAQ" accesskey="u"><span id="update_count" class="stat_count">3,591</span><span>Tweets</span></a></li>
- <li id="profile_favorites_tab"><a href="http://twitter.com/THE_REAL_SHAQ/favorites" accesskey="f"><span>Favorites</span></a></li>
- </ul>
-
-
-
-
-
-
-<hr/>
-
-
- <div id="following">
-
- <h2 class="sidebar-title" id="fm_menu"><span>Following</span></h2>
- <div class="sidebar-menu">
- <div id="following_list">
-
- <span class="vcard">
- <a href="http://twitter.com/AbellaXXX" class="url" hreflang="en" rel="contact" title="Abella Anderson✔"><img alt="Abella Anderson✔" class="photo fn" height="24" src="../a1.twimg.com/profile_images/1340603771/abella03resize_wonder_1__mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/ElleRoyale" class="url" hreflang="en" rel="contact" title="Alisha Gabrielle!"><img alt="Alisha Gabrielle!" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1343529813/31395_748676384518_15702008_41502160_2170655_n_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/Kingyella" class="url" hreflang="en" rel="contact" title="Kingyella.com"><img alt="Kingyella.com" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1311461880/KOC_Twitter_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/kdaubert1" class="url" hreflang="en" rel="contact" title="kd Aubert"><img alt="kd Aubert" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1234783721/6xg793by_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/beautifullapril" class="url" hreflang="en" rel="contact" title="beautifull april"><img alt="beautifull april" class="photo fn" height="24" src="../a0.twimg.com/profile_images/1345744548/DSCF9851-1_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/THEONLYLANA" class="url" hreflang="en" rel="contact" title="Lana Smith Brown"><img alt="Lana Smith Brown" class="photo fn" height="24" src="../a1.twimg.com/profile_images/1204373086/image_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/Ben_Hill_4" class="url" hreflang="en" rel="contact" title="Yomand Brown"><img alt="Yomand Brown" class="photo fn" height="24" src="../a3.twimg.com/profile_images/1287404787/image_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/REAL_PHILO_MAN" class="url" hreflang="en" rel="contact" title="PHILO MAN"><img alt="PHILO MAN" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1257124758/photo_mini.jpeg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/BrittGastineau" class="url" hreflang="en" rel="contact" title="brittny gastineau"><img alt="brittny gastineau" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1264054894/heyboy_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/VanessaRider" class="url" hreflang="en" rel="contact" title="Vanessa rider"><img alt="Vanessa rider" class="photo fn" height="24" src="../a1.twimg.com/profile_images/1316365411/bbwvan_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/ChadyDunmore" class="url" hreflang="en" rel="contact" title="chady dunmore"><img alt="chady dunmore" class="photo fn" height="24" src="../a1.twimg.com/profile_images/1325869569/image_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/TheRock" class="url" hreflang="en" rel="contact" title="Dwayne Johnson"><img alt="Dwayne Johnson" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1243188928/Rock-twitter-avatar_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/fedoruh" class="url" hreflang="en" rel="contact" title="Thunderbolt"><img alt="Thunderbolt" class="photo fn" height="24" src="../a0.twimg.com/profile_images/56128457/d_2068_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/JBA512" class="url" hreflang="en" rel="contact" title="Jamie Ambrosius"><img alt="Jamie Ambrosius" class="photo fn" height="24" src="../a0.twimg.com/profile_images/1218915882/image_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/AshleyBGC" class="url" hreflang="en" rel="contact" title="Ashley Nichole"><img alt="Ashley Nichole" class="photo fn" height="24" src="../a3.twimg.com/profile_images/1225064424/180728_145176348873194_144286742295488_274512_5245984_n_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/SUMMERWALKER" class="url" hreflang="en" rel="contact" title="SUMMER WALKER"><img alt="SUMMER WALKER" class="photo fn" height="24" src="../a1.twimg.com/profile_images/1335303828/m3Taz_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/Twitvid" class="url" hreflang="en" rel="contact" title="TwitVid"><img alt="TwitVid" class="photo fn" height="24" src="../a3.twimg.com/profile_images/1245596338/tv-logo_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/DenonW" class="url" hreflang="en" rel="contact" title="Denon Wilgosh"><img alt="Denon Wilgosh" class="photo fn" height="24" src="../a0.twimg.com/profile_images/1237797400/photo_mini.jpeg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/MartyCordova" class="url" hreflang="en" rel="contact" title="Marty Cordova"><img alt="Marty Cordova" class="photo fn" height="24" src="../a0.twimg.com/profile_images/1182047839/thumbnailmarty2_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/Yelawolf" class="url" hreflang="en" rel="contact" title="Yelawolf"><img alt="Yelawolf" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1316877690/yelawolf_resized_mini.png" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/UberSoc" class="url" hreflang="en" rel="contact" title="ÜberSocial "><img alt="ÜberSocial " class="photo fn" height="24" src="../a1.twimg.com/profile_images/893904993/Uber80_mini.png" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/thomasobrey" class="url" hreflang="en" rel="contact" title="thomasobrey"><img alt="thomasobrey" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1134818700/photo_mini.JPG" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/SherrodbCSN" class="url" hreflang="en" rel="contact" title="A. Sherrod Blakely"><img alt="A. Sherrod Blakely" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1340162205/black.white.head.shot_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/Gurpartap" class="url" hreflang="en" rel="contact" title="Gurpartap  Singh"><img alt="Gurpartap  Singh" class="photo fn" height="24" src="../a0.twimg.com/profile_images/1327793136/3CD48B4E-38BB-4D66-A304-050E63F7F24F_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/Ms_Farrakhan" class="url" hreflang="en" rel="contact" title="Karen Farrakhan"><img alt="Karen Farrakhan" class="photo fn" height="24" src="../a3.twimg.com/profile_images/1127520067/S5000210_2_mini.JPG" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/hollyrpeete" class="url" hreflang="en" rel="contact" title="Holly Robinson Peete"><img alt="Holly Robinson Peete" class="photo fn" height="24" src="../a0.twimg.com/profile_images/1327719571/holly-robinson-peete-humanitarian590_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/ishaNikki" class="url" hreflang="en" rel="contact" title="Aisha Nicole"><img alt="Aisha Nicole" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1250109002/l_501f436110e5693ac4e99f618b15a19d_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/TRINArockstarr" class="url" hreflang="en" rel="contact" title="TRINA "><img alt="TRINA " class="photo fn" height="24" src="../a0.twimg.com/profile_images/1249238745/244519224_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/PilarSanders" class="url" hreflang="en" rel="contact" title="PilarSanders"><img alt="PilarSanders" class="photo fn" height="24" src="../a3.twimg.com/profile_images/1323006300/Avilightps2_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/AmeriieStan101" class="url" hreflang="en" rel="contact" title="Cymatika2K11"><img alt="Cymatika2K11" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1155621659/becauseiloveitpromo4_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/DemetriusDavis" class="url" hreflang="en" rel="contact" title="Demetrius Davis"><img alt="Demetrius Davis" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1213371005/Davis_phone_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/Renee_1908" class="url" hreflang="en" rel="contact" title="Renee' Cobson"><img alt="Renee' Cobson" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1336608138/image_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/everychildusa" class="url" hreflang="en" rel="contact" title="EveryChildUSA"><img alt="EveryChildUSA" class="photo fn" height="24" src="../a1.twimg.com/profile_images/1104412861/everychild_twitter_image_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/TwitChange" class="url" hreflang="en" rel="contact" title="TwitChange"><img alt="TwitChange" class="photo fn" height="24" src="../a0.twimg.com/profile_images/1331744950/tc_avatar_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/jimmykimmel" class="url" hreflang="en" rel="contact" title="Jimmy Kimmel"><img alt="Jimmy Kimmel" class="photo fn" height="24" src="../a0.twimg.com/profile_images/1281205615/jimmyturkey_mini.jpg" width="24" /></a> </span>
-
-
- <span class="vcard">
- <a href="http://twitter.com/nate_robinson" class="url" hreflang="en" rel="contact" title="Nate Robinson"><img alt="Nate Robinson" class="photo fn" height="24" src="../a2.twimg.com/profile_images/1339571867/nate_robinson_mini.jpg" width="24" /></a> </span>
-
-
- </div>
- <div id="friends_view_all">
- <a href="http://twitter.com/THE_REAL_SHAQ/following" rel="me">View all&hellip;</a>
- </div>
-
-</div>
-
- <hr/>
- </div>
-
-
-
-
-
- <div id="rssfeed">
- <a href="statuses/user_timeline/17461978.rss" class="xref rss profile-rss" rel="alternate" type="application/rss+xml">RSS feed of THE_REAL_SHAQ's tweets</a>
- <a href="favorites/17461978.rss" class="xref rss favorites-rss" rel="alternate" type="application/rss+xml">RSS feed of THE_REAL_SHAQ's favorites</a>
- </div>
-
-
-
-
- </div>
- </td>
-
- </tr>
- </tbody>
- </table>
-
-
-
- <div id="footer" class="round">
- <h3 class="offscreen">Footer</h3>
-
-
- <ul class="footer-nav">
- <li class="first">&copy; 2011 Twitter</li>
- <li><a href="http://twitter.com/about">About Us</a></li>
- <li><a href="http://twitter.com/about/contact">Contact</a></li>
- <li><a href="http://blog.twitter.com/">Blog</a></li>
- <li><a href="http://status.twitter.com/">Status</a></li>
- <li><a href="http://twitter.com/about/resources">Resources</a></li>
- <li><a href="http://dev.twitter.com/">API</a></li>
- <li><a href="http://twitter.com/business">Business</a></li>
- <li><a href="http://support.twitter.com/">Help</a></li>
- <li><a href="http://twitter.com/jobs">Jobs</a></li>
- <li><a href="http://twitter.com/tos">Terms</a></li>
- <li><a href="http://twitter.com/privacy">Privacy</a></li>
- </ul>
- </div>
-
-
-
- </div>
-
-
-
- <script src="js/jquery.min.js" type="text/javascript"></script>
-<script src="js/twitter.js?1304530346" type="text/javascript"></script>
-<script src="js/jquery.tipsy.min.js?1304530346" type="text/javascript"></script>
-<script type='text/javascript' src='js/jsapi.js'></script>
-<script src="js/gears_init.js?1304530346" type="text/javascript"></script>
-<script src="js/mustache.js?1304530346" type="text/javascript"></script>
-<script src="js/geov1.js?1304530348" type="text/javascript"></script>
-<script src="js/api.js?1304530346" type="text/javascript"></script>
-<script type="text/javascript">
-//<![CDATA[
-$.cookie('tz_offset_sec', (-1 * (new Date()).getTimezoneOffset())*60);
-//]]>
-</script>
- <script src="js/mustache.js?1304530346" type="text/javascript"></script>
-<script src="js/dismissable.js?1304530346" type="text/javascript"></script>
-
-
-<script type="text/javascript">
-//<![CDATA[
- page.user_screenname = 'THE_REAL_SHAQ';
- page.user_fullname = 'THE_REAL_SHAQ';
- page.controller_name = 'AccountController';
- page.action_name = 'profile';
- twttr.form_authenticity_token = '378c4e6964e0c855348d0cc3567c9d8b94040241';
- $.ajaxSetup({ data: { authenticity_token: '378c4e6964e0c855348d0cc3567c9d8b94040241' } });
-
- // FIXME: Reconcile with the kinds on the Status model.
- twttr.statusKinds = {
- UPDATE: 1,
- SHARE: 2
- };
- twttr.ListPerUserLimit = 20;
-
-//]]>
-</script>
-<script type="text/javascript">
-//<![CDATA[
-
- $( function () {
-
- $("#sms_codes_link").hoverTip("#sms_codes");
- initializePage();
-
-
-
- if (twttr.geo !== undefined) {
- twttr.geo.options.show_place_details_in_map = true;
- }
-
-(function(g){function b(){var c=g.location.href.split("#!");if(c[1]){g.location.replace(g.HBR = (c[0].replace(/\/*$/, "") + "/" + c[1].replace(/^\/*/, "")));}else return true}var a="onhashchange"in g;if(!a&&g.setAttribute){g.setAttribute("onhashchange","return;");a=typeof g.onhashchange==="function"}if(a)$(g).bind("hashchange",b);else{var d=function(){b()&&setTimeout(d,250)};setTimeout(d,250)}}(window));
- $('#signin_menu').isSigninMenu();
-
- });
-
-//]]>
-</script>
-
- <!-- BEGIN google analytics -->
-
- <script type="text/javascript">
- var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
- document.write(unescape("%3Cscript src='" + "js/ga.js' type='text/javascript'%3E%3C/script%3E"));
- </script>
-
- <script type="text/javascript">
-
- try {
- var pageTracker = _gat._getTracker("UA-30775-6");
- pageTracker._setDomainName("twitter.com");
- pageTracker._setVar('Not Logged In');
- pageTracker._setVar('lang: en');
- pageTracker._initData();
-
- pageTracker._trackPageview('/profile/not_logged_in/THE_REAL_SHAQ');
- } catch(err) { }
-
- </script>
-
- <!-- END google analytics -->
-
-
-
-
- <div id="notifications"></div>
-
-
-
-
-
-
- </body>
-
-<!-- Added by HTTrack --><meta http-equiv="content-type" content="text/html;charset=utf-8"><!-- /Added by HTTrack -->
-</html>
diff --git a/wlauto/workloads/benchmarkpi/__init__.py b/wlauto/workloads/benchmarkpi/__init__.py
deleted file mode 100644
index c49f6d05..00000000
--- a/wlauto/workloads/benchmarkpi/__init__.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import re
-
-from wlauto import AndroidUiAutoBenchmark
-
-
-class BenchmarkPi(AndroidUiAutoBenchmark):
-
- name = 'benchmarkpi'
- description = """
- Measures the time the target device takes to run and complete the Pi
- calculation algorithm.
-
- http://androidbenchmark.com/howitworks.php
-
- from the website:
-
- The whole idea behind this application is to use the same Pi calculation
- algorithm on every Android Device and check how fast that proccess is.
- Better calculation times, conclude to faster Android devices. This way you
- can also check how lightweight your custom made Android build is. Or not.
-
- As Pi is an irrational number, Benchmark Pi does not calculate the actual Pi
- number, but an approximation near the first digits of Pi over the same
- calculation circles the algorithms needs.
-
- So, the number you are getting in miliseconds is the time your mobile device
- takes to run and complete the Pi calculation algorithm resulting in a
- approximation of the first Pi digits.
- """
- package = 'gr.androiddev.BenchmarkPi'
- activity = '.BenchmarkPi'
- summary_metrics = ['pi calculation']
-
- regex = re.compile('You calculated Pi in ([0-9]+)')
-
- def update_result(self, context):
- super(BenchmarkPi, self).update_result(context)
- result = None
- with open(self.logcat_log) as fh:
- for line in fh:
- match = self.regex.search(line)
- if match:
- result = int(match.group(1))
-
- if result is not None:
- context.result.add_metric('pi calculation', result,
- 'Milliseconds', lower_is_better=True)
diff --git a/wlauto/workloads/benchmarkpi/com.arm.wlauto.uiauto.benchmarkpi.jar b/wlauto/workloads/benchmarkpi/com.arm.wlauto.uiauto.benchmarkpi.jar
deleted file mode 100644
index 433334d2..00000000
--- a/wlauto/workloads/benchmarkpi/com.arm.wlauto.uiauto.benchmarkpi.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/benchmarkpi/uiauto/build.sh b/wlauto/workloads/benchmarkpi/uiauto/build.sh
deleted file mode 100755
index be7ca104..00000000
--- a/wlauto/workloads/benchmarkpi/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.benchmarkpi.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.benchmarkpi.jar ..
-fi
diff --git a/wlauto/workloads/benchmarkpi/uiauto/build.xml b/wlauto/workloads/benchmarkpi/uiauto/build.xml
deleted file mode 100644
index 67603ca8..00000000
--- a/wlauto/workloads/benchmarkpi/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.benchmarkpi" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/benchmarkpi/uiauto/project.properties b/wlauto/workloads/benchmarkpi/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/benchmarkpi/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/benchmarkpi/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/benchmarkpi/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index e4e8b7ad..00000000
--- a/wlauto/workloads/benchmarkpi/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.benchmarkpi;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "benchmarkpi";
-
- public void runUiAutomation() throws Exception{
- Bundle status = new Bundle();
-
- startTest();
- waitForAndExtractResults();
-
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
- public void startTest() throws Exception{
- UiSelector selector = new UiSelector();
- UiObject benchButton = new UiObject(selector.text("Benchmark my Android!")
- .className("android.widget.Button"));
- benchButton.click();
- }
-
- public void waitForAndExtractResults() throws Exception{
- UiSelector selector = new UiSelector();
- UiObject submitButton = new UiObject(selector.text("Submit")
- .className("android.widget.Button"));
- submitButton.waitForExists(10 * 1000);
-
- UiObject resultsText = new UiObject(selector.textContains("You calculated Pi in")
- .className("android.widget.TextView"));
- Log.v(TAG, resultsText.getText());
- }
-}
diff --git a/wlauto/workloads/caffeinemark/__init__.py b/wlauto/workloads/caffeinemark/__init__.py
deleted file mode 100644
index 6b2ae0f9..00000000
--- a/wlauto/workloads/caffeinemark/__init__.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import re
-
-from wlauto import AndroidUiAutoBenchmark
-
-
-class Caffeinemark(AndroidUiAutoBenchmark):
-
- name = 'caffeinemark'
- description = """
- CaffeineMark is a series of tests that measure the speed of Java
- programs running in various hardware and software configurations.
-
- http://www.benchmarkhq.ru/cm30/info.html
-
- From the website:
-
- CaffeineMark scores roughly correlate with the number of Java instructions
- executed per second, and do not depend significantly on the the amount of
- memory in the system or on the speed of a computers disk drives or internet
- connection.
-
- The following is a brief description of what each test does:
-
- - Sieve: The classic sieve of eratosthenes finds prime numbers.
- - Loop: The loop test uses sorting and sequence generation as to measure
- compiler optimization of loops.
- - Logic: Tests the speed with which the virtual machine executes
- decision-making instructions.
- - Method: The Method test executes recursive function calls to see how
- well the VM handles method calls.
- - Float: Simulates a 3D rotation of objects around a point.
- - Graphics: Draws random rectangles and lines.
- - Image: Draws a sequence of three graphics repeatedly.
- - Dialog: Writes a set of values into labels and editboxes on a form.
-
- The overall CaffeineMark score is the geometric mean of the individual
- scores, i.e., it is the 9th root of the product of all the scores.
- """
- package = "com.flexycore.caffeinemark"
- activity = ".Application"
- summary_metrics = ['OverallScore']
-
- regex = re.compile(r'CAFFEINEMARK RESULT: (?P<type>\w+) (?P<value>\S+)')
-
- def update_result(self, context):
- super(Caffeinemark, self).update_result(context)
- with open(self.logcat_log) as fh:
- for line in fh:
- match = self.regex.search(line)
- if match:
- metric = match.group('type')
- value = float(match.group('value'))
- context.result.add_metric(metric, value)
diff --git a/wlauto/workloads/caffeinemark/com.arm.wlauto.uiauto.caffeinemark.jar b/wlauto/workloads/caffeinemark/com.arm.wlauto.uiauto.caffeinemark.jar
deleted file mode 100644
index 2a75e9d2..00000000
--- a/wlauto/workloads/caffeinemark/com.arm.wlauto.uiauto.caffeinemark.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/caffeinemark/uiauto/build.sh b/wlauto/workloads/caffeinemark/uiauto/build.sh
deleted file mode 100755
index 148e101e..00000000
--- a/wlauto/workloads/caffeinemark/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.caffeinemark.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.caffeinemark.jar ..
-fi
diff --git a/wlauto/workloads/caffeinemark/uiauto/build.xml b/wlauto/workloads/caffeinemark/uiauto/build.xml
deleted file mode 100644
index 0b50bbf9..00000000
--- a/wlauto/workloads/caffeinemark/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.caffeinemark" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/caffeinemark/uiauto/project.properties b/wlauto/workloads/caffeinemark/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/caffeinemark/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/caffeinemark/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/caffeinemark/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index 3979b675..00000000
--- a/wlauto/workloads/caffeinemark/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,85 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.caffeinemark;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "caffeinemark";
- public String[] categories = {"Sieve", "Loop", "Logic", "String", "Float", "Method"};
-
- public void runUiAutomation() throws Exception {
- Bundle status = new Bundle();
- status.putString("product", getUiDevice().getProductName());
-
- UiSelector selector = new UiSelector();
- UiObject runButton = new UiObject(selector.text("Run benchmark")
- .className("android.widget.Button"));
- runButton.click();
-
- try {
- waitText("CaffeineMark results");
- extractOverallScore();
- extractDetailedScores();
-
-
- } catch(UiObjectNotFoundException e) {
- takeScreenshot("caffeine-error");
- }
-
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
- public void extractOverallScore() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject linearLayoutOverallScore = new UiObject(selector.className("android.widget.LinearLayout")
- .instance(1));
- UiObject overallScore = linearLayoutOverallScore.getChild(selector.className("android.widget.TextView")
- .instance(2));
- Log.v(TAG, "CAFFEINEMARK RESULT: OverallScore " + overallScore.getText());
- }
-
- public void extractDetailedScores() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject detailsButton = new UiObject(selector.text("Details")
- .className("android.widget.Button"));
- detailsButton.click();
- sleep(2);
-
- UiObject linearObject;
- UiObject detailedScore;
- for (int i = 1; i <= 6; i++) {
- linearObject = new UiObject(selector.className("android.widget.LinearLayout")
- .instance(i));
- detailedScore = linearObject.getChild(selector.className("android.widget.TextView")
- .instance(1));
- Log.v(TAG,"CAFFEINEMARK RESULT: " + categories[i-1] + " " + detailedScore.getText());
- }
- }
-}
diff --git a/wlauto/workloads/cameracapture/__init__.py b/wlauto/workloads/cameracapture/__init__.py
deleted file mode 100644
index de72acea..00000000
--- a/wlauto/workloads/cameracapture/__init__.py
+++ /dev/null
@@ -1,51 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101
-
-from wlauto import UiAutomatorWorkload, Parameter
-
-
-class Cameracapture(UiAutomatorWorkload):
-
- name = 'cameracapture'
- description = """
- Uses in-built Android camera app to take photos.
-
- """
- package = 'com.google.android.gallery3d'
- activity = 'com.android.camera.CameraActivity'
-
- parameters = [
- Parameter('no_of_captures', kind=int, default=5,
- description='Number of photos to be taken.'),
- Parameter('time_between_captures', kind=int, default=5,
- description='Time, in seconds, between two consecutive camera clicks.'),
- ]
-
- def __init__(self, device, **kwargs):
- super(Cameracapture, self).__init__(device, **kwargs)
- self.uiauto_params['no_of_captures'] = self.no_of_captures
- self.uiauto_params['time_between_captures'] = self.time_between_captures
-
- def setup(self, context):
- super(Cameracapture, self).setup(context)
- self.device.execute('am start -n {}/{}'.format(self.package, self.activity))
-
- def update_result(self, context):
- pass
-
- def teardown(self, context):
- super(Cameracapture, self).teardown(context)
diff --git a/wlauto/workloads/cameracapture/com.arm.wlauto.uiauto.cameracapture.jar b/wlauto/workloads/cameracapture/com.arm.wlauto.uiauto.cameracapture.jar
deleted file mode 100644
index 0d37d0b2..00000000
--- a/wlauto/workloads/cameracapture/com.arm.wlauto.uiauto.cameracapture.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/cameracapture/uiauto/build.sh b/wlauto/workloads/cameracapture/uiauto/build.sh
deleted file mode 100755
index 6b54f4f6..00000000
--- a/wlauto/workloads/cameracapture/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.cameracapture.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.cameracapture.jar ..
-fi
diff --git a/wlauto/workloads/cameracapture/uiauto/build.xml b/wlauto/workloads/cameracapture/uiauto/build.xml
deleted file mode 100644
index bcd7ef9d..00000000
--- a/wlauto/workloads/cameracapture/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.cameracapture" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/cameracapture/uiauto/project.properties b/wlauto/workloads/cameracapture/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/cameracapture/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/cameracapture/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/cameracapture/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index a5497468..00000000
--- a/wlauto/workloads/cameracapture/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.cameracapture;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.KeyEvent;
-
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "cameracapture";
-
- public void runUiAutomation() throws Exception {
- int timeDurationBetweenEachCapture = 0;
- int sleepTime = 2;
- Bundle parameters = getParams();
- String noOfCaptures = "";
- int iterations = 0;
-
- if (parameters.size() > 0) {
- iterations = Integer.parseInt(parameters
- .getString("no_of_captures"));
- timeDurationBetweenEachCapture = Integer.parseInt(parameters
- .getString("time_between_captures"));
- }
- // switch to camera capture mode
- UiObject clickModes = new UiObject(new UiSelector().descriptionMatches("Camera, video or panorama selector"));
- clickModes.click();
- sleep(sleepTime);
-
- UiObject changeModeToCapture = new UiObject(new UiSelector().descriptionMatches("Switch to photo"));
-
- changeModeToCapture.click();
- sleep(sleepTime);
-
- // click to capture photos
- UiObject clickCaptureButton = new UiObject(new UiSelector().descriptionMatches("Shutter button"));
-
- for (int i = 0; i < iterations; i++) {
- clickCaptureButton.longClick();
- sleep(timeDurationBetweenEachCapture);
- }
- getUiDevice().pressBack();
- }
-}
diff --git a/wlauto/workloads/camerarecord/__init__.py b/wlauto/workloads/camerarecord/__init__.py
deleted file mode 100644
index 7f237f2f..00000000
--- a/wlauto/workloads/camerarecord/__init__.py
+++ /dev/null
@@ -1,47 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-from wlauto import UiAutomatorWorkload, Parameter
-
-
-class Camerarecord(UiAutomatorWorkload):
-
- name = 'camerarecord'
- description = """
- Uses in-built Android camera app to record the video for given interval
- of time.
-
- """
- package = 'com.google.android.gallery3d'
- activity = 'com.android.camera.CameraActivity'
- run_timeout = 0
-
- parameters = [
- Parameter('recording_time', kind=int, default=60,
- description='The video recording time in seconds.'),
- ]
-
- def __init__(self, device, **kwargs):
- super(Camerarecord, self).__init__(device)
- self.uiauto_params['recording_time'] = self.recording_time # pylint: disable=E1101
- self.run_timeout = 3 * self.uiauto_params['recording_time']
-
- def setup(self, context):
- super(Camerarecord, self).setup(context)
- self.device.execute('am start -n {}/{}'.format(self.package, self.activity))
-
- def teardown(self, context):
- self.device.execute('am force-stop {}'.format(self.package))
- super(Camerarecord, self).teardown(context)
diff --git a/wlauto/workloads/camerarecord/com.arm.wlauto.uiauto.camerarecord.jar b/wlauto/workloads/camerarecord/com.arm.wlauto.uiauto.camerarecord.jar
deleted file mode 100644
index 7de7cfcf..00000000
--- a/wlauto/workloads/camerarecord/com.arm.wlauto.uiauto.camerarecord.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/camerarecord/uiauto/build.sh b/wlauto/workloads/camerarecord/uiauto/build.sh
deleted file mode 100755
index eff5293f..00000000
--- a/wlauto/workloads/camerarecord/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.camerarecord.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.camerarecord.jar ..
-fi
diff --git a/wlauto/workloads/camerarecord/uiauto/build.xml b/wlauto/workloads/camerarecord/uiauto/build.xml
deleted file mode 100644
index 31a4132e..00000000
--- a/wlauto/workloads/camerarecord/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.camerarecord" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/camerarecord/uiauto/project.properties b/wlauto/workloads/camerarecord/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/camerarecord/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/camerarecord/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/camerarecord/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index c030a078..00000000
--- a/wlauto/workloads/camerarecord/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.camerarecord;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.KeyEvent;
-
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "camerarecord";
-
- public void runUiAutomation() throws Exception {
- Bundle parameters = getParams();
- int timeToRecord = 0;
- int timeout = 4;
- int sleepTime = 2;
- int recordingTime = 0;
- if (parameters.size() > 0) {
- recordingTime = Integer.parseInt(parameters
- .getString("recording_time"));
- }
-
- // switch to camera capture mode
- UiObject clickModes = new UiObject(new UiSelector().descriptionMatches("Camera, video or panorama selector"));
- clickModes.click();
- sleep(sleepTime);
-
- UiObject changeModeToCapture = new UiObject(new UiSelector().descriptionMatches("Switch to video"));
- changeModeToCapture.click();
- sleep(sleepTime);
-
- UiObject clickRecordingButton = new UiObject(new UiSelector().descriptionMatches("Shutter button"));
- clickRecordingButton.longClick();
- sleep(recordingTime);
-
- // Stop video recording
- clickRecordingButton.longClick();
- getUiDevice().pressBack();
- }
-
-}
diff --git a/wlauto/workloads/castlebuilder/__init__.py b/wlauto/workloads/castlebuilder/__init__.py
deleted file mode 100644
index 5a527330..00000000
--- a/wlauto/workloads/castlebuilder/__init__.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import GameWorkload
-
-
-class Castlebuilder(GameWorkload):
-
- name = 'castlebuilder'
- description = """
- Castle Builder game.
-
- """
- package = 'com.ettinentertainment.castlebuilder'
- activity = 'com.unity3d.player.UnityPlayerProxyActivity'
diff --git a/wlauto/workloads/castlebuilder/revent_files/.empty b/wlauto/workloads/castlebuilder/revent_files/.empty
deleted file mode 100644
index e69de29b..00000000
--- a/wlauto/workloads/castlebuilder/revent_files/.empty
+++ /dev/null
diff --git a/wlauto/workloads/castlebuilder/revent_files/Nexus10.run.revent b/wlauto/workloads/castlebuilder/revent_files/Nexus10.run.revent
deleted file mode 100644
index 704231f5..00000000
--- a/wlauto/workloads/castlebuilder/revent_files/Nexus10.run.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/castlebuilder/revent_files/Nexus10.setup.revent b/wlauto/workloads/castlebuilder/revent_files/Nexus10.setup.revent
deleted file mode 100644
index 25370392..00000000
--- a/wlauto/workloads/castlebuilder/revent_files/Nexus10.setup.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/castlemaster/__init__.py b/wlauto/workloads/castlemaster/__init__.py
deleted file mode 100644
index fa104b81..00000000
--- a/wlauto/workloads/castlemaster/__init__.py
+++ /dev/null
@@ -1,30 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import GameWorkload
-
-
-class CastleMaster(GameWorkload):
-
- name = 'castlemaster'
- description = """
- Castle Master v1.09 game.
-
- """
- package = 'com.alphacloud.castlemaster'
- activity = 'com.unity3d.player.UnityPlayerActivity'
- install_timeout = 500
-
diff --git a/wlauto/workloads/castlemaster/revent_files/.empty b/wlauto/workloads/castlemaster/revent_files/.empty
deleted file mode 100644
index e69de29b..00000000
--- a/wlauto/workloads/castlemaster/revent_files/.empty
+++ /dev/null
diff --git a/wlauto/workloads/castlemaster/revent_files/Nexus10.run.revent b/wlauto/workloads/castlemaster/revent_files/Nexus10.run.revent
deleted file mode 100644
index 404f7c71..00000000
--- a/wlauto/workloads/castlemaster/revent_files/Nexus10.run.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/castlemaster/revent_files/Nexus10.setup.revent b/wlauto/workloads/castlemaster/revent_files/Nexus10.setup.revent
deleted file mode 100644
index 680d2e34..00000000
--- a/wlauto/workloads/castlemaster/revent_files/Nexus10.setup.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/cfbench/__init__.py b/wlauto/workloads/cfbench/__init__.py
deleted file mode 100644
index 8fb73cdb..00000000
--- a/wlauto/workloads/cfbench/__init__.py
+++ /dev/null
@@ -1,72 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import xml.etree.ElementTree as ET
-
-from wlauto import AndroidUiAutoBenchmark
-
-
-class Cfbench(AndroidUiAutoBenchmark):
-
- name = 'cfbench'
- description = """
- CF-Bench is (mainly) CPU and memory benchmark tool specifically designed to
- be able to handle multi-core devices, produce a fairly stable score, and
- test both native as well managed code performance.
-
- https://play.google.com/store/apps/details?id=eu.chainfire.cfbench&hl=en
-
- From the website:
-
- It tests specific device properties you do not regularly see tested by other
- benchmarks, and runs in a set timeframe.
-
- It does produce some "final" scores, but as with every benchmark, you should
- take those with a grain of salt. It is simply not theoretically possible to
- produce a single number that accurately describes a device's performance.
-
- .. note:: This workload relies on the device being rooted
-
- """
- package = 'eu.chainfire.cfbench'
- activity = '.MainActivity'
- run_timeout = 5 * 60 # seconds
- summary_metrics = ['overall_score']
-
- cfbench_params = ['java_mdflops', 'native_memory_read', 'java_msflops', 'native_disk_read', 'native_score', 'java_efficiency_memory_read',
- 'native_mips', 'native_mdflops', 'java_score', 'native_memory_write', 'java_memory_write', 'native_mallocs', 'native_msflops',
- 'java_mips', 'java_efficiency_mdflops', 'overall_score', 'java_memory_read', 'java_efficiency_memory_write', 'java_efficiency_mips',
- 'java_efficiency_msflops', 'native_disk_write']
-
- def update_result(self, context):
- super(Cfbench, self).update_result(context)
- device_results_file = os.path.join(self.device.package_data_directory,
- self.package,
- 'shared_prefs', 'eu.chainfire.cfbench_preferences.xml ')
- self.device.execute('cp {} {}'.format(device_results_file, self.device.working_directory), as_root=True)
- self.device.pull(os.path.join(self.device.working_directory, 'eu.chainfire.cfbench_preferences.xml'), context.output_directory)
- result_file = os.path.join(context.output_directory, 'eu.chainfire.cfbench_preferences.xml')
- tree = ET.parse(result_file)
- root = tree.getroot()
- for child in root:
- if child.attrib['name'] in self.cfbench_params:
- if '%' in child.text:
- value = float(child.text.split('%')[0]) / 100
- else:
- value = int(child.text)
- context.result.add_metric(child.attrib['name'], value)
-
-
diff --git a/wlauto/workloads/cfbench/com.arm.wlauto.uiauto.cfbench.jar b/wlauto/workloads/cfbench/com.arm.wlauto.uiauto.cfbench.jar
deleted file mode 100644
index 1b4ae753..00000000
--- a/wlauto/workloads/cfbench/com.arm.wlauto.uiauto.cfbench.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/cfbench/uiauto/build.sh b/wlauto/workloads/cfbench/uiauto/build.sh
deleted file mode 100755
index d72e4d38..00000000
--- a/wlauto/workloads/cfbench/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.cfbench.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.cfbench.jar ..
-fi
diff --git a/wlauto/workloads/cfbench/uiauto/build.xml b/wlauto/workloads/cfbench/uiauto/build.xml
deleted file mode 100644
index 994c34e7..00000000
--- a/wlauto/workloads/cfbench/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.cfbench" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/cfbench/uiauto/project.properties b/wlauto/workloads/cfbench/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/cfbench/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/cfbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/cfbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index 0e61d92d..00000000
--- a/wlauto/workloads/cfbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,63 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.cfbench;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "cfbench";
-
- public void runUiAutomation() throws Exception{
- Bundle status = new Bundle();
- status.putString("product", getUiDevice().getProductName());
- UiSelector selector = new UiSelector();
- UiObject text_bench = new UiObject(selector.text("Full Benchmark")
- .className("android.widget.TextView"));
-
- text_bench.click();
- sleep(2);
-
- try{
- UiObject stop_text = new UiObject(selector.textContains("Benchmarking ...")
- .className("android.widget.TextView"));
- waitUntilNoObject(stop_text, 600);
-
- sleep(2);
- }finally{
- takeScreenshot("cf-bench");
- }
-
- UiScrollable res = new UiScrollable(new UiSelector());//.scrollable(true));
- res.flingToEnd(10);
- sleep(2);
-
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
-}
diff --git a/wlauto/workloads/citadel/__init__.py b/wlauto/workloads/citadel/__init__.py
deleted file mode 100644
index 71b433b9..00000000
--- a/wlauto/workloads/citadel/__init__.py
+++ /dev/null
@@ -1,44 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101
-import time
-
-from wlauto import GameWorkload, Parameter
-
-
-class EpicCitadel(GameWorkload):
-
- name = 'citadel'
- description = """
- Epic Citadel demo showcasing Unreal Engine 3.
-
- The game has very rich graphics details. The workload only moves around its
- environment for the specified time.
-
- """
- package = 'com.epicgames.EpicCitadel'
- activity = '.UE3JavaApp'
- install_timeout = 120
-
- parameters = [
- Parameter('duration', kind=int, default=60,
- description=('Duration, in seconds, of the run (may need to be adjusted for '
- 'different devices.')),
- ]
-
- def run(self, context):
- super(EpicCitadel, self).run(context)
- time.sleep(self.duration)
diff --git a/wlauto/workloads/citadel/revent_files/.empty b/wlauto/workloads/citadel/revent_files/.empty
deleted file mode 100644
index e69de29b..00000000
--- a/wlauto/workloads/citadel/revent_files/.empty
+++ /dev/null
diff --git a/wlauto/workloads/citadel/revent_files/Nexus10.run.revent b/wlauto/workloads/citadel/revent_files/Nexus10.run.revent
deleted file mode 100644
index ac580434..00000000
--- a/wlauto/workloads/citadel/revent_files/Nexus10.run.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/citadel/revent_files/Nexus10.setup.revent b/wlauto/workloads/citadel/revent_files/Nexus10.setup.revent
deleted file mode 100644
index df45cf44..00000000
--- a/wlauto/workloads/citadel/revent_files/Nexus10.setup.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/cyclictest/LICENSE b/wlauto/workloads/cyclictest/LICENSE
deleted file mode 100644
index 58b35d03..00000000
--- a/wlauto/workloads/cyclictest/LICENSE
+++ /dev/null
@@ -1,8 +0,0 @@
-cyclictest binaries included here are part of the Linux kernel and are distributed
-under GPL version 2; The full text of the license may be viewed here:
-
-http://www.gnu.org/licenses/gpl-2.0.html
-
-Source for these binaries can be obtained here:
-
-http://git.kernel.org/cgit/linux/kernel/git/clrkwllms/rt-tests.git
diff --git a/wlauto/workloads/cyclictest/__init__.py b/wlauto/workloads/cyclictest/__init__.py
deleted file mode 100644
index 69b3acd2..00000000
--- a/wlauto/workloads/cyclictest/__init__.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# Copyright 2012-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=no-member
-# pylint: disable=attribute-defined-outside-init
-
-import os
-import time
-
-from wlauto import settings, Workload, Executable, Parameter
-from wlauto.exceptions import ConfigError, WorkloadError
-from wlauto.utils.types import boolean
-
-TXT_RESULT_NAME = 'cyclictest_result.txt'
-RESULT_INTERPRETATION = {
- 'T': 'Thread',
- 'P': 'Priority',
- 'C': 'Clock',
-}
-
-
-class Cyclictest(Workload):
-
- name = 'cyclictest'
- description = """
- Measures the amount of time that passes between when a timer expires and
- when the thread which set the timer actually runs.
-
- Cyclic test works by taking a time snapshot just prior to waiting for a specific
- time interval (t1), then taking another time snapshot after the timer
- finishes (t2), then comparing the theoretical wakeup time with the actual
- wakeup time (t2 -(t1 + sleep_time)). This value is the latency for that
- timers wakeup.
-
- """
-
- parameters = [
- Parameter('clock', allowed_values=['monotonic', 'realtime'], default='realtime',
- description=('specify the clock to be used during the test.')),
- Parameter('duration', kind=int, default=30,
- description=('Specify the length for the test to run in seconds.')),
- Parameter('quiet', kind=boolean, default=True,
- description=('Run the tests quiet and print only a summary on exit.')),
- Parameter('thread', kind=int, default=8,
- description=('Set the number of test threads')),
- Parameter('latency', kind=int, default=1000000,
- description=('Write the value to /dev/cpu_dma_latency')),
- Parameter('extra_parameters', kind=str, default="",
- description=('Any additional command line parameters to append to the '
- 'existing parameters above. A list can be found at '
- 'https://rt.wiki.kernel.org/index.php/Cyclictest or '
- 'in the help page ``cyclictest -h``')),
- Parameter('clear_file_cache', kind=boolean, default=True,
- description=('Clear file caches before starting test')),
- Parameter('screen_off', kind=boolean, default=True,
- description=('If true it will turn the screen off so that onscreen '
- 'graphics do not effect the score. This is predominantly '
- 'for devices without a GPU')),
-
- ]
-
- def setup(self, context):
- self.cyclictest_on_device = 'cyclictest'
- self.cyclictest_result = os.path.join(self.device.working_directory, TXT_RESULT_NAME)
- self.cyclictest_command = '{} --clock={} --duration={}s --thread={} --latency={} {} {} > {}'
- self.device_binary = None
-
- if not self.device.is_rooted:
- raise WorkloadError("This workload requires a device with root premissions to run")
-
- host_binary = context.resolver.get(Executable(self, self.device.abi, 'cyclictest'))
- self.device_binary = self.device.install(host_binary)
-
- self.cyclictest_command = self.cyclictest_command.format(self.device_binary,
- 0 if self.clock == 'monotonic' else 1,
- self.duration,
- self.thread,
- self.latency,
- "--quiet" if self.quiet else "",
- self.extra_parameters,
- self.cyclictest_result)
-
- if self.clear_file_cache:
- self.device.execute('sync')
- self.device.write_value('/proc/sys/vm/drop_caches', 3)
-
- if self.device.os == 'android':
- if self.screen_off and self.device.is_screen_on:
- self.device.execute('input keyevent 26')
-
- def run(self, context):
- self.device.execute(self.cyclictest_command, self.duration * 2, as_root=True)
-
- def update_result(self, context):
- self.device.pull(self.cyclictest_result, context.output_directory)
-
- # Parsing the output
- # Standard Cyclictest Output:
- # T: 0 (31974) P:95 I:1000 C:4990 Min:9 Act:37 Avg:31 Max:59
- with open(os.path.join(context.output_directory, TXT_RESULT_NAME)) as f:
- for line in f:
- if line.find('C:') is not -1:
- # Key = T: 0 (31974) P:95 I:1000
- # Remaing = 49990 Min:9 Act:37 Avg:31 Max:59
- # sperator = C:
- (key, sperator, remaing) = line.partition('C:')
-
- index = key.find('T')
- key = key.replace(key[index], RESULT_INTERPRETATION['T'])
- index = key.find('P')
- key = key.replace(key[index], RESULT_INTERPRETATION['P'])
-
- index = sperator.find('C')
- sperator = sperator.replace(sperator[index], RESULT_INTERPRETATION['C'])
-
- metrics = (sperator + remaing).split()
- # metrics is now in the from of ['Min:', '9', 'Act:', '37', 'Avg:', '31' , 'Max', '59']
- for i in range(0, len(metrics), 2):
- full_key = key + ' ' + metrics[i][:-1]
- value = int(metrics[i + 1])
- context.result.add_metric(full_key, value, 'microseconds')
-
- def teardown(self, context):
- if self.device.os == 'android':
- if self.screen_off:
- self.device.ensure_screen_is_on()
- self.device.execute('rm -f {}'.format(self.cyclictest_result))
diff --git a/wlauto/workloads/cyclictest/bin/arm64/cyclictest b/wlauto/workloads/cyclictest/bin/arm64/cyclictest
deleted file mode 100755
index 9d682da1..00000000
--- a/wlauto/workloads/cyclictest/bin/arm64/cyclictest
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/cyclictest/bin/armeabi/cyclictest b/wlauto/workloads/cyclictest/bin/armeabi/cyclictest
deleted file mode 100755
index e61f2076..00000000
--- a/wlauto/workloads/cyclictest/bin/armeabi/cyclictest
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/dex2oat/__init__.py b/wlauto/workloads/dex2oat/__init__.py
deleted file mode 100644
index c9c17733..00000000
--- a/wlauto/workloads/dex2oat/__init__.py
+++ /dev/null
@@ -1,121 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=no-member,attribute-defined-outside-init
-import re
-import os
-import sys
-
-from wlauto import Workload, Parameter, PluginLoader
-from wlauto.exceptions import WorkloadError
-from wlauto.utils.android import ApkInfo
-import wlauto.common.android.resources
-
-
-class Dex2oatBenchmark(Workload):
-
- name = 'dex2oat'
- description = """
- Benchmarks the execution time of dex2oat (a key part of APK installation process).
-
- ART is a new Android runtime in KitKat, which replaces Dalvik VM. ART uses Ahead-Of-Time
- compilation. It pre-compiles ODEX files used by Dalvik using dex2oat tool as part of APK
- installation process.
-
- This workload benchmarks the time it take to compile an APK using dex2oat, which has a
- significant impact on the total APK installation time, and therefore user experience.
-
- """
-
- supported_platforms = ['android']
- command_template = 'dex2oat --dex-file={} --oat-file={} --instruction-set={} --dump-timing'
- run_timeout = 5 * 60
-
- parameters = [
- Parameter('instruction_set', default='arm64',
- allowed_values=['arm', 'arm64', 'x86', 'x86_64', 'mips'],
- description="""Specifies the instruction set to compile for. Only options supported by
- the target device can be used."""),
- ]
-
- def init_resources(self, context):
- # TODO: find a better APK to use for this.
- peacekeeper = PluginLoader().get_workload('peacekeeper', self.device)
- self.apk_file = context.resolver.get(wlauto.common.android.resources.ApkFile(peacekeeper), version='chrome')
- self.package = ApkInfo(self.apk_file).package
-
- def setup(self, context):
- if self.device.getprop('persist.sys.dalvik.vm.lib.2') != 'libart.so':
- raise WorkloadError('Android system must be using ART (rather than Dalvik) in order for dex2oat to work.')
- supported = [eabi == 'armeabi' and 'arm' or eabi.split('-')[0]
- for eabi in self.device.supported_eabi]
- if self.instruction_set not in supported:
- message = 'Instruction set "{}" is not supported by the device; (supported: {})'
- raise WorkloadError(message.format(self.instruction_set, supported))
-
- on_device_apk = self.device.path.join(self.device.working_directory,
- os.path.basename(self.apk_file))
- self.on_device_oat = on_device_apk.replace('.apk', '-{}.oat'.format(self.instruction_set))
- self.command = self.command_template.format(on_device_apk, self.on_device_oat, self.instruction_set)
-
- if not self.device.file_exists(on_device_apk):
- self.device.push(self.apk_file, on_device_apk)
-
- def run(self, context):
- self.device.execute(self.command, self.run_timeout)
-
- def update_result(self, context):
- """
- Retrieve the last dex2oat time from the logs. That will correspond with the run() method.
- The compilation time does not.
-
- Pulls out the compilation time and dex2oat execution time:
- I/dex2oat ( 2522): 1.8s Compile Dex File
- I/dex2oat ( 2522): dex2oat took 2.366s (threads: 6)
-
-
- """
- logcat_log = os.path.join(context.output_directory, 'logcat.log')
- context.device_manager.dump_logcat(logcat_log)
-
- regex_time = re.compile("^I\/dex2oat \( *[0-9]+\): dex2oat took (?P<time>[0-9]+\.?[0-9]*)(?P<unit>m?s)")
- regex_comp_time = re.compile("^I\/dex2oat \( *[0-9]+\): +(?P<time>[0-9]*\.?[0-9]*)(?P<unit>m?s) Compile Dex File")
- time_data, comp_time_data = None, None
- with open(logcat_log) as fh:
- for line in fh:
- match = regex_time.search(line)
-
- if match:
- time_data = match.groupdict()
-
- match = regex_comp_time.search(line)
-
- if match:
- comp_time_data = match.groupdict()
- # Last dex2oat time wins.
- if time_data is not None:
- time = time_data['time']
- if time_data['unit'] == "s":
- time = float(time) * 1000.0
- context.result.add_metric('dex2oat_time', time, "ms", lower_is_better=True)
-
- if comp_time_data is not None:
- time = comp_time_data['time']
- if comp_time_data['unit'] == "s":
- time = float(time) * 1000.0
- context.result.add_metric('dex2oat_comp_time', time, "ms", lower_is_better=True)
-
- def teardown(self, context):
- self.device.remove(self.on_device_oat)
diff --git a/wlauto/workloads/dhrystone/__init__.py b/wlauto/workloads/dhrystone/__init__.py
deleted file mode 100644
index 5cb18b26..00000000
--- a/wlauto/workloads/dhrystone/__init__.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#pylint: disable=E1101,W0201
-
-import os
-import re
-
-from wlauto import Workload, Parameter
-from wlauto.exceptions import ConfigError
-
-
-this_dir = os.path.dirname(__file__)
-
-
-class Dhrystone(Workload):
-
- name = 'dhrystone'
- description = """
- Runs the Dhrystone benchmark.
-
- Original source from::
-
- http://classes.soe.ucsc.edu/cmpe202/benchmarks/standard/dhrystone.c
-
- This version has been modified to configure duration and the number of
- threads used.
-
- """
-
- bm_regex = re.compile(r'This machine benchmarks at (?P<score>\d+)')
- dmips_regex = re.compile(r'(?P<score>\d+) DMIPS')
- time_regex = re.compile(r'Total dhrystone run time: (?P<time>[0-9.]+)')
-
- default_mloops = 100
-
- parameters = [
- Parameter('duration', kind=int, default=0,
- description='The duration, in seconds, for which dhrystone will be executed. '
- 'Either this or ``mloops`` should be specified but not both.'),
- Parameter('mloops', kind=int, default=0,
- description='Millions of loops to run. Either this or ``duration`` should be '
- 'specified, but not both. If neither is specified, this will default '
- 'to ``{}``'.format(default_mloops)),
- Parameter('threads', kind=int, default=4,
- description='The number of separate dhrystone "threads" that will be forked.'),
- Parameter('delay', kind=int, default=0,
- description=('The delay, in seconds, between kicking off of dhrystone '
- 'threads (if ``threads`` > 1).')),
- Parameter('taskset_mask', kind=int, default=0,
- description='''
- The processes spawned by the workload will be pinned to cores
- as specified by this parameter
- '''),
- ]
-
- def setup(self, context):
- host_exe = os.path.join(this_dir, 'dhrystone')
- self.device_exe = self.device.install(host_exe)
- execution_mode = '-l {}'.format(self.mloops) if self.mloops else '-r {}'.format(self.duration)
- if self.taskset_mask:
- taskset_string = '{} taskset 0x{:x} '.format(self.device.busybox, self.taskset_mask)
- else:
- taskset_string = ''
- self.command = '{}{} {} -t {} -d {}'.format(taskset_string,
- self.device_exe,
- execution_mode,
- self.threads, self.delay)
- self.timeout = self.duration and self.duration + self.delay * self.threads + 10 or 300
- self.device.killall('dhrystone')
-
- def run(self, context):
- try:
- self.output = self.device.execute(self.command, timeout=self.timeout, check_exit_code=False)
- except KeyboardInterrupt:
- self.device.killall('dhrystone')
- raise
-
- def update_result(self, context):
- outfile = os.path.join(context.output_directory, 'dhrystone.output')
- with open(outfile, 'w') as wfh:
- wfh.write(self.output)
- score_count = 0
- dmips_count = 0
- total_score = 0
- total_dmips = 0
- for line in self.output.split('\n'):
- match = self.time_regex.search(line)
- if match:
- context.result.add_metric('time', float(match.group('time')), 'seconds', lower_is_better=True)
- else:
- match = self.bm_regex.search(line)
- if match:
- metric = 'thread {} score'.format(score_count)
- value = int(match.group('score'))
- context.result.add_metric(metric, value)
- score_count += 1
- total_score += value
- else:
- match = self.dmips_regex.search(line)
- if match:
- metric = 'thread {} DMIPS'.format(dmips_count)
- value = int(match.group('score'))
- context.result.add_metric(metric, value)
- dmips_count += 1
- total_dmips += value
- context.result.add_metric('total DMIPS', total_dmips)
- context.result.add_metric('total score', total_score)
-
- def teardown(self, context):
- self.device.uninstall('dhrystone')
-
- def validate(self):
- if self.mloops and self.duration: # pylint: disable=E0203
- raise ConfigError('mloops and duration cannot be both specified at the same time for dhrystone.')
- if not self.mloops and not self.duration: # pylint: disable=E0203
- self.mloops = self.default_mloops
-
diff --git a/wlauto/workloads/dhrystone/dhrystone b/wlauto/workloads/dhrystone/dhrystone
deleted file mode 100755
index 68cd9b71..00000000
--- a/wlauto/workloads/dhrystone/dhrystone
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/dhrystone/src/build.sh b/wlauto/workloads/dhrystone/src/build.sh
deleted file mode 100755
index 61fcce5d..00000000
--- a/wlauto/workloads/dhrystone/src/build.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-ndk-build
-if [[ -f libs/armeabi/dhrystone ]]; then
- echo "Dhrystone binary updated."
- cp libs/armeabi/dhrystone ..
- rm -rf libs
- rm -rf obj
-fi
diff --git a/wlauto/workloads/dhrystone/src/jni/Android.mk b/wlauto/workloads/dhrystone/src/jni/Android.mk
deleted file mode 100644
index 2f974319..00000000
--- a/wlauto/workloads/dhrystone/src/jni/Android.mk
+++ /dev/null
@@ -1,11 +0,0 @@
-LOCAL_PATH:= $(call my-dir)
-
-include $(CLEAR_VARS)
-LOCAL_SRC_FILES:= dhrystone.c
-LOCAL_MODULE := dhrystone
-LOCAL_MODULE_TAGS := optional
-LOCAL_STATIC_LIBRARIES := libc
-LOCAL_SHARED_LIBRARIES := liblog
-LOCAL_LDLIBS := -llog
-LOCAL_CFLAGS := -O2
-include $(BUILD_EXECUTABLE)
diff --git a/wlauto/workloads/dhrystone/src/jni/dhrystone.c b/wlauto/workloads/dhrystone/src/jni/dhrystone.c
deleted file mode 100644
index 9f16003e..00000000
--- a/wlauto/workloads/dhrystone/src/jni/dhrystone.c
+++ /dev/null
@@ -1,959 +0,0 @@
-/* ARM modifications to the original Dhrystone are */
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-/***** hpda:net.sources / homxb!gemini / 1:58 am Apr 1, 1986*/
-/* EVERBODY: Please read "APOLOGY" below. -rick 01/06/85
- * See introduction in net.arch, or net.micro
- *
- * "DHRYSTONE" Benchmark Program
- *
- * Version: C/1.1, 12/01/84
- *
- * Date: PROGRAM updated 01/06/86, RESULTS updated 03/31/86
- *
- * Author: Reinhold P. Weicker, CACM Vol 27, No 10, 10/84 pg. 1013
- * Translated from ADA by Rick Richardson
- * Every method to preserve ADA-likeness has been used,
- * at the expense of C-ness.
- *
- * Compile: cc -O dry.c -o drynr : No registers
- * cc -O -DREG=register dry.c -o dryr : Registers
- *
- * Defines: Defines are provided for old C compiler's
- * which don't have enums, and can't assign structures.
- * The time(2) function is library dependant; Most
- * return the time in seconds, but beware of some, like
- * Aztec C, which return other units.
- * The LOOPS define is initially set for 50000 loops.
- * If you have a machine with large integers and is
- * very fast, please change this number to 500000 to
- * get better accuracy. Please select the way to
- * measure the execution time using the TIME define.
- * For single user machines, time(2) is adequate. For
- * multi-user machines where you cannot get single-user
- * access, use the times(2) function. If you have
- * neither, use a stopwatch in the dead of night.
- * Use a "printf" at the point marked "start timer"
- * to begin your timings. DO NOT use the UNIX "time(1)"
- * command, as this will measure the total time to
- * run this program, which will (erroneously) include
- * the time to malloc(3) storage and to compute the
- * time it takes to do nothing.
- *
- * Run: drynr; dryr
- *
- * Results: If you get any new machine/OS results, please send to:
- *
- * ihnp4!castor!pcrat!rick
- *
- * and thanks to all that do. Space prevents listing
- * the names of those who have provided some of these
- * results. I'll be forwarding these results to
- * Rheinhold Weicker.
- *
- * Note: I order the list in increasing performance of the
- * "with registers" benchmark. If the compiler doesn't
- * provide register variables, then the benchmark
- * is the same for both REG and NOREG.
- *
- * PLEASE: Send complete information about the machine type,
- * clock speed, OS and C manufacturer/version. If
- * the machine is modified, tell me what was done.
- * On UNIX, execute uname -a and cc -V to get this info.
- *
- * 80x8x NOTE: 80x8x benchers: please try to do all memory models
- * for a particular compiler.
- *
- * APOLOGY (1/30/86):
- * Well, I goofed things up! As pointed out by Haakon Bugge,
- * the line of code marked "GOOF" below was missing from the
- * Dhrystone distribution for the last several months. It
- * *WAS* in a backup copy I made last winter, so no doubt it
- * was victimized by sleepy fingers operating vi!
- *
- * The effect of the line missing is that the reported benchmarks
- * are 15% too fast (at least on a 80286). Now, this creates
- * a dilema - do I throw out ALL the data so far collected
- * and use only results from this (corrected) version, or
- * do I just keep collecting data for the old version?
- *
- * Since the data collected so far *is* valid as long as it
- * is compared with like data, I have decided to keep
- * TWO lists- one for the old benchmark, and one for the
- * new. This also gives me an opportunity to correct one
- * other error I made in the instructions for this benchmark.
- * My experience with C compilers has been mostly with
- * UNIX 'pcc' derived compilers, where the 'optimizer' simply
- * fixes sloppy code generation (peephole optimization).
- * But today, there exist C compiler optimizers that will actually
- * perform optimization in the Computer Science sense of the word,
- * by removing, for example, assignments to a variable whose
- * value is never used. Dhrystone, unfortunately, provides
- * lots of opportunities for this sort of optimization.
- *
- * I request that benchmarkers re-run this new, corrected
- * version of Dhrystone, turning off or bypassing optimizers
- * which perform more than peephole optimization. Please
- * indicate the version of Dhrystone used when reporting the
- * results to me.
- *
- * RESULTS BEGIN HERE
- *
- *----------------DHRYSTONE VERSION 1.1 RESULTS BEGIN--------------------------
- *
- * MACHINE MICROPROCESSOR OPERATING COMPILER DHRYSTONES/SEC.
- * TYPE SYSTEM NO REG REGS
- * -------------------------- ------------ ----------- ---------------
- * Apple IIe 65C02-1.02Mhz DOS 3.3 Aztec CII v1.05i 37 37
- * - Z80-2.5Mhz CPM-80 v2.2 Aztec CII v1.05g 91 91
- * - 8086-8Mhz RMX86 V6 Intel C-86 V2.0 197 203LM??
- * IBM PC/XT 8088-4.77Mhz COHERENT 2.3.43 Mark Wiiliams 259 275
- * - 8086-8Mhz RMX86 V6 Intel C-86 V2.0 287 304 ??
- * Fortune 32:16 68000-6Mhz V7+sys3+4.1BSD cc 360 346
- * PDP-11/34A w/FP-11C UNIX V7m cc 406 449
- * Macintosh512 68000-7.7Mhz Mac ROM O/S DeSmet(C ware) 625 625
- * VAX-11/750 w/FPA UNIX 4.2BSD cc 831 852
- * DataMedia 932 68000-10Mhz UNIX sysV cc 837 888
- * Plexus P35 68000-12.5Mhz UNIX sysIII cc 835 894
- * ATT PC7300 68010-10Mhz UNIX 5.0.3 cc 973 1034
- * Compaq II 80286-8Mhz MSDOS 3.1 MS C 3.0 1086 1140 LM
- * IBM PC/AT 80286-7.5Mhz Venix/286 SVR2 cc 1159 1254 *15
- * Compaq II 80286-8Mhz MSDOS 3.1 MS C 3.0 1190 1282 MM
- * MicroVAX II - Mach/4.3 cc 1361 1385
- * DEC uVAX II - Ultrix-32m v1.1 cc 1385 1399
- * Compaq II 80286-8Mhz MSDOS 3.1 MS C 3.0 1351 1428
- * VAX 11/780 - UNIX 4.2BSD cc 1417 1441
- * VAX-780/MA780 Mach/4.3 cc 1428 1470
- * VAX 11/780 - UNIX 5.0.1 cc 4.1.1.31 1650 1640
- * Ridge 32C V1 - ROS 3.3 Ridge C (older) 1628 1695
- * Gould PN6005 - UTX 1.1c+ (4.2) cc 1732 1884
- * Gould PN9080 custom ECL UTX-32 1.1C cc 4745 4992
- * VAX-784 - Mach/4.3 cc 5263 5555 &4
- * VAX 8600 - 4.3 BSD cc 6329 6423
- * Amdahl 5860 - UTS sysV cc 1.22 28735 28846
- * IBM3090/200 - ? ? 31250 31250
- *
- *
- *----------------DHRYSTONE VERSION 1.0 RESULTS BEGIN--------------------------
- *
- * MACHINE MICROPROCESSOR OPERATING COMPILER DHRYSTONES/SEC.
- * TYPE SYSTEM NO REG REGS
- * -------------------------- ------------ ----------- ---------------
- * Commodore 64 6510-1MHz C64 ROM C Power 2.8 36 36
- * HP-110 8086-5.33Mhz MSDOS 2.11 Lattice 2.14 284 284
- * IBM PC/XT 8088-4.77Mhz PC/IX cc 271 294
- * CCC 3205 - Xelos(SVR2) cc 558 592
- * Perq-II 2901 bitslice Accent S5c cc (CMU) 301 301
- * IBM PC/XT 8088-4.77Mhz COHERENT 2.3.43 MarkWilliams cc 296 317
- * Cosmos 68000-8Mhz UniSoft cc 305 322
- * IBM PC/XT 8088-4.77Mhz Venix/86 2.0 cc 297 324
- * DEC PRO 350 11/23 Venix/PRO SVR2 cc 299 325
- * IBM PC 8088-4.77Mhz MSDOS 2.0 b16cc 2.0 310 340
- * PDP11/23 11/23 Venix (V7) cc 320 358
- * Commodore Amiga ? Lattice 3.02 368 371
- * PC/XT 8088-4.77Mhz Venix/86 SYS V cc 339 377
- * IBM PC 8088-4.77Mhz MSDOS 2.0 CI-C86 2.20M 390 390
- * IBM PC/XT 8088-4.77Mhz PCDOS 2.1 Wizard 2.1 367 403
- * IBM PC/XT 8088-4.77Mhz PCDOS 3.1 Lattice 2.15 403 403 @
- * Colex DM-6 68010-8Mhz Unisoft SYSV cc 378 410
- * IBM PC 8088-4.77Mhz PCDOS 3.1 Datalight 1.10 416 416
- * IBM PC NEC V20-4.77Mhz MSDOS 3.1 MS 3.1 387 420
- * IBM PC/XT 8088-4.77Mhz PCDOS 2.1 Microsoft 3.0 390 427
- * IBM PC NEC V20-4.77Mhz MSDOS 3.1 MS 3.1 (186) 393 427
- * PDP-11/34 - UNIX V7M cc 387 438
- * IBM PC 8088, 4.77mhz PC-DOS 2.1 Aztec C v3.2d 423 454
- * Tandy 1000 V20, 4.77mhz MS-DOS 2.11 Aztec C v3.2d 423 458
- * Tandy TRS-16B 68000-6Mhz Xenix 1.3.5 cc 438 458
- * PDP-11/34 - RSTS/E decus c 438 495
- * Onyx C8002 Z8000-4Mhz IS/1 1.1 (V7) cc 476 511
- * Tandy TRS-16B 68000-6Mhz Xenix 1.3.5 Green Hills 609 617
- * DEC PRO 380 11/73 Venix/PRO SVR2 cc 577 628
- * FHL QT+ 68000-10Mhz Os9/68000 version 1.3 603 649 FH
- * Apollo DN550 68010-?Mhz AegisSR9/IX cc 3.12 666 666
- * HP-110 8086-5.33Mhz MSDOS 2.11 Aztec-C 641 676
- * ATT PC6300 8086-8Mhz MSDOS 2.11 b16cc 2.0 632 684
- * IBM PC/AT 80286-6Mhz PCDOS 3.0 CI-C86 2.1 666 684
- * Tandy 6000 68000-8Mhz Xenix 3.0 cc 694 694
- * IBM PC/AT 80286-6Mhz Xenix 3.0 cc 684 704 MM
- * Macintosh 68000-7.8Mhz 2M Mac Rom Mac C 32 bit int 694 704
- * Macintosh 68000-7.7Mhz - MegaMax C 2.0 661 709
- * Macintosh512 68000-7.7Mhz Mac ROM O/S DeSmet(C ware) 714 714
- * IBM PC/AT 80286-6Mhz Xenix 3.0 cc 704 714 LM
- * Codata 3300 68000-8Mhz UniPlus+ (v7) cc 678 725
- * WICAT MB 68000-8Mhz System V WICAT C 4.1 585 731 ~
- * Cadmus 9000 68010-10Mhz UNIX cc 714 735
- * AT&T 6300 8086-8Mhz Venix/86 SVR2 cc 668 743
- * Cadmus 9790 68010-10Mhz 1MB SVR0,Cadmus3.7 cc 720 747
- * NEC PC9801F 8086-8Mhz PCDOS 2.11 Lattice 2.15 768 - @
- * ATT PC6300 8086-8Mhz MSDOS 2.11 CI-C86 2.20M 769 769
- * Burroughs XE550 68010-10Mhz Centix 2.10 cc 769 769 CT1
- * EAGLE/TURBO 8086-8Mhz Venix/86 SVR2 cc 696 779
- * ALTOS 586 8086-10Mhz Xenix 3.0b cc 724 793
- * DEC 11/73 J-11 micro Ultrix-11 V3.0 cc 735 793
- * ATT 3B2/300 WE32000-?Mhz UNIX 5.0.2 cc 735 806
- * Apollo DN320 68010-?Mhz AegisSR9/IX cc 3.12 806 806
- * IRIS-2400 68010-10Mhz UNIX System V cc 772 829
- * Atari 520ST 68000-8Mhz TOS DigResearch 839 846
- * IBM PC/AT 80286-6Mhz PCDOS 3.0 MS 3.0(large) 833 847 LM
- * WICAT MB 68000-8Mhz System V WICAT C 4.1 675 853 S~
- * VAX 11/750 - Ultrix 1.1 4.2BSD cc 781 862
- * CCC 7350A 68000-8MHz UniSoft V.2 cc 821 875
- * VAX 11/750 - UNIX 4.2bsd cc 862 877
- * Fast Mac 68000-7.7Mhz - MegaMax C 2.0 839 904 +
- * IBM PC/XT 8086-9.54Mhz PCDOS 3.1 Microsoft 3.0 833 909 C1
- * DEC 11/44 Ultrix-11 V3.0 cc 862 909
- * Macintosh 68000-7.8Mhz 2M Mac Rom Mac C 16 bit int 877 909 S
- * CCC 3210 - Xelos R01(SVR2) cc 849 924
- * CCC 3220 - Ed. 7 v2.3 cc 892 925
- * IBM PC/AT 80286-6Mhz Xenix 3.0 cc -i 909 925
- * AT&T 6300 8086, 8mhz MS-DOS 2.11 Aztec C v3.2d 862 943
- * IBM PC/AT 80286-6Mhz Xenix 3.0 cc 892 961
- * VAX 11/750 w/FPA Eunice 3.2 cc 914 976
- * IBM PC/XT 8086-9.54Mhz PCDOS 3.1 Wizard 2.1 892 980 C1
- * IBM PC/XT 8086-9.54Mhz PCDOS 3.1 Lattice 2.15 980 980 C1
- * Plexus P35 68000-10Mhz UNIX System III cc 984 980
- * PDP-11/73 KDJ11-AA 15Mhz UNIX V7M 2.1 cc 862 981
- * VAX 11/750 w/FPA UNIX 4.3bsd cc 994 997
- * IRIS-1400 68010-10Mhz UNIX System V cc 909 1000
- * IBM PC/AT 80286-6Mhz Venix/86 2.1 cc 961 1000
- * IBM PC/AT 80286-6Mhz PCDOS 3.0 b16cc 2.0 943 1063
- * Zilog S8000/11 Z8001-5.5Mhz Zeus 3.2 cc 1011 1084
- * NSC ICM-3216 NSC 32016-10Mhz UNIX SVR2 cc 1041 1084
- * IBM PC/AT 80286-6Mhz PCDOS 3.0 MS 3.0(small) 1063 1086
- * VAX 11/750 w/FPA VMS VAX-11 C 2.0 958 1091
- * Stride 68000-10Mhz System-V/68 cc 1041 1111
- * Plexus P/60 MC68000-12.5Mhz UNIX SYSIII Plexus 1111 1111
- * ATT PC7300 68010-10Mhz UNIX 5.0.2 cc 1041 1111
- * CCC 3230 - Xelos R01(SVR2) cc 1040 1126
- * Stride 68000-12Mhz System-V/68 cc 1063 1136
- * IBM PC/AT 80286-6Mhz Venix/286 SVR2 cc 1056 1149
- * Plexus P/60 MC68000-12.5Mhz UNIX SYSIII Plexus 1111 1163 T
- * IBM PC/AT 80286-6Mhz PCDOS 3.0 Datalight 1.10 1190 1190
- * ATT PC6300+ 80286-6Mhz MSDOS 3.1 b16cc 2.0 1111 1219
- * IBM PC/AT 80286-6Mhz PCDOS 3.1 Wizard 2.1 1136 1219
- * Sun2/120 68010-10Mhz Sun 4.2BSD cc 1136 1219
- * IBM PC/AT 80286-6Mhz PCDOS 3.0 CI-C86 2.20M 1219 1219
- * WICAT PB 68000-8Mhz System V WICAT C 4.1 998 1226 ~
- * MASSCOMP 500 68010-10MHz RTU V3.0 cc (V3.2) 1156 1238
- * Alliant FX/8 IP (68012-12Mhz) Concentrix cc -ip;exec -i 1170 1243 FX
- * Cyb DataMate 68010-12.5Mhz Uniplus 5.0 Unisoft cc 1162 1250
- * PDP 11/70 - UNIX 5.2 cc 1162 1250
- * IBM PC/AT 80286-6Mhz PCDOS 3.1 Lattice 2.15 1250 1250
- * IBM PC/AT 80286-7.5Mhz Venix/86 2.1 cc 1190 1315 *15
- * Sun2/120 68010-10Mhz Standalone cc 1219 1315
- * Intel 380 80286-8Mhz Xenix R3.0up1 cc 1250 1315 *16
- * Sequent Balance 8000 NS32032-10MHz Dynix 2.0 cc 1250 1315 N12
- * IBM PC/DSI-32 32032-10Mhz MSDOS 3.1 GreenHills 2.14 1282 1315 C3
- * ATT 3B2/400 WE32100-?Mhz UNIX 5.2 cc 1315 1315
- * CCC 3250XP - Xelos R01(SVR2) cc 1215 1318
- * IBM PC/RT 032 RISC(801?)?Mhz BSD 4.2 cc 1248 1333 RT
- * DG MV4000 - AOS/VS 5.00 cc 1333 1333
- * IBM PC/AT 80286-8Mhz Venix/86 2.1 cc 1275 1380 *16
- * IBM PC/AT 80286-6Mhz MSDOS 3.0 Microsoft 3.0 1250 1388
- * ATT PC6300+ 80286-6Mhz MSDOS 3.1 CI-C86 2.20M 1428 1428
- * COMPAQ/286 80286-8Mhz Venix/286 SVR2 cc 1326 1443
- * IBM PC/AT 80286-7.5Mhz Venix/286 SVR2 cc 1333 1449 *15
- * WICAT PB 68000-8Mhz System V WICAT C 4.1 1169 1464 S~
- * Tandy II/6000 68000-8Mhz Xenix 3.0 cc 1384 1477
- * MicroVAX II - Mach/4.3 cc 1513 1536
- * WICAT MB 68000-12.5Mhz System V WICAT C 4.1 1246 1537 ~
- * IBM PC/AT 80286-9Mhz SCO Xenix V cc 1540 1556 *18
- * Cyb DataMate 68010-12.5Mhz Uniplus 5.0 Unisoft cc 1470 1562 S
- * VAX 11/780 - UNIX 5.2 cc 1515 1562
- * MicroVAX-II - - - 1562 1612
- * VAX-780/MA780 Mach/4.3 cc 1587 1612
- * VAX 11/780 - UNIX 4.3bsd cc 1646 1662
- * Apollo DN660 - AegisSR9/IX cc 3.12 1666 1666
- * ATT 3B20 - UNIX 5.2 cc 1515 1724
- * NEC PC-98XA 80286-8Mhz PCDOS 3.1 Lattice 2.15 1724 1724 @
- * HP9000-500 B series CPU HP-UX 4.02 cc 1724 -
- * Ridge 32C V1 - ROS 3.3 Ridge C (older) 1776 -
- * IBM PC/STD 80286-8Mhz MSDOS 3.0 Microsoft 3.0 1724 1785 C2
- * WICAT MB 68000-12.5Mhz System V WICAT C 4.1 1450 1814 S~
- * WICAT PB 68000-12.5Mhz System V WICAT C 4.1 1530 1898 ~
- * DEC-2065 KL10-Model B TOPS-20 6.1FT5 Port. C Comp. 1937 1946
- * Gould PN6005 - UTX 1.1(4.2BSD) cc 1675 1964
- * DEC2060 KL-10 TOPS-20 cc 2000 2000 NM
- * Intel 310AP 80286-8Mhz Xenix 3.0 cc 1893 2009
- * VAX 11/785 - UNIX 5.2 cc 2083 2083
- * VAX 11/785 - VMS VAX-11 C 2.0 2083 2083
- * VAX 11/785 - UNIX SVR2 cc 2123 2083
- * VAX 11/785 - ULTRIX-32 1.1 cc 2083 2091
- * VAX 11/785 - UNIX 4.3bsd cc 2135 2136
- * WICAT PB 68000-12.5Mhz System V WICAT C 4.1 1780 2233 S~
- * Pyramid 90x - OSx 2.3 cc 2272 2272
- * Pyramid 90x FPA,cache,4Mb OSx 2.5 cc no -O 2777 2777
- * Pyramid 90x w/cache OSx 2.5 cc w/-O 3333 3333
- * IBM-4341-II - VM/SP3 Waterloo C 1.2 3333 3333
- * IRIS-2400T 68020-16.67Mhz UNIX System V cc 3105 3401
- * Celerity C-1200 ? UNIX 4.2BSD cc 3485 3468
- * SUN 3/75 68020-16.67Mhz SUN 4.2 V3 cc 3333 3571
- * IBM-4341 Model 12 UTS 5.0 ? 3685 3685
- * SUN-3/160 68020-16.67Mhz Sun 4.2 V3.0A cc 3381 3764
- * Sun 3/180 68020-16.67Mhz Sun 4.2 cc 3333 3846
- * IBM-4341 Model 12 UTS 5.0 ? 3910 3910 MN
- * MC 5400 68020-16.67MHz RTU V3.0 cc (V4.0) 3952 4054
- * Intel 386/20 80386-12.5Mhz PMON debugger Intel C386v0.2 4149 4386
- * NCR Tower32 68020-16.67Mhz SYS 5.0 Rel 2.0 cc 3846 4545
- * MC 5600/5700 68020-16.67MHz RTU V3.0 cc (V4.0) 4504 4746 %
- * Intel 386/20 80386-12.5Mhz PMON debugger Intel C386v0.2 4534 4794 i1
- * Intel 386/20 80386-16Mhz PMON debugger Intel C386v0.2 5304 5607
- * Gould PN9080 custom ECL UTX-32 1.1C cc 5369 5676
- * Gould 1460-342 ECL proc UTX/32 1.1/c cc 5342 5677 G1
- * VAX-784 - Mach/4.3 cc 5882 5882 &4
- * Intel 386/20 80386-16Mhz PMON debugger Intel C386v0.2 5801 6133 i1
- * VAX 8600 - UNIX 4.3bsd cc 7024 7088
- * VAX 8600 - VMS VAX-11 C 2.0 7142 7142
- * Alliant FX/8 CE Concentrix cc -ce;exec -c 6952 7655 FX
- * CCI POWER 6/32 COS(SV+4.2) cc 7500 7800
- * CCI POWER 6/32 POWER 6 UNIX/V cc 8236 8498
- * CCI POWER 6/32 4.2 Rel. 1.2b cc 8963 9544
- * Sperry (CCI Power 6) 4.2BSD cc 9345 10000
- * CRAY-X-MP/12 105Mhz COS 1.14 Cray C 10204 10204
- * IBM-3083 - UTS 5.0 Rel 1 cc 16666 12500
- * CRAY-1A 80Mhz CTSS Cray C 2.0 12100 13888
- * IBM-3083 - VM/CMS HPO 3.4 Waterloo C 1.2 13889 13889
- * Amdahl 470 V/8 UTS/V 5.2 cc v1.23 15560 15560
- * CRAY-X-MP/48 105Mhz CTSS Cray C 2.0 15625 17857
- * Amdahl 580 - UTS 5.0 Rel 1.2 cc v1.5 23076 23076
- * Amdahl 5860 UTS/V 5.2 cc v1.23 28970 28970
- *
- * NOTE
- * * Crystal changed from 'stock' to listed value.
- * + This Macintosh was upgraded from 128K to 512K in such a way that
- * the new 384K of memory is not slowed down by video generator accesses.
- * % Single processor; MC == MASSCOMP
- * NM A version 7 C compiler written at New Mexico Tech.
- * @ vanilla Lattice compiler used with MicroPro standard library
- * S Shorts used instead of ints
- * T with Chris Torek's patches (whatever they are).
- * ~ For WICAT Systems: MB=MultiBus, PB=Proprietary Bus
- * LM Large Memory Model. (Otherwise, all 80x8x results are small model)
- * MM Medium Memory Model. (Otherwise, all 80x8x results are small model)
- * C1 Univation PC TURBO Co-processor; 9.54Mhz 8086, 640K RAM
- * C2 Seattle Telecom STD-286 board
- * C3 Definicon DSI-32 coprocessor
- * C? Unknown co-processor board?
- * CT1 Convergent Technologies MegaFrame, 1 processor.
- * MN Using Mike Newtons 'optimizer' (see net.sources).
- * G1 This Gould machine has 2 processors and was able to run 2 dhrystone
- * Benchmarks in parallel with no slowdown.
- * FH FHC == Frank Hogg Labs (Hazelwood Uniquad 2 in an FHL box).
- * FX The Alliant FX/8 is a system consisting of 1-8 CEs (computation
- * engines) and 1-12 IPs (interactive processors). Note N8 applies.
- * RT This is one of the RT's that CMU has been using for awhile. I'm
- * not sure that this is identical to the machine that IBM is selling
- * to the public.
- * i1 Normally, the 386/20 starter kit has a 16k direct mapped cache
- * which inserts 2 or 3 wait states on a write thru. These results
- * were obtained by disabling the write-thru, or essentially turning
- * the cache into 0 wait state memory.
- * Nnn This machine has multiple processors, allowing "nn" copies of the
- * benchmark to run in the same time as 1 copy.
- * &nn This machine has "nn" processors, and the benchmark results were
- * obtained by having all "nn" processors working on 1 copy of dhrystone.
- * (Note, this is different than Nnn. Salesmen like this measure).
- * ? I don't trust results marked with '?'. These were sent to me with
- * either incomplete info, or with times that just don't make sense.
- * ?? means I think the performance is too poor, ?! means too good.
- * If anybody can confirm these figures, please respond.
- *
- * ABBREVIATIONS
- * CCC Concurrent Computer Corp. (was Perkin-Elmer)
- * MC Masscomp
- *
- *--------------------------------RESULTS END----------------------------------
- *
- * The following program contains statements of a high-level programming
- * language (C) in a distribution considered representative:
- *
- * assignments 53%
- * control statements 32%
- * procedure, function calls 15%
- *
- * 100 statements are dynamically executed. The program is balanced with
- * respect to the three aspects:
- * - statement type
- * - operand type (for simple data types)
- * - operand access
- * operand global, local, parameter, or constant.
- *
- * The combination of these three aspects is balanced only approximately.
- *
- * The program does not compute anything meaningfull, but it is
- * syntactically and semantically correct.
- *
- */
-
-/* Accuracy of timings and human fatigue controlled by next two lines */
-/*#define LOOPS 5000 /* Use this for slow or 16 bit machines */
-/*#define LOOPS 50000 /* Use this for slow or 16 bit machines */
-#define LOOPS 500000 /* Use this for faster machines */
-
-/* Compiler dependent options */
-#undef NOENUM /* Define if compiler has no enum's */
-#undef NOSTRUCTASSIGN /* Define if compiler can't assign structures */
-
-/* define only one of the next three defines */
-#define GETRUSAGE /* Use getrusage(2) time function */
-/*#define TIMES /* Use times(2) time function */
-/*#define TIME /* Use time(2) time function */
-
-/* define the granularity of your times(2) function (when used) */
-/*#define HZ 60 /* times(2) returns 1/60 second (most) */
-/*#define HZ 100 /* times(2) returns 1/100 second (WECo) */
-
-/* for compatibility with goofed up version */
-/*#define GOOF /* Define if you want the goofed up version */
-
-/* default number of threads that will be spawned */
-#define DEFAULT_THREADS 1
-
-/* Dhrystones per second obtained on VAX11/780 -- a notional 1MIPS machine. */
-/* Used in DMIPS calculation. */
-#define ONE_MIPS 1757
-
-#ifdef GOOF
-char Version[] = "1.0";
-#else
-char Version[] = "1.1";
-#endif
-
-#ifdef NOSTRUCTASSIGN
-#define structassign(d, s) memcpy(&(d), &(s), sizeof(d))
-#else
-#define structassign(d, s) d = s
-#endif
-
-#ifdef NOENUM
-#define Ident1 1
-#define Ident2 2
-#define Ident3 3
-#define Ident4 4
-#define Ident5 5
-typedef int Enumeration;
-#else
-typedef enum {Ident1, Ident2, Ident3, Ident4, Ident5} Enumeration;
-#endif
-
-typedef int OneToThirty;
-typedef int OneToFifty;
-typedef char CapitalLetter;
-typedef char String30[31];
-typedef int Array1Dim[51];
-typedef int Array2Dim[51][51];
-
-struct Record
-{
- struct Record *PtrComp;
- Enumeration Discr;
- Enumeration EnumComp;
- OneToFifty IntComp;
- String30 StringComp;
-};
-
-typedef struct Record RecordType;
-typedef RecordType * RecordPtr;
-typedef int boolean;
-
-//#define NULL 0
-#define TRUE 1
-#define FALSE 0
-
-#ifndef REG
-#define REG
-#endif
-
-extern Enumeration Func1();
-extern boolean Func2();
-
-#ifdef TIMES
-#include <sys/param.h>
-#include <sys/types.h>
-#endif
-#ifdef GETRUSAGE
-#include <sys/resource.h>
-#endif
-#include <time.h>
-#include <unistd.h>
-#include <sys/wait.h>
-#include <stdlib.h>
-#include <stdio.h>
-#include <string.h>
-#include <sys/time.h>
-
-
-main(int argc, char** argv)
-{
- int num_threads = DEFAULT_THREADS;
- int runtime = 0;
- int delay = 0;
- long mloops = 0;
-
- int opt;
- while ((opt = getopt(argc, argv, "ht:r:d:l:")) != -1) {
- switch (opt) {
- case 'h':
- printhelp();
- exit(0);
- break;
- case 't':
- num_threads = atoi(optarg);
- break;
- case 'r':
- runtime = atoi(optarg);
- break;
- case 'd':
- delay = atoi(optarg);
- break;
- case 'l':
- mloops = atoll(optarg);
- break;
- }
- }
-
- if (runtime && mloops) {
- fprintf(stderr, "-r and -l options cannot be specified at the same time.\n");
- exit(1);
- } else if (!runtime && !mloops) {
- fprintf(stderr, "Must specify either -r or -l option; use -h to see help.\n");
- exit(1);
- }
-
- long num_loops = mloops ? mloops * 1000000L : LOOPS * num_threads;
- run_dhrystone(runtime, num_threads, num_loops, delay);
-}
-
-run_dhrystone(int duration, int num_threads, long num_loops, int delay) {
- printf("duration: %d seconds\n", duration);
- printf("number of threads: %d\n", num_threads);
- printf("number of loops: %ld\n", num_loops);
- printf("delay between starting threads: %d seconds\n", delay);
- printf("\n");
-
- pid_t *children = malloc(num_threads* sizeof(pid_t));
- int loops_per_thread = num_loops / num_threads;
-
- clock_t run_start = clock();
-
- long i;
- int actual_duration;
- for (i = 0; i < (num_threads - 1); i++) {
- pid_t c = fork();
- if (c == 0) {
- // child
- actual_duration = duration - i * delay;
- if (actual_duration < 0)
- actual_duration = 0;
- run_for_duration(actual_duration, loops_per_thread);
- exit(0);
- }
-
- children[i] = c;
- sleep(delay);
- }
-
- run_for_duration(duration - delay * (num_threads - 1), loops_per_thread);
-
- for (i = 0; i < num_threads; i++) {
- int status, w;
- do {
- w= wait(&status);
- } while (w != -1 && (!WIFEXITED(status) && !WIFSIGNALED(status)));
- }
-
- clock_t run_end = clock();
- printf("\nTotal dhrystone run time: %f seconds.\n", (double)(run_end - run_start) / CLOCKS_PER_SEC);
-
- exit(0);
-}
-
-run_for_duration(int duration, long num_loops) {
- clock_t end = clock() + duration * CLOCKS_PER_SEC;
- do {
- Proc0(num_loops, duration == 0);
- } while (clock() < end);
-}
-
-printhelp() {
- printf("Usage: dhrystone (-h | -l MLOOPS | -r DURATION) [-t THREADS [-d DELAY]]\n");
- printf("\n");
- printf("Runs dhrystone benchmark either for a specfied duration or for a specified\n");
- printf("number of iterations.\n");
- printf("\n");
- printf("Options:\n");
- printf(" -h Print this message and exit.\n");
- printf(" -l MLOOPS Run dhrystone for the specified number of millions\n");
- printf(" of iterations (i.e. the actual number of iterations is\n");
- printf(" MLOOPS * 1e6).\n");
- printf(" -r DURATION Run dhhrystone for the specified duration (in seconds). \n");
- printf(" dhrystone will be run 500000 iterations, looping until\n");
- printf(" the specified time period has passed.\n");
- printf("\n");
- printf(" Note: -r and -l options may not be specified at the same time.\n");
- printf("\n");
- printf(" -t THREADS Specified the number of concurrent threads (processes,\n");
- printf(" actually) that will be spawned. Defaults to 1.\n");
- printf(" -d DELAY if THREADS is > 1, this specifies the delay between\n");
- printf(" spawning the threads.\n");
- printf("\n");
-}
-
-
-/*
- * Package 1
- */
-int IntGlob;
-boolean BoolGlob;
-char Char1Glob;
-char Char2Glob;
-Array1Dim Array1Glob;
-Array2Dim Array2Glob;
-RecordPtr PtrGlb;
-RecordPtr PtrGlbNext;
-
-Proc0(long numloops, boolean print_result)
-{
- OneToFifty IntLoc1;
- REG OneToFifty IntLoc2;
- OneToFifty IntLoc3;
- REG char CharLoc;
- REG char CharIndex;
- Enumeration EnumLoc;
- String30 String1Loc;
- String30 String2Loc;
- // extern char *malloc();
-
- register unsigned int i;
-#ifdef TIME
- long time();
- long starttime;
- long benchtime;
- long nulltime;
-
- starttime = time( (long *) 0);
- for (i = 0; i < numloops; ++i);
- nulltime = time( (long *) 0) - starttime; /* Computes o'head of loop */
-#endif
-#ifdef TIMES
- time_t starttime;
- time_t benchtime;
- time_t nulltime;
- struct tms tms;
-
- times(&tms); starttime = tms.tms_utime;
- for (i = 0; i < numloops; ++i);
- times(&tms);
- nulltime = tms.tms_utime - starttime; /* Computes overhead of looping */
-#endif
-#ifdef GETRUSAGE
- struct rusage starttime;
- struct rusage endtime;
- struct timeval nulltime;
-
- getrusage(RUSAGE_SELF, &starttime);
- for (i = 0; i < numloops; ++i);
- getrusage(RUSAGE_SELF, &endtime);
- nulltime.tv_sec = endtime.ru_utime.tv_sec - starttime.ru_utime.tv_sec;
- nulltime.tv_usec = endtime.ru_utime.tv_usec - starttime.ru_utime.tv_usec;
-#endif
-
- PtrGlbNext = (RecordPtr) malloc(sizeof(RecordType));
- PtrGlb = (RecordPtr) malloc(sizeof(RecordType));
- PtrGlb->PtrComp = PtrGlbNext;
- PtrGlb->Discr = Ident1;
- PtrGlb->EnumComp = Ident3;
- PtrGlb->IntComp = 40;
- strcpy(PtrGlb->StringComp, "DHRYSTONE PROGRAM, SOME STRING");
-#ifndef GOOF
- strcpy(String1Loc, "DHRYSTONE PROGRAM, 1'ST STRING"); /*GOOF*/
-#endif
- Array2Glob[8][7] = 10; /* Was missing in published program */
-
-/*****************
--- Start Timer --
-*****************/
-#ifdef TIME
- starttime = time( (long *) 0);
-#endif
-#ifdef TIMES
- times(&tms); starttime = tms.tms_utime;
-#endif
-#ifdef GETRUSAGE
- getrusage (RUSAGE_SELF, &starttime);
-#endif
- for (i = 0; i < numloops; ++i)
- {
-
- Proc5();
- Proc4();
- IntLoc1 = 2;
- IntLoc2 = 3;
- strcpy(String2Loc, "DHRYSTONE PROGRAM, 2'ND STRING");
- EnumLoc = Ident2;
- BoolGlob = ! Func2(String1Loc, String2Loc);
- while (IntLoc1 < IntLoc2)
- {
- IntLoc3 = 5 * IntLoc1 - IntLoc2;
- Proc7(IntLoc1, IntLoc2, &IntLoc3);
- ++IntLoc1;
- }
- Proc8(Array1Glob, Array2Glob, IntLoc1, IntLoc3);
- Proc1(PtrGlb);
- for (CharIndex = 'A'; CharIndex <= Char2Glob; ++CharIndex)
- if (EnumLoc == Func1(CharIndex, 'C'))
- Proc6(Ident1, &EnumLoc);
- IntLoc3 = IntLoc2 * IntLoc1;
- IntLoc2 = IntLoc3 / IntLoc1;
- IntLoc2 = 7 * (IntLoc3 - IntLoc2) - IntLoc1;
- Proc2(&IntLoc1);
- }
-
-/*****************
--- Stop Timer --
-*****************/
-
- if (print_result) {
-#ifdef TIME
- benchtime = time( (long *) 0) - starttime - nulltime;
- printf("Dhrystone(%s) time for %ld passes = %ld\n",
- Version,
- (long) numloops, benchtime);
- printf("This machine benchmarks at %ld dhrystones/second\n",
- ((long) numloops) / benchtime);
- printf(" %ld DMIPS\n",
- ((long) numloops) / benchtime / ONE_MIPS);
-#endif
-#ifdef TIMES
- times(&tms);
- benchtime = tms.tms_utime - starttime - nulltime;
- printf("Dhrystone(%s) time for %ld passes = %ld\n",
- Version,
- (long) numloops, benchtime/HZ);
- printf("This machine benchmarks at %ld dhrystones/second\n",
- ((long) numloops) * HZ / benchtime);
- printf(" %ld DMIPS\n",
- ((long) numloops) * HZ / benchtime / ONE_MIPS);
-#endif
-#ifdef GETRUSAGE
- getrusage(RUSAGE_SELF, &endtime);
- {
- double t = (double)(endtime.ru_utime.tv_sec
- - starttime.ru_utime.tv_sec
- - nulltime.tv_sec)
- + (double)(endtime.ru_utime.tv_usec
- - starttime.ru_utime.tv_usec
- - nulltime.tv_usec) * 1e-6;
- printf("Dhrystone(%s) time for %ld passes = %.1f\n",
- Version,
- (long)numloops,
- t);
- printf("This machine benchmarks at %.0f dhrystones/second\n",
- (double)numloops / t);
- printf(" %.0f DMIPS\n",
- (double)numloops / t / ONE_MIPS);
- }
-#endif
- }
-
-}
-
-Proc1(PtrParIn)
-REG RecordPtr PtrParIn;
-{
-#define NextRecord (*(PtrParIn->PtrComp))
-
- structassign(NextRecord, *PtrGlb);
- PtrParIn->IntComp = 5;
- NextRecord.IntComp = PtrParIn->IntComp;
- NextRecord.PtrComp = PtrParIn->PtrComp;
- Proc3(NextRecord.PtrComp);
- if (NextRecord.Discr == Ident1)
- {
- NextRecord.IntComp = 6;
- Proc6(PtrParIn->EnumComp, &NextRecord.EnumComp);
- NextRecord.PtrComp = PtrGlb->PtrComp;
- Proc7(NextRecord.IntComp, 10, &NextRecord.IntComp);
- }
- else
- structassign(*PtrParIn, NextRecord);
-
-#undef NextRecord
-}
-
-Proc2(IntParIO)
-OneToFifty *IntParIO;
-{
- REG OneToFifty IntLoc;
- REG Enumeration EnumLoc;
-
- IntLoc = *IntParIO + 10;
- for(;;)
- {
- if (Char1Glob == 'A')
- {
- --IntLoc;
- *IntParIO = IntLoc - IntGlob;
- EnumLoc = Ident1;
- }
- if (EnumLoc == Ident1)
- break;
- }
-}
-
-Proc3(PtrParOut)
-RecordPtr *PtrParOut;
-{
- if (PtrGlb != NULL)
- *PtrParOut = PtrGlb->PtrComp;
- else
- IntGlob = 100;
- Proc7(10, IntGlob, &PtrGlb->IntComp);
-}
-
-Proc4()
-{
- REG boolean BoolLoc;
-
- BoolLoc = Char1Glob == 'A';
- BoolLoc |= BoolGlob;
- Char2Glob = 'B';
-}
-
-Proc5()
-{
- Char1Glob = 'A';
- BoolGlob = FALSE;
-}
-
-extern boolean Func3();
-
-Proc6(EnumParIn, EnumParOut)
-REG Enumeration EnumParIn;
-REG Enumeration *EnumParOut;
-{
- *EnumParOut = EnumParIn;
- if (! Func3(EnumParIn) )
- *EnumParOut = Ident4;
- switch (EnumParIn)
- {
- case Ident1: *EnumParOut = Ident1; break;
- case Ident2: if (IntGlob > 100) *EnumParOut = Ident1;
- else *EnumParOut = Ident4;
- break;
- case Ident3: *EnumParOut = Ident2; break;
- case Ident4: break;
- case Ident5: *EnumParOut = Ident3;
- }
-}
-
-Proc7(IntParI1, IntParI2, IntParOut)
-OneToFifty IntParI1;
-OneToFifty IntParI2;
-OneToFifty *IntParOut;
-{
- REG OneToFifty IntLoc;
-
- IntLoc = IntParI1 + 2;
- *IntParOut = IntParI2 + IntLoc;
-}
-
-Proc8(Array1Par, Array2Par, IntParI1, IntParI2)
-Array1Dim Array1Par;
-Array2Dim Array2Par;
-OneToFifty IntParI1;
-OneToFifty IntParI2;
-{
- REG OneToFifty IntLoc;
- REG OneToFifty IntIndex;
-
- IntLoc = IntParI1 + 5;
- Array1Par[IntLoc] = IntParI2;
- Array1Par[IntLoc+1] = Array1Par[IntLoc];
- Array1Par[IntLoc+30] = IntLoc;
- for (IntIndex = IntLoc; IntIndex <= (IntLoc+1); ++IntIndex)
- Array2Par[IntLoc][IntIndex] = IntLoc;
- ++Array2Par[IntLoc][IntLoc-1];
- Array2Par[IntLoc+20][IntLoc] = Array1Par[IntLoc];
- IntGlob = 5;
-}
-
-Enumeration Func1(CharPar1, CharPar2)
-CapitalLetter CharPar1;
-CapitalLetter CharPar2;
-{
- REG CapitalLetter CharLoc1;
- REG CapitalLetter CharLoc2;
-
- CharLoc1 = CharPar1;
- CharLoc2 = CharLoc1;
- if (CharLoc2 != CharPar2)
- return (Ident1);
- else
- return (Ident2);
-}
-
-boolean Func2(StrParI1, StrParI2)
-String30 StrParI1;
-String30 StrParI2;
-{
- REG OneToThirty IntLoc;
- REG CapitalLetter CharLoc;
-
- IntLoc = 1;
- while (IntLoc <= 1)
- if (Func1(StrParI1[IntLoc], StrParI2[IntLoc+1]) == Ident1)
- {
- CharLoc = 'A';
- ++IntLoc;
- }
- if (CharLoc >= 'W' && CharLoc <= 'Z')
- IntLoc = 7;
- if (CharLoc == 'X')
- return(TRUE);
- else
- {
- if (strcmp(StrParI1, StrParI2) > 0)
- {
- IntLoc += 7;
- return (TRUE);
- }
- else
- return (FALSE);
- }
-}
-
-boolean Func3(EnumParIn)
-REG Enumeration EnumParIn;
-{
- REG Enumeration EnumLoc;
-
- EnumLoc = EnumParIn;
- if (EnumLoc == Ident3) return (TRUE);
- return (FALSE);
-}
-
-#ifdef NOSTRUCTASSIGN
-memcpy(d, s, l)
-register char *d;
-register char *s;
-register int l;
-{
- while (l--) *d++ = *s++;
-}
-#endif
-/* ---------- */
diff --git a/wlauto/workloads/dungeondefenders/__init__.py b/wlauto/workloads/dungeondefenders/__init__.py
deleted file mode 100644
index da924202..00000000
--- a/wlauto/workloads/dungeondefenders/__init__.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=R0801
-import os
-import time
-
-from wlauto import GameWorkload
-from wlauto.exceptions import WorkloadError, DeviceError
-
-
-class DungeonDefenders(GameWorkload):
-
- name = 'dungeondefenders'
- description = """
- Dungeon Defenders game.
-
- """
- package = 'com.trendy.ddapp'
- activity = 'com.trendy.ddapp.ddapp'
- loading_time = 20
- asset_file = 'com.trendy.ddapp.tar.gz'
diff --git a/wlauto/workloads/dungeondefenders/revent_files/Nexus10.run.revent b/wlauto/workloads/dungeondefenders/revent_files/Nexus10.run.revent
deleted file mode 100644
index 42b13a84..00000000
--- a/wlauto/workloads/dungeondefenders/revent_files/Nexus10.run.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/dungeondefenders/revent_files/Nexus10.setup.revent b/wlauto/workloads/dungeondefenders/revent_files/Nexus10.setup.revent
deleted file mode 100644
index d3575a75..00000000
--- a/wlauto/workloads/dungeondefenders/revent_files/Nexus10.setup.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/ebizzy/__init__.py b/wlauto/workloads/ebizzy/__init__.py
deleted file mode 100644
index 7106ce7e..00000000
--- a/wlauto/workloads/ebizzy/__init__.py
+++ /dev/null
@@ -1,89 +0,0 @@
-# Copyright 2012-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# pylint: disable=W0201, C0103
-
-import os
-import re
-
-from wlauto import Workload, Parameter, Executable
-
-results_txt = 'ebizzy_results.txt'
-record_regex = re.compile(r'(?P<record>\d+) records/s')
-result_regex = re.compile(r'(?P<metric>\D+)(?P<value>\d+.*\b)(?P<unit>\S+)')
-
-
-class Ebizzy(Workload):
-
- name = 'ebizzy'
- description = """
- ebizzy is designed to generate a workload resembling common web
- application server workloads. It is highly threaded, has a large in-memory
- working set with low locality, and allocates and deallocates memory frequently.
- When running most efficiently, it will max out the CPU.
-
- ebizzy description taken from the source code at
- https://github.com/linux-test-project/ltp/tree/master/utils/benchmark/ebizzy-0.3
-
- """
-
- parameters = [
- # Workload parameters go here e.g.
- Parameter('threads', kind=int, default=2, description='Number of threads to execute.'),
- Parameter('seconds', kind=int, default=10, description='Number of seconds.'),
- Parameter('chunks', kind=int, default=10,
- description='Number of memory chunks to allocate.'),
- Parameter('extra_params', kind=str, default='',
- description='Extra parameters to pass in (e.g. -M to disable mmap).'
- ' See ebizzy -? for full list of options.')
- ]
-
- def setup(self, context):
- timeout_buf = 10
- self.command = '{} -t {} -S {} -n {} {} > {}'
- self.ebizzy_results = self.device.path.join(self.device.working_directory, results_txt)
- self.device_binary = None
- self.run_timeout = self.seconds + timeout_buf
-
- self.binary_name = 'ebizzy'
- host_binary = context.resolver.get(Executable(self, self.device.abi, self.binary_name))
- self.device_binary = self.device.install_if_needed(host_binary)
-
- self.command = self.command.format(self.device_binary, self.threads, self.seconds,
- self.chunks, self.extra_params, self.ebizzy_results)
-
- def run(self, context):
- self.device.execute(self.command, timeout=self.run_timeout)
-
- def update_result(self, context):
- self.device.pull(self.ebizzy_results, context.output_directory)
-
- with open(os.path.join(context.output_directory, results_txt)) as ebizzy_file:
- for line in ebizzy_file:
- record_match = record_regex.search(line)
- if record_match:
- context.result.add_metric('total_recs', record_match.group('record'),
- 'records/s')
-
- results_match = result_regex.search(line)
- if results_match:
- context.result.add_metric(results_match.group('metric'),
- results_match.group('value'),
- results_match.group('unit'))
-
- def teardown(self, context):
- self.device.uninstall(self.device_binary)
-
- def validate(self):
- pass
diff --git a/wlauto/workloads/ebizzy/bin/arm64/ebizzy b/wlauto/workloads/ebizzy/bin/arm64/ebizzy
deleted file mode 100755
index d74ffe27..00000000
--- a/wlauto/workloads/ebizzy/bin/arm64/ebizzy
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/ebizzy/bin/armeabi/ebizzy b/wlauto/workloads/ebizzy/bin/armeabi/ebizzy
deleted file mode 100755
index acc0e18b..00000000
--- a/wlauto/workloads/ebizzy/bin/armeabi/ebizzy
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/ebizzy/src/LICENSE b/wlauto/workloads/ebizzy/src/LICENSE
deleted file mode 100644
index eb84b5ff..00000000
--- a/wlauto/workloads/ebizzy/src/LICENSE
+++ /dev/null
@@ -1,3 +0,0 @@
-ebizzy binary source code can be found here:
-
-https://github.com/linux-test-project/ltp/tree/master/utils/benchmark/ebizzy-0.3
diff --git a/wlauto/workloads/facebook/__init__.py b/wlauto/workloads/facebook/__init__.py
deleted file mode 100644
index cbc9a7c8..00000000
--- a/wlauto/workloads/facebook/__init__.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import os
-import time
-import sys
-
-from wlauto import AndroidUiAutoBenchmark
-from wlauto import UiAutomatorWorkload
-from wlauto import AndroidBenchmark
-
-
-class Facebook(AndroidUiAutoBenchmark):
-
- name = 'facebook'
- description = """
- Uses com.facebook.patana apk for facebook workload.
- This workload does the following activities in facebook
-
- Login to facebook account.
- Send a message.
- Check latest notification.
- Search particular user account and visit his/her facebook account.
- Find friends.
- Update the facebook status
-
- [NOTE: This workload starts disableUpdate workload as a part of setup to
- disable online updates, which helps to tackle problem of uncertain
- behavier during facebook workload run.]
-
- """
- package = 'com.facebook.katana'
- activity = '.LoginActivity'
-
- #'du' specify 'disable update'
- du_activity = 'com.android.vending/.AssetBrowserActivity'
- du_method_string = 'com.arm.wlauto.uiauto.facebook.UiAutomation#disableUpdate'
- du_jar_file = '/data/local/wa_usecases/com.arm.wlauto.uiauto.facebook.jar'
- du_run_timeout = 4 * 60
- du_working_dir = '/data/local/wa_usecases'
- du_apk_file = '/disableupdateapk/com.android.vending-4.3.10.apk'
- DELAY = 5
-
- def setup(self, context):
- UiAutomatorWorkload.setup(self, context)
-
- #Start the play store activity
- self.device.execute('am start {}'.format(self.du_activity))
-
- #Creating command
- command = 'uiautomator runtest {} -e workdir {} -c {}'.format(self.du_jar_file,
- self.du_working_dir,
- self.du_method_string)
-
- #Start the disable update workload
- self.device.execute(command, self.du_run_timeout)
- time.sleep(self.DELAY)
-
- #Stop the play store activity
- self.device.execute('am force-stop com.android.vending')
-
- AndroidBenchmark.setup(self, context)
-
- def update_result(self, context):
- super(Facebook, self).update_result(context)
-
- def teardown(self, context):
- pass
-
diff --git a/wlauto/workloads/facebook/com.arm.wlauto.uiauto.facebook.jar b/wlauto/workloads/facebook/com.arm.wlauto.uiauto.facebook.jar
deleted file mode 100644
index 098030b0..00000000
--- a/wlauto/workloads/facebook/com.arm.wlauto.uiauto.facebook.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/facebook/uiauto/build.sh b/wlauto/workloads/facebook/uiauto/build.sh
deleted file mode 100755
index 00535591..00000000
--- a/wlauto/workloads/facebook/uiauto/build.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.facebook.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.facebook.jar ..
-fi
diff --git a/wlauto/workloads/facebook/uiauto/build.xml b/wlauto/workloads/facebook/uiauto/build.xml
deleted file mode 100644
index e39db0ff..00000000
--- a/wlauto/workloads/facebook/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.facebook" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/facebook/uiauto/project.properties b/wlauto/workloads/facebook/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/facebook/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/facebook/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/facebook/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index 3c9dbb2c..00000000
--- a/wlauto/workloads/facebook/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,257 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.facebook;
-
-import android.app.Activity;
-import android.os.Bundle;
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "facebook";
-
- /*
- * The 'runUiAutomation' method implements the following activities
- * Login to facebook account.
- * Send a message.
- * Check latest notification.
- * Search particular user account and visit his/her facebook account.
- * Go to find friends.
- * Update the facebook status
- */
- public void runUiAutomation() throws Exception {
- final int timeout = 5;
- UiSelector selector = new UiSelector();
-
- UiObject logInButton = new UiObject(selector
- .className("android.widget.Button").index(3).text("Log In"));
-
- UiObject emailField = new UiObject(selector
- .className("android.widget.EditText").index(1));
- emailField.clearTextField();
- emailField.setText("abkksathe@gmail.com");
-
- UiObject passwordField = new UiObject(selector
- .className("android.widget.EditText").index(2));
- passwordField.clearTextField();
- passwordField.setText("highelymotivated");
-
- logInButton.clickAndWaitForNewWindow(timeout);
-
- sleep(timeout);
-
- //Click on message logo
- UiObject messageLogo = new UiObject(new UiSelector()
- .className("android.widget.RelativeLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(3)
- .childSelector(new UiSelector()
- .className("android.widget.RelativeLayout").index(1)
- .childSelector(new UiSelector()
- .className("android.widget.ImageButton").index(0)))));
- messageLogo.clickAndWaitForNewWindow(timeout);
-
- //send message
- UiObject clickMessage = new UiObject(new UiSelector()
- .className("android.support.v4.view.ViewPager").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.RelativeLayout").index(1)));
- clickMessage.clickAndWaitForNewWindow(timeout);
-
- sleep(timeout);
-
- UiObject sendMessage = new UiObject(new UiSelector()
- .className("android.widget.FrameLayout").index(4)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(2))
- .childSelector(new UiSelector()
- .className("android.widget.EditText").index(0)
- .text("Write a message")));
- sendMessage.click();
-
- sleep(timeout);
-
- UiObject editMessage = new UiObject(new UiSelector()
- .className("android.widget.EditText").text("Write a message"));
-
- editMessage.setText("Hi how are you?????");
-
- UiObject sendButton = new UiObject(new UiSelector()
- .className("android.widget.TextView").text("Send"));
- sendButton.click();
-
- getUiDevice().pressDPadDown();
- sleep(timeout);
- getUiDevice().pressBack();
- sleep(timeout);
- getUiDevice().pressBack();
-
- //Check for notifications
- UiObject clickNotificationsLogo = new UiObject(new UiSelector()
- .className("android.widget.RelativeLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(3)
- .childSelector(new UiSelector()
- .className("android.widget.RelativeLayout").index(2)
- .childSelector(new UiSelector()
- .className("android.widget.ImageButton").index(0)))));
- clickNotificationsLogo.clickAndWaitForNewWindow(timeout);
-
- //Click on latest notification
- UiObject clickNotify = new UiObject(new UiSelector()
- .className("android.support.v4.view.ViewPager").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(1)));
- clickNotify.clickAndWaitForNewWindow(timeout);
-
- sleep(timeout);
- getUiDevice().pressBack();
- sleep(timeout);
- getUiDevice().pressBack();
-
- //Search for the facebook account
- UiObject clickBar = new UiObject(new UiSelector()
- .className("android.view.View").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.ImageButton").index(0)
- .description("Main navigation menu")));
- clickBar.clickAndWaitForNewWindow(timeout);
-
- UiObject clickSearch = new UiObject(new UiSelector()
- .className("android.widget.FrameLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.FrameLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.FrameLayout").index(1)
- .childSelector(new UiSelector()
- .className("android.widget.EditText").index(1)
- .text("Search"))))));
- clickSearch.clickAndWaitForNewWindow(timeout);
-
- UiObject editSearch = new UiObject(new UiSelector()
- .className("android.widget.EditText").index(0).text("Search"));
-
- editSearch.clearTextField();
- editSearch.setText("amol kamble");
- sleep(timeout);
-
- UiObject clickOnSearchResult = new UiObject(new UiSelector()
- .className("android.webkit.WebView").index(0));
- clickOnSearchResult.clickTopLeft();
-
- sleep(2 * timeout);
-
- getUiDevice().pressBack();
- sleep(timeout);
- getUiDevice().pressBack();
-
- clickBar.click();
-
- sleep(timeout);
-
- //Click on find friends
- UiObject clickFriends = new UiObject(new UiSelector()
- .className("android.widget.FrameLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.FrameLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.FrameLayout").index(1)
- .childSelector(new UiSelector()
- .className("android.widget.RelativeLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.ListView").index(2)))))));
-
- UiObject friends = clickFriends.getChild(new UiSelector()
- .className("android.widget.RelativeLayout").index(3));
- friends.click();
- sleep(timeout);
- getUiDevice().pressBack();
-
- //Update the status
- UiObject updateStatus = new UiObject(new UiSelector()
- .className("android.widget.FrameLayout").index(1)
- .childSelector(new UiSelector()
- .className("android.widget.FrameLayout").index(1)
- .childSelector(new UiSelector()
- .className("android.widget.RelativeLayout").index(1)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(0)))))));
-
- updateStatus.clickAndWaitForNewWindow(timeout);
-
- UiObject editUpdateStatus = new UiObject(new UiSelector()
- .className("android.widget.EditText")
- .text("What's on your mind?"));
- editUpdateStatus.clearTextField();
- editUpdateStatus.setText("hellllooooooo its done!!");
-
- UiObject clickPost = new UiObject(new UiSelector()
- .className("android.widget.RelativeLayout").index(0)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout").index(3)));
- clickPost.clickAndWaitForNewWindow(timeout);
- getUiDevice().pressHome();
- }
-
- //disable update using playstore
- public void disableUpdate() throws UiObjectNotFoundException {
-
- UiObject accountSelect = new UiObject(new UiSelector()
- .className("android.widget.Button").text("Accept"));
-
- if (accountSelect.exists())
- accountSelect.click();
-
- UiObject moreOptions = new UiObject(new UiSelector()
- .className("android.widget.ImageButton")
- .description("More options"));
- moreOptions.click();
-
- UiObject settings = new UiObject(new UiSelector()
- .className("android.widget.TextView").text("Settings"));
- settings.clickAndWaitForNewWindow();
-
- UiObject autoUpdate = new UiObject(new UiSelector()
- .className("android.widget.TextView")
- .text("Auto-update apps"));
-
- autoUpdate.clickAndWaitForNewWindow();
-
- UiObject clickAutoUpdate = new UiObject(new UiSelector()
- .className("android.widget.CheckedTextView")
- .text("Do not auto-update apps"));
-
- clickAutoUpdate.clickAndWaitForNewWindow();
-
- getUiDevice().pressBack();
- getUiDevice().pressHome();
- }
-}
diff --git a/wlauto/workloads/geekbench/__init__.py b/wlauto/workloads/geekbench/__init__.py
deleted file mode 100644
index b81325c9..00000000
--- a/wlauto/workloads/geekbench/__init__.py
+++ /dev/null
@@ -1,356 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101
-import os
-import re
-import tempfile
-import json
-from collections import defaultdict
-
-from wlauto import AndroidUiAutoBenchmark, Parameter, Artifact
-from wlauto.exceptions import ConfigError, WorkloadError
-from wlauto.utils.misc import capitalize
-import wlauto.common.android.resources
-
-
-class Geekbench(AndroidUiAutoBenchmark):
-
- name = 'geekbench'
- description = """
- Geekbench provides a comprehensive set of benchmarks engineered to quickly
- and accurately measure processor and memory performance.
-
- http://www.primatelabs.com/geekbench/
-
- From the website:
-
- Designed to make benchmarks easy to run and easy to understand, Geekbench
- takes the guesswork out of producing robust and reliable benchmark results.
-
- Geekbench scores are calibrated against a baseline score of 1,000 (which is
- the score of a single-processor Power Mac G5 @ 1.6GHz). Higher scores are
- better, with double the score indicating double the performance.
-
- The benchmarks fall into one of four categories:
-
- - integer performance.
- - floating point performance.
- - memory performance.
- - stream performance.
-
- Geekbench benchmarks: http://www.primatelabs.com/geekbench/doc/benchmarks.html
-
- Geekbench scoring methedology:
- http://support.primatelabs.com/kb/geekbench/interpreting-geekbench-scores
-
- """
- summary_metrics = ['score', 'multicore_score']
- versions = {
- '3': {
- 'package': 'com.primatelabs.geekbench3',
- 'activity': '.HomeActivity',
- },
- '2': {
- 'package': 'ca.primatelabs.geekbench2',
- 'activity': '.HomeActivity',
- },
- }
- begin_regex = re.compile(r'^\s*D/WebViewClassic.loadDataWithBaseURL\(\s*\d+\s*\)'
- r'\s*:\s*(?P<content>\<.*)\s*$')
- replace_regex = re.compile(r'<[^>]*>')
-
- parameters = [
- Parameter('version', default=sorted(versions.keys())[-1], allowed_values=sorted(versions.keys()),
- description='Specifies which version of the workload should be run.'),
- Parameter('times', kind=int, default=1,
- description=('Specfies the number of times the benchmark will be run in a "tight '
- 'loop", i.e. without performaing setup/teardown inbetween.')),
- ]
-
- @property
- def activity(self):
- return self.versions[self.version]['activity']
-
- @property
- def package(self):
- return self.versions[self.version]['package']
-
- def __init__(self, device, **kwargs):
- super(Geekbench, self).__init__(device, **kwargs)
- self.uiauto_params['version'] = self.version
- self.uiauto_params['times'] = self.times
- self.run_timeout = 5 * 60 * self.times
-
- def initialize(self, context):
- if self.version == '3' and not self.device.is_rooted:
- raise WorkloadError('Geekbench workload only works on rooted devices.')
-
- def init_resources(self, context):
- self.apk_file = context.resolver.get(wlauto.common.android.resources.ApkFile(self), version=self.version)
- self.uiauto_file = context.resolver.get(wlauto.common.android.resources.JarFile(self))
- self.device_uiauto_file = self.device.path.join(self.device.working_directory,
- os.path.basename(self.uiauto_file))
- if not self.uiauto_package:
- self.uiauto_package = os.path.splitext(os.path.basename(self.uiauto_file))[0]
-
- def update_result(self, context):
- super(Geekbench, self).update_result(context)
- update_method = getattr(self, 'update_result_{}'.format(self.version))
- update_method(context)
-
- def validate(self):
- if (self.times > 1) and (self.version == '2'):
- raise ConfigError('times parameter is not supported for version 2 of Geekbench.')
-
- def update_result_2(self, context):
- score_calculator = GBScoreCalculator()
- score_calculator.parse(self.logcat_log)
- score_calculator.update_results(context)
-
- def update_result_3(self, context):
- outfile_glob = self.device.path.join(context.device_manager.package_data_directory, self.package, 'files', '*gb3')
- on_device_output_files = [f.strip() for f in
- self.device.execute('ls {}'.format(outfile_glob), as_root=True).split('\n')
- if f.strip()]
- for i, on_device_output_file in enumerate(on_device_output_files):
- host_temp_file = tempfile.mktemp()
- self.device.pull(on_device_output_file, host_temp_file)
- host_output_file = os.path.join(context.output_directory, os.path.basename(on_device_output_file))
- with open(host_temp_file) as fh:
- data = json.load(fh)
- os.remove(host_temp_file)
- with open(host_output_file, 'w') as wfh:
- json.dump(data, wfh, indent=4)
- context.iteration_artifacts.append(Artifact('geekout', path=os.path.basename(on_device_output_file),
- kind='data',
- description='Geekbench 3 output from device.'))
- context.result.add_metric(namemify('score', i), data['score'])
- context.result.add_metric(namemify('multicore_score', i), data['multicore_score'])
- for section in data['sections']:
- context.result.add_metric(namemify(section['name'] + '_score', i), section['score'])
- context.result.add_metric(namemify(section['name'] + '_multicore_score', i),
- section['multicore_score'])
-
-
-class GBWorkload(object):
- """
- Geekbench workload (not to be confused with WA's workloads). This is a single test run by
- geek bench, such as preforming compression or generating Madelbrot.
-
- """
-
- # Index maps onto the hundreds digit of the ID.
- categories = [None, 'integer', 'float', 'memory', 'stream']
-
- # 2003 entry-level Power Mac G5 is considered to have a baseline score of
- # 1000 for every category.
- pmac_g5_base_score = 1000
-
- units_conversion_map = {
- 'K': 1,
- 'M': 1000,
- 'G': 1000000,
- }
-
- def __init__(self, wlid, name, pmac_g5_st_score, pmac_g5_mt_score):
- """
- :param wlid: A three-digit workload ID. Uniquely identifies a workload and also
- determines the category a workload belongs to.
- :param name: The name of the workload.
- :param pmac_g5_st_score: Score achieved for this workload on 2003 entry-level
- Power Mac G5 running in a single thread.
- :param pmac_g5_mt_score: Score achieved for this workload on 2003 entry-level
- Power Mac G5 running in multiple threads.
-
- """
- self.wlid = wlid
- self.name = name
- self.pmac_g5_st_score = pmac_g5_st_score
- self.pmac_g5_mt_score = pmac_g5_mt_score
- self.category = self.categories[int(wlid) // 100]
- self.collected_results = []
-
- def add_result(self, value, units):
- self.collected_results.append(self.convert_to_kilo(value, units))
-
- def convert_to_kilo(self, value, units):
- return value * self.units_conversion_map[units[0]]
-
- def clear(self):
- self.collected_results = []
-
- def get_scores(self):
- """
- Returns a tuple (single-thraded score, multi-threaded score) for this workload.
- Some workloads only have a single-threaded score, in which case multi-threaded
- score will be ``None``.
-
- Geekbench will perform four iterations of each workload in single-threaded and,
- for some workloads, multi-threaded configurations. Thus there should always be
- either four or eight scores collected for each workload. Single-threaded iterations
- are always done before multi-threaded, so the ordering of the scores can be used
- to determine which configuration they belong to.
-
- This method should not be called before score collection has finished.
-
- """
- no_of_results = len(self.collected_results)
- if no_of_results == 4:
- return (self._calculate(self.collected_results[:4], self.pmac_g5_st_score), None)
- if no_of_results == 8:
- return (self._calculate(self.collected_results[:4], self.pmac_g5_st_score),
- self._calculate(self.collected_results[4:], self.pmac_g5_mt_score))
- else:
- msg = 'Collected {} results for Geekbench {} workload;'.format(no_of_results, self.name)
- msg += ' expecting either 4 or 8.'
- raise WorkloadError(msg)
-
- def _calculate(self, values, scale_factor):
- return max(values) * self.pmac_g5_base_score / scale_factor
-
- def __str__(self):
- return self.name
-
- __repr__ = __str__
-
-
-class GBScoreCalculator(object):
- """
- Parses logcat output to extract raw Geekbench workload values and converts them into
- category and overall scores.
-
- """
-
- result_regex = re.compile(r'workload (?P<id>\d+) (?P<value>[0-9.]+) '
- r'(?P<units>[a-zA-Z/]+) (?P<time>[0-9.]+)s')
-
- # Indicates contribution to the overall score.
- category_weights = {
- 'integer': 0.3357231,
- 'float': 0.3594,
- 'memory': 0.1926489,
- 'stream': 0.1054738,
- }
- #pylint: disable=C0326
- workloads = [
- # ID Name Power Mac ST Power Mac MT
- GBWorkload(101, 'Blowfish', 43971, 40979),
- GBWorkload(102, 'Text Compress', 3202, 3280),
- GBWorkload(103, 'Text Decompress', 4112, 3986),
- GBWorkload(104, 'Image Compress', 8272, 8412),
- GBWorkload(105, 'Image Decompress', 16800, 16330),
- GBWorkload(107, 'Lua', 385, 385),
-
- GBWorkload(201, 'Mandelbrot', 665589, 653746),
- GBWorkload(202, 'Dot Product', 481449, 455422),
- GBWorkload(203, 'LU Decomposition', 889933, 877657),
- GBWorkload(204, 'Primality Test', 149394, 185502),
- GBWorkload(205, 'Sharpen Image', 2340, 2304),
- GBWorkload(206, 'Blur Image', 791, 787),
-
- GBWorkload(302, 'Read Sequential', 1226708, None),
- GBWorkload(304, 'Write Sequential', 683782, None),
- GBWorkload(306, 'Stdlib Allocate', 3739, None),
- GBWorkload(307, 'Stdlib Write', 2070681, None),
- GBWorkload(308, 'Stdlib Copy', 1030360, None),
-
- GBWorkload(401, 'Stream Copy', 1367892, None),
- GBWorkload(402, 'Stream Scale', 1296053, None),
- GBWorkload(403, 'Stream Add', 1507115, None),
- GBWorkload(404, 'Stream Triad', 1384526, None),
- ]
-
- def __init__(self):
- self.workload_map = {wl.wlid: wl for wl in self.workloads}
-
- def parse(self, filepath):
- """
- Extract results from the specified file. The file should contain a logcat log of Geekbench execution.
- Iteration results in the log appear as 'I/geekbench' category entries in the following format::
-
- | worklod ID value units timing
- | \------------- | ----/ ---/
- | | | | |
- | I/geekbench(29026): [....] workload 101 132.9 MB/sec 0.0300939s
- | | |
- | | -----\
- | label random crap we don't care about
-
- """
- for wl in self.workloads:
- wl.clear()
- with open(filepath) as fh:
- for line in fh:
- match = self.result_regex.search(line)
- if match:
- wkload = self.workload_map[int(match.group('id'))]
- wkload.add_result(float(match.group('value')), match.group('units'))
-
- def update_results(self, context):
- """
- http://support.primatelabs.com/kb/geekbench/interpreting-geekbench-2-scores
-
- From the website:
-
- Each workload's performance is compared against a baseline to determine a score. These
- scores are averaged together to determine an overall, or Geekbench, score for the system.
-
- Geekbench uses the 2003 entry-level Power Mac G5 as the baseline with a score of 1,000
- points. Higher scores are better, with double the score indicating double the performance.
-
- Geekbench provides three different kinds of scores:
-
- :Workload Scores: Each time a workload is executed Geekbench calculates a score based
- on the computer's performance compared to the baseline
- performance. There can be multiple workload scores for the
- same workload as Geekbench can execute each workload multiple
- times with different settings. For example, the "Dot Product"
- workload is executed four times (single-threaded scalar code,
- multi-threaded scalar code, single-threaded vector code, and
- multi-threaded vector code) producing four "Dot Product" scores.
-
- :Section Scores: A section score is the average of all the workload scores for
- workloads that are part of the section. These scores are useful
- for determining the performance of the computer in a particular
- area. See the section descriptions above for a summary on what
- each section measures.
-
- :Geekbench Score: The Geekbench score is the weighted average of the four section
- scores. The Geekbench score provides a way to quickly compare
- performance across different computers and different platforms
- without getting bogged down in details.
-
- """
- scores_by_category = defaultdict(list)
- for wkload in self.workloads:
- st_score, mt_score = wkload.get_scores()
- scores_by_category[wkload.category].append(st_score)
- context.result.add_metric(wkload.name + ' (single-threaded)', int(st_score))
- if mt_score is not None:
- scores_by_category[wkload.category].append(mt_score)
- context.result.add_metric(wkload.name + ' (multi-threaded)', int(mt_score))
-
- overall_score = 0
- for category in scores_by_category:
- scores = scores_by_category[category]
- category_score = sum(scores) / len(scores)
- overall_score += category_score * self.category_weights[category]
- context.result.add_metric(capitalize(category) + ' Score', int(category_score))
- context.result.add_metric('Geekbench Score', int(overall_score))
-
-
-def namemify(basename, i):
- return basename + (' {}'.format(i) if i else '')
diff --git a/wlauto/workloads/geekbench/com.arm.wlauto.uiauto.geekbench.jar b/wlauto/workloads/geekbench/com.arm.wlauto.uiauto.geekbench.jar
deleted file mode 100644
index 5359cc30..00000000
--- a/wlauto/workloads/geekbench/com.arm.wlauto.uiauto.geekbench.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/geekbench/uiauto/build.sh b/wlauto/workloads/geekbench/uiauto/build.sh
deleted file mode 100755
index 7da9f5fe..00000000
--- a/wlauto/workloads/geekbench/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.geekbench.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.geekbench.jar ..
-fi
diff --git a/wlauto/workloads/geekbench/uiauto/build.xml b/wlauto/workloads/geekbench/uiauto/build.xml
deleted file mode 100644
index 7fdf1685..00000000
--- a/wlauto/workloads/geekbench/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.geekbench" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/geekbench/uiauto/project.properties b/wlauto/workloads/geekbench/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/geekbench/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/geekbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/geekbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index 968d2abc..00000000
--- a/wlauto/workloads/geekbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,121 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.geekbench;
-
-import java.util.concurrent.TimeUnit;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.KeyEvent;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "geekbench";
-
- public void runUiAutomation() throws Exception {
- Bundle params = getParams();
- int version = Integer.parseInt(params.getString("version"));
- int times = Integer.parseInt(params.getString("times"));
-
- for (int i = 0; i < times; i++) {
- runBenchmarks();
- switch(version) {
- case 2:
- // In version 2, we scroll through the results WebView to make sure
- // all results appear on the screen, which causes them to be dumped into
- // logcat by the Linaro hacks.
- waitForResultsv2();
- scrollThroughResults();
- break;
- case 3:
- // Attempting to share the results will generate the .gb3 file with
- // results that can then be pulled from the device. This is not possible
- // in verison 2 of Geekbench (Share option was added later).
- waitForResultsv3();
- shareResults();
- break;
- }
-
- if (i < (times - 1)) {
- getUiDevice().pressBack();
- getUiDevice().pressBack(); // twice
- }
- }
-
- Bundle status = new Bundle();
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
- public void runBenchmarks() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject runButton = new UiObject(selector.text("Run Benchmarks")
- .className("android.widget.Button"));
- if (!runButton.exists()) {
- getUiDevice().pressBack();
- }
- runButton.click();
- }
-
- public void waitForResultsv2() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject resultsWebview = new UiObject(selector.className("android.webkit.WebView"));
- if (!resultsWebview.waitForExists(TimeUnit.SECONDS.toMillis(200))) {
- throw new UiObjectNotFoundException("Did not see Geekbench results screen.");
- }
- }
-
- public void waitForResultsv3() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject runningTextView = new UiObject(selector.text("Running Benchmarks...")
- .className("android.widget.TextView"));
- runningTextView.waitForExists(TimeUnit.SECONDS.toMillis(2));
- if (!runningTextView.waitUntilGone(TimeUnit.SECONDS.toMillis(200))) {
- throw new UiObjectNotFoundException("Did not get to Geekbench results screen.");
- }
- }
-
- public void scrollThroughResults() throws Exception {
- UiSelector selector = new UiSelector();
- getUiDevice().pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);
- sleep(1);
- getUiDevice().pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);
- sleep(1);
- getUiDevice().pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);
- sleep(1);
- getUiDevice().pressKeyCode(KeyEvent.KEYCODE_PAGE_DOWN);
- }
-
- public void shareResults() throws Exception {
- sleep(2); // transition
- UiSelector selector = new UiSelector();
- getUiDevice().pressMenu();
- UiObject runButton = new UiObject(selector.text("Share")
- .className("android.widget.TextView"));
- runButton.waitForExists(500);
- runButton.click();
- }
-}
diff --git a/wlauto/workloads/glbcorp/__init__.py b/wlauto/workloads/glbcorp/__init__.py
deleted file mode 100644
index 18de09d5..00000000
--- a/wlauto/workloads/glbcorp/__init__.py
+++ /dev/null
@@ -1,215 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,W0201,E0203
-
-from __future__ import division
-import os
-import re
-import time
-import select
-import json
-import threading
-import subprocess
-
-from wlauto import ApkWorkload, Parameter, Alias
-from wlauto.exceptions import WorkloadError
-
-
-DELAY = 2
-
-
-class GlbCorp(ApkWorkload):
-
- name = 'glb_corporate'
- description = """
- GFXBench GL (a.k.a. GLBench) v3.0 Corporate version.
-
- This is a version of GLBench available through a corporate license (distinct
- from the version available in Google Play store).
-
- """
- package = 'net.kishonti.gfxbench'
- activity = 'net.kishonti.benchui.TestActivity'
-
- result_start_regex = re.compile(r'I/TfwActivity\s*\(\s*\d+\):\s+\S+\s+result: {')
- preamble_regex = re.compile(r'I/TfwActivity\s*\(\s*\d+\):\s+')
-
- valid_test_ids = [
- 'gl_alu',
- 'gl_alu_off',
- 'gl_blending',
- 'gl_blending_off',
- 'gl_driver',
- 'gl_driver_off',
- 'gl_fill',
- 'gl_fill_off',
- 'gl_manhattan',
- 'gl_manhattan_off',
- 'gl_trex',
- 'gl_trex_battery',
- 'gl_trex_off',
- 'gl_trex_qmatch',
- 'gl_trex_qmatch_highp',
- ]
-
- supported_resolutions = {
- '720p': {
- '-ei -w': 1280,
- '-ei -h': 720,
- },
- '1080p': {
- '-ei -w': 1920,
- '-ei -h': 1080,
- }
- }
-
- parameters = [
- Parameter('times', kind=int, default=1, constraint=lambda x: x > 0,
- description=('Specifies the number of times the benchmark will be run in a "tight '
- 'loop", i.e. without performaing setup/teardown inbetween.')),
- Parameter('resolution', default=None, allowed_values=['720p', '1080p', '720', '1080'],
- description=('Explicitly specifies the resultion under which the benchmark will '
- 'be run. If not specfied, device\'s native resoution will used.')),
- Parameter('test_id', default='gl_manhattan_off', allowed_values=valid_test_ids,
- description='ID of the GFXBench test to be run.'),
- Parameter('run_timeout', kind=int, default=10 * 60,
- description="""
- Time out for workload execution. The workload will be killed if it hasn't completed
- withint this period.
- """),
- ]
-
- aliases = [
- Alias('manhattan', test_id='gl_manhattan'),
- Alias('manhattan_off', test_id='gl_manhattan_off'),
- Alias('manhattan_offscreen', test_id='gl_manhattan_off'),
- ]
-
- def setup(self, context):
- super(GlbCorp, self).setup(context)
- self.command = self._build_command()
- self.monitor = GlbRunMonitor(self.device)
- self.monitor.start()
-
- def start_activity(self):
- # Unlike with most other APK workloads, we're invoking the use case
- # directly by starting the activity with appropriate parameters on the
- # command line during execution, so we dont' need to start activity
- # during setup.
- pass
-
- def run(self, context):
- for _ in xrange(self.times):
- result = self.device.execute(self.command, timeout=self.run_timeout)
- if 'FAILURE' in result:
- raise WorkloadError(result)
- else:
- self.logger.debug(result)
- time.sleep(DELAY)
- self.monitor.wait_for_run_end(self.run_timeout)
-
- def update_result(self, context): # NOQA
- super(GlbCorp, self).update_result(context)
- self.monitor.stop()
- iteration = 0
- results = []
- with open(self.logcat_log) as fh:
- try:
- line = fh.next()
- result_lines = []
- while True:
- if self.result_start_regex.search(line):
- result_lines.append('{')
- line = fh.next()
- while self.preamble_regex.search(line):
- result_lines.append(self.preamble_regex.sub('', line))
- line = fh.next()
- try:
- result = json.loads(''.join(result_lines))
- results.append(result)
- if iteration:
- suffix = '_{}'.format(iteration)
- else:
- suffix = ''
- for sub_result in result['results']:
- frames = sub_result['score']
- elapsed_time = sub_result['elapsed_time'] / 1000
- fps = frames / elapsed_time
- context.result.add_metric('score' + suffix, frames, 'frames')
- context.result.add_metric('fps' + suffix, fps)
- except ValueError:
- self.logger.warning('Could not parse result for iteration {}'.format(iteration))
- result_lines = []
- iteration += 1
- line = fh.next()
- except StopIteration:
- pass # EOF
- if results:
- outfile = os.path.join(context.output_directory, 'glb-results.json')
- with open(outfile, 'wb') as wfh:
- json.dump(results, wfh, indent=4)
-
- def _build_command(self):
- command_params = []
- command_params.append('-e test_ids "{}"'.format(self.test_id))
- if self.resolution:
- if not self.resolution.endswith('p'):
- self.resolution += 'p'
- for k, v in self.supported_resolutions[self.resolution].iteritems():
- command_params.append('{} {}'.format(k, v))
- return 'am start -W -S -n {}/{} {}'.format(self.package,
- self.activity,
- ' '.join(command_params))
-
-
-class GlbRunMonitor(threading.Thread):
-
- regex = re.compile(r'I/Runner\s+\(\s*\d+\): finished:')
-
- def __init__(self, device):
- super(GlbRunMonitor, self).__init__()
- self.device = device
- self.daemon = True
- self.run_ended = threading.Event()
- self.stop_event = threading.Event()
- # Not using clear_logcat() because command collects directly, i.e. will
- # ignore poller.
- self.device.execute('logcat -c')
- if self.device.adb_name:
- self.command = ['adb', '-s', self.device.adb_name, 'logcat']
- else:
- self.command = ['adb', 'logcat']
-
- def run(self):
- proc = subprocess.Popen(self.command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- while not self.stop_event.is_set():
- if self.run_ended.is_set():
- time.sleep(DELAY)
- else:
- ready, _, _ = select.select([proc.stdout, proc.stderr], [], [], 2)
- if ready:
- line = ready[0].readline()
- if self.regex.search(line):
- self.run_ended.set()
-
- def stop(self):
- self.stop_event.set()
- self.join()
-
- def wait_for_run_end(self, timeout):
- self.run_ended.wait(timeout)
- self.run_ended.clear()
-
diff --git a/wlauto/workloads/glbenchmark/__init__.py b/wlauto/workloads/glbenchmark/__init__.py
deleted file mode 100644
index 9710c206..00000000
--- a/wlauto/workloads/glbenchmark/__init__.py
+++ /dev/null
@@ -1,158 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,E0203
-import re
-import os
-
-from wlauto import AndroidUiAutoBenchmark, Parameter, Alias
-from wlauto.exceptions import ConfigError
-import wlauto.common.android.resources
-
-# These maps provide use-friendly aliases for the most common options.
-USE_CASE_MAP = {
- 'egypt': 'GLBenchmark 2.5 Egypt HD',
- 'egypt-classic': 'GLBenchmark 2.1 Egypt Classic',
- 't-rex': 'GLBenchmark 2.7 T-Rex HD',
-}
-
-VARIANT_MAP = {
- 'onscreen': 'C24Z16 Onscreen Auto',
- 'offscreen': 'C24Z16 Offscreen Auto',
-}
-
-
-class Glb(AndroidUiAutoBenchmark):
-
- name = 'glbenchmark'
- description = """
- Measures the graphics performance of Android devices by testing
- the underlying OpenGL (ES) implementation.
-
- http://gfxbench.com/about-gfxbench.jsp
-
- From the website:
-
- The benchmark includes console-quality high-level 3D animations
- (T-Rex HD and Egypt HD) and low-level graphics measurements.
-
- With high vertex count and complex effects such as motion blur, parallax
- mapping and particle systems, the engine of GFXBench stresses GPUs in order
- provide users a realistic feedback on their device.
-
- """
- activity = 'com.glbenchmark.activities.GLBenchmarkDownloaderActivity'
- view = 'com.glbenchmark.glbenchmark27/com.glbenchmark.activities.GLBRender'
-
- packages = {
- '2.7.0': 'com.glbenchmark.glbenchmark27',
- '2.5.1': 'com.glbenchmark.glbenchmark25',
- }
- # If usecase is not specified the default usecase is the first supported usecase alias
- # for the specified version.
- supported_usecase_aliases = {
- '2.7.0': ['t-rex', 'egypt'],
- '2.5.1': ['egypt-classic', 'egypt'],
- }
-
- default_iterations = 1
- install_timeout = 500
-
- regex = re.compile(r'GLBenchmark (metric|FPS): (.*)')
-
- parameters = [
- Parameter('version', default='2.7.0', allowed_values=['2.7.0', '2.5.1'],
- description=('Specifies which version of the benchmark to run (different versions '
- 'support different use cases).')),
- Parameter('use_case', default=None,
- description="""Specifies which usecase to run, as listed in the benchmark menu; e.g.
- ``'GLBenchmark 2.5 Egypt HD'``. For convenience, two aliases are provided
- for the most common use cases: ``'egypt'`` and ``'t-rex'``. These could
- be use instead of the full use case title. For version ``'2.7.0'`` it defaults
- to ``'t-rex'``, for version ``'2.5.1'`` it defaults to ``'egypt-classic'``.
- """),
- Parameter('variant', default='onscreen',
- description="""Specifies which variant of the use case to run, as listed in the benchmarks
- menu (small text underneath the use case name); e.g. ``'C24Z16 Onscreen Auto'``.
- For convenience, two aliases are provided for the most common variants:
- ``'onscreen'`` and ``'offscreen'``. These may be used instead of full variant
- names.
- """),
- Parameter('times', kind=int, default=1,
- description=('Specfies the number of times the benchmark will be run in a "tight '
- 'loop", i.e. without performaing setup/teardown inbetween.')),
- Parameter('timeout', kind=int, default=200,
- description="""Specifies how long, in seconds, UI automation will wait for results screen to
- appear before assuming something went wrong.
- """),
- ]
-
- aliases = [
- Alias('glbench'),
- Alias('egypt', use_case='egypt'),
- Alias('t-rex', use_case='t-rex'),
- Alias('egypt_onscreen', use_case='egypt', variant='onscreen'),
- Alias('t-rex_onscreen', use_case='t-rex', variant='onscreen'),
- Alias('egypt_offscreen', use_case='egypt', variant='offscreen'),
- Alias('t-rex_offscreen', use_case='t-rex', variant='offscreen'),
- ]
-
- def __init__(self, device, **kwargs):
- super(Glb, self).__init__(device, **kwargs)
- self.uiauto_params['version'] = self.version
-
- if self.use_case is None:
- self.use_case = self.supported_usecase_aliases[self.version][0]
- if self.use_case.lower() in USE_CASE_MAP:
- if self.use_case not in self.supported_usecase_aliases[self.version]:
- raise ConfigError('usecases {} is not supported in version {}'.format(self.use_case, self.version))
- self.use_case = USE_CASE_MAP[self.use_case.lower()]
- self.uiauto_params['use_case'] = self.use_case.replace(' ', '_')
-
- if self.variant.lower() in VARIANT_MAP:
- self.variant = VARIANT_MAP[self.variant.lower()]
- self.uiauto_params['variant'] = self.variant.replace(' ', '_')
-
- self.uiauto_params['iterations'] = self.times
- self.run_timeout = 4 * 60 * self.times
-
- self.uiauto_params['timeout'] = self.timeout
- self.package = self.packages[self.version]
-
- def init_resources(self, context):
- self.apk_file = context.resolver.get(wlauto.common.android.resources.ApkFile(self), version=self.version)
- self.uiauto_file = context.resolver.get(wlauto.common.android.resources.JarFile(self))
- self.device_uiauto_file = self.device.path.join(self.device.working_directory,
- os.path.basename(self.uiauto_file))
- if not self.uiauto_package:
- self.uiauto_package = os.path.splitext(os.path.basename(self.uiauto_file))[0]
-
- def update_result(self, context):
- super(Glb, self).update_result(context)
- match_count = 0
- with open(self.logcat_log) as fh:
- for line in fh:
- match = self.regex.search(line)
- if match:
- metric = match.group(1)
- value, units = match.group(2).split()
- value = value.replace('*', '')
- if metric == 'metric':
- metric = 'Frames'
- units = 'frames'
- metric = metric + '_' + str(match_count // 2)
- context.result.add_metric(metric, value, units)
- match_count += 1
-
diff --git a/wlauto/workloads/glbenchmark/com.arm.wlauto.uiauto.glb.jar b/wlauto/workloads/glbenchmark/com.arm.wlauto.uiauto.glb.jar
deleted file mode 100644
index 57d0fb1e..00000000
--- a/wlauto/workloads/glbenchmark/com.arm.wlauto.uiauto.glb.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/glbenchmark/uiauto/build.sh b/wlauto/workloads/glbenchmark/uiauto/build.sh
deleted file mode 100755
index 820eae37..00000000
--- a/wlauto/workloads/glbenchmark/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.glb.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.glb.jar ..
-fi
diff --git a/wlauto/workloads/glbenchmark/uiauto/build.xml b/wlauto/workloads/glbenchmark/uiauto/build.xml
deleted file mode 100644
index 54ccc98b..00000000
--- a/wlauto/workloads/glbenchmark/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.glb" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/glbenchmark/uiauto/project.properties b/wlauto/workloads/glbenchmark/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/glbenchmark/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/glbenchmark/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/glbenchmark/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index 2c244d64..00000000
--- a/wlauto/workloads/glbenchmark/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,164 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.glb;
-
-import java.lang.Runtime;
-import java.lang.Process;
-import java.util.concurrent.TimeUnit;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.KeyEvent;
-
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "glb";
- public static int maxScrolls = 15;
-
- public void runUiAutomation() throws Exception {
- Bundle parameters = getParams();
- String version = parameters.getString("version");
- String useCase = parameters.getString("use_case").replace('_', ' ');
- String variant = parameters.getString("variant").replace('_', ' ');
- int iterations = Integer.parseInt(parameters.getString("iterations"));
- int testTimeoutSeconds = Integer.parseInt(parameters.getString("timeout"));
- if (iterations < 1)
- iterations = 1;
-
- goToPreformanceTestsMenu();
- selectUseCase(version, useCase, variant);
- hitStart();
- waitForResults(version, useCase, testTimeoutSeconds);
- extractResults();
- iterations -= 1;
-
- while (iterations > 0) {
- getUiDevice().pressBack();
- goToPreformanceTestsMenu();
- hitStart();
- waitForResults(version, useCase, testTimeoutSeconds);
- extractResults();
- iterations -= 1;
- }
-
- Bundle status = new Bundle();
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
- public void goToPreformanceTestsMenu() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject choosePerfTest = new UiObject(selector.text("Performance Tests")
- .className("android.widget.TextView"));
- choosePerfTest.clickAndWaitForNewWindow();
- }
-
- public void selectUseCase(String version, String useCase, String variant) throws Exception {
- UiSelector selector = new UiSelector();
- UiScrollable testList = new UiScrollable(selector.className("android.widget.ListView"));
- UiObject useCaseText = new UiObject(selector.className("android.widget.TextView")
- .text(useCase)
- );
- if (version.equals("2.7.0")){
- UiObject variantText = useCaseText.getFromParent(selector.className("android.widget.TextView")
- .text(variant));
- int scrolls = 0;
- while(!variantText.exists()) {
- testList.scrollForward();
- scrolls += 1;
- if (scrolls >= maxScrolls) {
- break;
- }
- }
- variantText.click();
- }
- else if (version.equals("2.5.1")){
- int scrolls = 0;
- while(!useCaseText.exists()) {
- testList.scrollForward();
- scrolls += 1;
- if (scrolls >= maxScrolls) {
- break;
- }
- }
- useCaseText.click();
- //UiSelector selector = new UiSelector();
- UiObject modeDisableModeButton = null;
- if (variant.contains("Onscreen"))
- modeDisableModeButton = new UiObject(selector.text("Offscreen"));
- else
- modeDisableModeButton = new UiObject(selector.text("Onscreen"));
- modeDisableModeButton.click();
- }
- }
-
- public void hitStart() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject startButton = new UiObject(selector.text("Start"));
- startButton.clickAndWaitForNewWindow();
- }
-
- public void waitForResults(String version, String useCase, int timeout) throws Exception {
- UiSelector selector = new UiSelector();
- UiObject results = null;
- if (version.equals("2.7.0"))
- results = new UiObject(selector.text("Results").className("android.widget.TextView"));
- else
- results = new UiObject(selector.text(useCase).className("android.widget.TextView"));
- Log.v(TAG, "Waiting for results screen.");
- // On some devices, the results screen sometimes gets "backgrounded" (or
- // rather, doesn't seem to come to foreground to begin with). This code
- // attemps to deal with that by explicitly bringing glbench to the
- // foreground if results screen doesn't appear within testTimeoutSeconds seconds of
- // starting GLB.
- if (!results.waitForExists(TimeUnit.SECONDS.toMillis(timeout))) {
- Log.v(TAG, "Results screen not found. Attempting to bring to foreground.");
- String[] commandLine = {"am", "start",
- "-a", "android.intent.action.MAIN",
- "-c", "android.intent.category.LAUNCHER",
- "-n", "com.glbenchmark.glbenchmark27/com.glbenchmark.activities.GLBenchmarkDownloaderActivity"};
- Process proc = Runtime.getRuntime().exec(commandLine);
- proc.waitFor();
- Log.v(TAG, String.format("am start exit value: %d", proc.exitValue()));
- if (!results.exists()) {
- throw new UiObjectNotFoundException("Could not find results screen.");
- }
- }
- Log.v(TAG, "Results screen found.");
- }
-
- public void extractResults() throws Exception {
- Log.v(TAG, "Extracting results.");
- sleep(2); // wait for the results screen to fully load.
- UiSelector selector = new UiSelector();
- UiObject fpsText = new UiObject(selector.className("android.widget.TextView")
- .textContains("fps")
- );
- UiObject otherText = fpsText.getFromParent(selector.className("android.widget.TextView").index(0));
-
- Log.v(TAG, String.format("GLBenchmark metric: %s", otherText.getText().replace('\n', ' ')));
- Log.v(TAG, String.format("GLBenchmark FPS: %s", fpsText.getText().replace('\n', ' ')));
- }
-}
diff --git a/wlauto/workloads/googlemap/__init__.py b/wlauto/workloads/googlemap/__init__.py
deleted file mode 100644
index 59c50cc7..00000000
--- a/wlauto/workloads/googlemap/__init__.py
+++ /dev/null
@@ -1,39 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import GameWorkload, Parameter
-
-
-class GoogleMap(GameWorkload):
-
- name = 'googlemap'
- description = """
- Navigation app.
-
- Stock map provided by Google Inc.
- Based on revent, we can use this workload to
- do multiple tasks such as navigation usecases,
- swipe & pinch etc.
-
- Provided revent is for Odriod XU3 for navigation use
- case. For running on other devices, we need to build
- revent.
- """
- package = 'com.google.android.apps.maps'
- activity = 'com.google.android.maps.MapsActivity'
- loading_time = 20
-
-
diff --git a/wlauto/workloads/googlemap/revent_files/generic_android.run.revent b/wlauto/workloads/googlemap/revent_files/generic_android.run.revent
deleted file mode 100644
index b3679fbb..00000000
--- a/wlauto/workloads/googlemap/revent_files/generic_android.run.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/googlemap/revent_files/generic_android.setup.revent b/wlauto/workloads/googlemap/revent_files/generic_android.setup.revent
deleted file mode 100644
index 1dd8cf57..00000000
--- a/wlauto/workloads/googlemap/revent_files/generic_android.setup.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/gunbros2/__init__.py b/wlauto/workloads/gunbros2/__init__.py
deleted file mode 100644
index 1ae07b4b..00000000
--- a/wlauto/workloads/gunbros2/__init__.py
+++ /dev/null
@@ -1,42 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=R0801
-import os
-import time
-import tarfile
-import shutil
-
-from wlauto import settings
-from wlauto.common.android.workload import GameWorkload
-from wlauto.exceptions import WorkloadError, DeviceError
-from wlauto.utils.misc import check_output
-from wlauto.common.resources import PluginAsset
-
-
-class GunBros(GameWorkload):
-
- name = 'gunbros2'
- description = """
- Gun Bros. 2 game.
-
- """
- package = 'com.glu.gunbros2'
- activity = 'com.google.android.vending.expansion.downloader_impl.DownloaderActivity'
- asset_file = 'com.glu.gunbros2.tar.gz'
- ondevice_asset_root = '/data'
- loading_time = 20
- install_timeout = 500
-
diff --git a/wlauto/workloads/hackbench/__init__.py b/wlauto/workloads/hackbench/__init__.py
deleted file mode 100644
index 73f2c17d..00000000
--- a/wlauto/workloads/hackbench/__init__.py
+++ /dev/null
@@ -1,88 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# pylint: disable=W0201, C0103
-
-import os
-import re
-
-from wlauto import Workload, Parameter, Executable
-
-
-hackbench_results_txt = 'hackbench_results.txt'
-
-regex_map = {"total_groups": (re.compile(r'(\d+) groups'), "groups"),
- "total_fd": (re.compile(r'(\d+) file descriptors'), "file_descriptors"),
- "total_messages": (re.compile(r'(\d+) messages'), "messages"),
- "total_bytes": (re.compile(r'(\d+) bytes'), "bytes"),
- "test_time": (re.compile(r'Time: (\d+.*)'), "seconds")
- }
-
-
-class Hackbench(Workload):
-
- name = 'hackbench'
- description = """
- Hackbench runs a series of tests for the Linux scheduler.
-
- For details, go to:
- https://github.com/linux-test-project/ltp/
-
- """
-
- parameters = [
- # Workload parameters go here e.g.
- Parameter('datasize', kind=int, default=100, description='Message size in bytes.'),
- Parameter('groups', kind=int, default=10, description='Number of groups.'),
- Parameter('loops', kind=int, default=100, description='Number of loops.'),
- Parameter('fds', kind=int, default=40, description='Number of file descriptors.'),
- Parameter('extra_params', kind=str, default='',
- description='Extra parameters to pass in. See the hackbench man page'
- ' or type `hackbench --help` for list of options.'),
- Parameter('duration', kind=int, default=30, description='Test duration in seconds.')
- ]
-
- def setup(self, context):
- timeout_buf = 10
- self.command = '{} -s {} -g {} -l {} {} > {}'
- self.device_binary = None
- self.hackbench_result = os.path.join(self.device.working_directory, hackbench_results_txt)
- self.run_timeout = self.duration + timeout_buf
-
- self.binary_name = 'hackbench'
- host_binary = context.resolver.get(Executable(self, self.device.abi, self.binary_name))
- self.device_binary = self.device.install(host_binary)
-
- self.command = self.command.format(self.device_binary, self.datasize, self.groups,
- self.loops, self.extra_params, self.hackbench_result)
-
- def run(self, context):
- self.device.execute(self.command, timeout=self.run_timeout)
-
- def update_result(self, context):
- self.device.pull(self.hackbench_result, context.output_directory)
-
- with open(os.path.join(context.output_directory, hackbench_results_txt)) as hackbench_file:
- for line in hackbench_file:
- for label, (regex, units) in regex_map.iteritems():
- match = regex.search(line)
- if match:
- context.result.add_metric(label, float(match.group(1)), units)
-
- def teardown(self, context):
- self.device.uninstall(self.binary_name)
- self.device.execute('rm -f {}'.format(self.hackbench_result))
-
- def validate(self):
- pass
diff --git a/wlauto/workloads/hackbench/bin/arm64/hackbench b/wlauto/workloads/hackbench/bin/arm64/hackbench
deleted file mode 100755
index 04ea2714..00000000
--- a/wlauto/workloads/hackbench/bin/arm64/hackbench
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/hackbench/bin/armeabi/hackbench b/wlauto/workloads/hackbench/bin/armeabi/hackbench
deleted file mode 100755
index e3839483..00000000
--- a/wlauto/workloads/hackbench/bin/armeabi/hackbench
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/hackbench/src/LICENSE b/wlauto/workloads/hackbench/src/LICENSE
deleted file mode 100644
index 2d4b1ab8..00000000
--- a/wlauto/workloads/hackbench/src/LICENSE
+++ /dev/null
@@ -1,3 +0,0 @@
-Source for these binaries can be obtained here:
-
-http://git.kernel.org/cgit/linux/kernel/git/clrkwllms/rt-tests.git
diff --git a/wlauto/workloads/homescreen/__init__.py b/wlauto/workloads/homescreen/__init__.py
deleted file mode 100644
index 737d435b..00000000
--- a/wlauto/workloads/homescreen/__init__.py
+++ /dev/null
@@ -1,43 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101
-
-import time
-
-from wlauto import Workload, Parameter
-
-
-class HomeScreen(Workload):
-
- name = 'homescreen'
- description = """
- A workload that goes to the home screen and idles for the the
- specified duration.
-
- """
- supported_platforms = ['android']
-
- parameters = [
- Parameter('duration', kind=int, default=20,
- description='Specifies the duration, in seconds, of this workload.'),
- ]
-
- def setup(self, context):
- self.device.clear_logcat()
- self.device.execute('input keyevent 3') # press the home key
-
- def run(self, context):
- time.sleep(self.duration)
diff --git a/wlauto/workloads/hwuitest/__init__.py b/wlauto/workloads/hwuitest/__init__.py
deleted file mode 100644
index 376bd202..00000000
--- a/wlauto/workloads/hwuitest/__init__.py
+++ /dev/null
@@ -1,108 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#pylint: disable=E1101,W0201
-
-import os
-import re
-from collections import defaultdict
-
-from wlauto import Workload, Parameter, Executable
-from wlauto.utils.types import caseless_string
-
-
-BINARY = "hwuitest"
-IGNORED_METRICS = ["Stats since", "Total frames rendered"]
-
-
-class HWUITest(Workload):
-
- name = 'hwuitest'
- description = """
- Tests UI rendering latency on android devices
- """
- supported_platforms = ['android']
-
- parameters = [
- Parameter('test', kind=caseless_string, default="shadowgrid",
- allowed_values=["shadowgrid", "rectgrid", "oval"],
- description="""
- The test to run:
- - ``'shadowgrid'``: creates a grid of rounded rects that
- cast shadows, high CPU & GPU load
- - ``'rectgrid'``: creates a grid of 1x1 rects
- - ``'oval'``: draws 1 oval
- """),
- Parameter('loops', kind=int, default=3,
- description="The number of test iterations."),
- Parameter('frames', kind=int, default=150,
- description="The number of frames to run the test over."),
- ]
-
- def setup(self, context):
- host_exe = context.resolver.get(Executable(self,
- self.device.abi,
- BINARY))
- self.device.install(host_exe)
-
- def run(self, context):
- self.output = self.device.execute("{} {} {} {}".format(BINARY,
- self.test.lower(),
- self.loops,
- self.frames))
-
- def update_result(self, context):
- outfile = os.path.join(context.output_directory, 'hwuitest.output')
- with open(outfile, 'w') as wfh:
- wfh.write(self.output)
- context.add_artifact('hwuitest', outfile, kind='raw')
-
- normal = re.compile(r'(?P<value>\d*)(?P<unit>\w*)')
- with_pct = re.compile(r'(?P<value>\d*) \((?P<percent>.*)%\)')
- count = 0
- for line in self.output.splitlines():
- #Filters out "Success!" and blank lines
- try:
- metric, value_string = [p.strip() for p in line.split(':', 1)]
- except ValueError:
- continue
-
- # Filters out unwanted lines
- if metric in IGNORED_METRICS:
- continue
-
- if metric == "Janky frames":
- count += 1
- match = with_pct.match(value_string).groupdict()
- context.result.add_metric(metric,
- match['value'],
- None,
- classifiers={"loop": count,
- "frames": self.frames})
- context.result.add_metric(metric + "_pct",
- match['value'],
- "%",
- classifiers={"loop": count,
- "frames": self.frames})
- else:
- match = normal.match(value_string).groupdict()
- context.result.add_metric(metric,
- match['value'],
- match['unit'],
- classifiers={"loop": count,
- "frames": self.frames})
-
- def teardown(self, context):
- self.device.uninstall(BINARY)
diff --git a/wlauto/workloads/idle/__init__.py b/wlauto/workloads/idle/__init__.py
deleted file mode 100644
index 9fdb6f0b..00000000
--- a/wlauto/workloads/idle/__init__.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101
-
-import time
-
-from wlauto import Workload, Parameter
-from wlauto.exceptions import WorkloadError, ConfigError
-
-
-class IdleWorkload(Workload):
-
- name = 'idle'
- description = """
- Do nothing for the specified duration.
-
- On android devices, this may optionally stop the Android run time, if
- ``stop_android`` is set to ``True``.
-
- .. note:: This workload requires the device to be rooted.
-
- """
-
- parameters = [
- Parameter('duration', kind=int, default=20,
- description='Specifies the duration, in seconds, of this workload.'),
- Parameter('stop_android', kind=bool, default=False,
- description='Specifies whether the Android run time should be stopped. '
- '(Can be set only for Android devices).'),
- ]
-
- def setup(self, context):
- if self.stop_android:
- if self.device.os != 'android':
- raise ConfigError('stop_android can only be set for Android devices')
- if not self.device.is_rooted:
- raise WorkloadError('Idle workload requires the device to be rooted in order to stop Android.')
-
- def run(self, context):
- self.logger.debug('idling...')
- if self.stop_android:
- timeout = self.duration + 10
- self.device.execute('stop && sleep {} && start'.format(self.duration),
- timeout=timeout, as_root=True)
- else:
- time.sleep(self.duration)
-
- def teardown(self, context):
- if self.stop_android:
- self.logger.debug('Waiting for Android restart to complete...')
- # Wait for the boot animation to start and then to finish.
- while self.device.execute('getprop init.svc.bootanim').strip() == 'stopped':
- time.sleep(0.2)
- while self.device.execute('getprop init.svc.bootanim').strip() == 'running':
- time.sleep(1)
diff --git a/wlauto/workloads/iozone/LICENSE b/wlauto/workloads/iozone/LICENSE
deleted file mode 100644
index dbcb9eb3..00000000
--- a/wlauto/workloads/iozone/LICENSE
+++ /dev/null
@@ -1,22 +0,0 @@
-
- Copyright 1991, 1992, 1994, 1998, 1999, 2002 William D. Norcott
-
- License to freely use and distribute this software is hereby granted
- by the author, subject to the condition that this copyright notice
- remains intact. The author retains the exclusive right to publish
- derivative works based on this work, including, but not limited to,
- revised versions of this work.
-
-
- THIS SOFTWARE IS PROVIDED BY DON CAPPS AND THE IOZONE CREW "AS IS"
- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
- PARTICULAR PURPOSE ARE DISCLAIMED.
-
- IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE FOR ANY
- DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
- GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
- INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
- IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
- OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE.
diff --git a/wlauto/workloads/iozone/__init__.py b/wlauto/workloads/iozone/__init__.py
deleted file mode 100644
index b0c83b79..00000000
--- a/wlauto/workloads/iozone/__init__.py
+++ /dev/null
@@ -1,316 +0,0 @@
-# Copyright 2012-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# pylint: disable=attribute-defined-outside-init
-from collections import OrderedDict
-from itertools import izip_longest
-import os
-import re
-import csv
-
-
-from wlauto import Workload, Parameter, Executable
-from wlauto.exceptions import ConfigError
-from wlauto.utils.types import list_of_ints
-
-
-iozone_results_txt = 'iozone_results.txt'
-
-
-class Iozone(Workload):
- name = 'iozone'
- description = """
- Iozone is a filesystem benchmark that runs a series of disk
- I/O performance tests.
-
- Here is a list of tests that you can run in the iozone
- workload. The descriptions are from the official iozone
- document.
-
- 0 - Write Test
- Measure performance of writing a new file. Other
- tests rely on the file written by this, so it must
- always be enabled (WA will automatically neable this
- if not specified).
-
- 1 - Rewrite Test
- Measure performance of writing an existing file.
-
- 2 - Read Test
- Measure performance of reading an existing file.
-
- 3 - Reread Test
- Measure performance of rereading an existing file.
-
- 4 - Random Read Test
- Measure performance of reading a file by accessing
- random locations within the file.
-
- 5 - Random Write Test
- Measure performance of writing a file by accessing
- random locations within the file.
-
- 6 - Backwards Read Test
- Measure performance of reading a file backwards.
-
- 7 - Record Rewrite Test
- Measure performance of writing and rewriting a
- particular spot within the file.
-
- 8 - Strided Read Test
- Measure performance of reading a file with strided
- access behavior.
-
- 9 - Fwrite Test
- Measure performance of writing a file using the
- library function fwrite() that performances
- buffered write operations.
-
- 10 - Frewrite Test
- Measure performance of writing a file using the
- the library function fwrite() that performs
- buffered and blocked write operations.
-
- 11 - Fread Test
- Measure performance of reading a file using the
- library function fread() that performs buffered
- and blocked read operations.
-
- 12 - Freread Test
- Same as the Fread Test except the current file
- being read was read previously sometime in the
- past.
-
- By default, iozone will run all tests in auto mode. To run
- specific tests, they must be written in the form of:
-
- [0,1,4,5]
-
- Please enable classifiers in your agenda or config file
- in order to display the results properly in the results.csv
- file.
-
- The official website for iozone is at www.iozone.org.
- """
-
- parameters = [
- Parameter('tests', kind=list_of_ints, allowed_values=range(13),
- description='List of performance tests to run.'),
- Parameter('auto_mode', kind=bool, default=True,
- description='Run tests in auto mode.'),
- Parameter('timeout', kind=int, default=14400,
- description='Timeout for the workload.'),
- Parameter('file_size', kind=int,
- description='Fixed file size.'),
- Parameter('record_length', kind=int,
- description='Fixed record length.'),
- Parameter('threads', kind=int,
- description='Number of threads'),
- Parameter('other_params', kind=str, default='',
- description='Other parameter. Run iozone -h to see'
- ' list of options.')
- ]
-
- def initialize(self, context):
- Iozone.host_binary = context.resolver.get(Executable(self,
- self.device.abi,
- 'iozone'))
- Iozone.device_binary = self.device.install(Iozone.host_binary)
-
- def setup(self, context):
- self.results = os.path.join(self.device.working_directory,
- iozone_results_txt)
- self.command = self._build_command()
-
- if self.threads and self.auto_mode:
- raise ConfigError("You cannot set the number of threads and enable"
- " auto mode at the same time.")
-
- def _build_command(self):
- # pylint: disable=access-member-before-definition
- iozone_command = 'cd {} && {}'.format(self.device.working_directory,
- self.device_binary)
-
- if self.auto_mode:
- iozone_command += ' -a'
-
- if self.tests:
- if 0 not in self.tests:
- self.tests = [0] + self.tests
- iozone_command += ''.join([' -i {}'.format(t) for t in self.tests])
-
- if self.record_length > 0:
- iozone_command += ' -r {}'.format(self.record_length)
-
- if self.threads > 0:
- iozone_command += ' -t {}'.format(self.threads)
-
- if self.file_size > 0:
- iozone_command += ' -s {}'.format(self.file_size)
-
- if self.other_params:
- iozone_command += ' ' + self.other_params
-
- # enable reporting mode for parsing non-thread results
- iozone_command += ' -R > {}'.format(self.results)
-
- # check if -b option is used
- match = re.search(r'-b (.?\w+.?\w+?\s)', iozone_command)
- if match:
- self.user_file = match.group(1)
- self.device_output_file = os.path.join(self.device.working_directory,
- self.user_file)
-
- return iozone_command
-
- def run(self, context):
- self.device.execute(self.command, timeout=self.timeout)
-
- def update_result(self, context):
- self.device.pull(self.results, context.output_directory)
- self.outfile = os.path.join(context.output_directory,
- iozone_results_txt)
-
- if '-b' in self.other_params:
- self.device.pull(self.device_output_file,
- context.output_directory)
-
- # if running in thread mode
- if self.threads:
- thread_results = self.parse_thread_results()
-
- for name, value, units in thread_results:
- context.add_metric(name, value, units)
-
- # for non-thread mode results
- else:
- with open(self.outfile, 'r') as iozone_file:
- iozone_file = (line.replace('\"', '') for line in iozone_file)
- table_list = []
-
- # begin parsing results
- for line in iozone_file:
- if 'Writer report' in line:
- table_list.append(line.split())
- break
-
- for line in iozone_file:
- if 'exiting' in line or 'completed' in line:
- break
- else:
- table_list.append(line.split())
-
- # create csv file
- self.write_to_csv(context, table_list)
-
- # parse metrics
- self.parse_metrics(context, table_list)
-
- def write_to_csv(self, context, csv_table_list):
- self.test_file = os.path.join(context.output_directory,
- 'table_results.csv')
-
- # create csv file for writing
- csv_file = open(self.test_file, 'w')
- wr = csv.writer(csv_file, delimiter=',')
-
- # shift second row by adding extra element
- # for "prettier" formatting
- index = 0
- for element in csv_table_list:
- if element:
- if index == 1:
- element.insert(0, '0')
- index += 1
- else:
- index = 0
-
- # write to csv file
- for item in csv_table_list:
- wr.writerow(item)
-
- csv_file.close()
-
- # break list of results into smaller groups based on
- # test name
- def parse_metrics(self, context, plist): # pylint: disable=no-self-use
- subvalue_list = []
- value_list = []
- for values in plist:
- if values:
- subvalue_list.append(values)
- else:
- value_list.append(subvalue_list)
- subvalue_list = []
-
- # If users run a list of specific tests, make
- # sure that the results for the last test
- # executed are appended.
- if subvalue_list:
- value_list.append(subvalue_list)
-
- for reports in value_list:
- # grab report name and convert it to a string
- report_name = reports[0]
- report_name = report_name[:-1]
- report_name = '_'.join(report_name).lower()
-
- record_sizes = reports[1]
- values = reports[2:]
-
- for v in values:
- templist = OrderedDict(izip_longest(record_sizes, v))
-
- for reclen, value in templist.items():
- if reclen is '0':
- fs = value
-
- if value is None:
- value = '0'
-
- classifiers = {'reclen': reclen, 'file_size': fs}
- if reclen != '0':
- context.add_metric(report_name, int(value), 'kb/s',
- classifiers=classifiers)
-
- # parse thread-mode results
- def parse_thread_results(self):
- results = []
- with open(self.outfile, 'r') as iozone_file:
- for line in iozone_file:
- # grab section of data we care about
- if 'Throughput report' in line:
- break
- else:
- if '=' in line:
- if 'Time Resolution' not in line:
- line = line.replace('=', '')
- line = line.split()
-
- # grab headers
- if len(line) >= 8:
- header = line[0]
- subheader = ' '.join(line[-5:-2])
- header += ' ' + subheader
- else:
- header = ' '.join(line[0:2])
-
- units = line[-1]
- value = line[-2]
- tup = (header, value, units)
- results.append(tup)
-
- return results
-
- def finalize(self, context):
- self.device.uninstall(self.device_binary)
diff --git a/wlauto/workloads/iozone/bin/arm64/iozone b/wlauto/workloads/iozone/bin/arm64/iozone
deleted file mode 100755
index c489e0cf..00000000
--- a/wlauto/workloads/iozone/bin/arm64/iozone
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/iozone/bin/armeabi/iozone b/wlauto/workloads/iozone/bin/armeabi/iozone
deleted file mode 100755
index a04fe066..00000000
--- a/wlauto/workloads/iozone/bin/armeabi/iozone
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/ironman/__init__.py b/wlauto/workloads/ironman/__init__.py
deleted file mode 100644
index 1bbef415..00000000
--- a/wlauto/workloads/ironman/__init__.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=R0801
-import os
-import time
-
-from wlauto import GameWorkload
-from wlauto.exceptions import WorkloadError, DeviceError
-from wlauto.utils.misc import check_output
-
-
-class IronMan(GameWorkload):
-
- name = 'ironman3'
- description = """
- Iron Man 3 game.
-
- """
- package = 'com.gameloft.android.ANMP.GloftIMHM'
- activity = '.GameActivity'
-
- asset_file = 'obb:com.gameloft.android.ANMP.GloftIMHM.tar.gz'
diff --git a/wlauto/workloads/ironman/revent_files/Nexus10.run.revent b/wlauto/workloads/ironman/revent_files/Nexus10.run.revent
deleted file mode 100644
index 96955bad..00000000
--- a/wlauto/workloads/ironman/revent_files/Nexus10.run.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/ironman/revent_files/Nexus10.setup.revent b/wlauto/workloads/ironman/revent_files/Nexus10.setup.revent
deleted file mode 100644
index 8cc49d3a..00000000
--- a/wlauto/workloads/ironman/revent_files/Nexus10.setup.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/krazykart/__init__.py b/wlauto/workloads/krazykart/__init__.py
deleted file mode 100644
index 055816a7..00000000
--- a/wlauto/workloads/krazykart/__init__.py
+++ /dev/null
@@ -1,28 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import GameWorkload
-
-
-class KrazyKartRacing(GameWorkload):
-
- name = 'krazykart'
- description = """
- Krazy Kart Racing game.
-
- """
- package = 'com.polarbit.sg2.krazyracers'
- activity = '.krazyracers'
diff --git a/wlauto/workloads/krazykart/revent_files/.empty b/wlauto/workloads/krazykart/revent_files/.empty
deleted file mode 100644
index e69de29b..00000000
--- a/wlauto/workloads/krazykart/revent_files/.empty
+++ /dev/null
diff --git a/wlauto/workloads/linpack/__init__.py b/wlauto/workloads/linpack/__init__.py
deleted file mode 100644
index 3f728ab9..00000000
--- a/wlauto/workloads/linpack/__init__.py
+++ /dev/null
@@ -1,64 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,E0203
-
-import os
-import re
-
-from wlauto import AndroidUiAutoBenchmark, Parameter
-
-
-class Linpack(AndroidUiAutoBenchmark):
-
- name = 'linpack'
- description = """
- The LINPACK Benchmarks are a measure of a system's floating point computing
- power.
-
- http://en.wikipedia.org/wiki/LINPACK_benchmarks
-
- From the article:
-
- Introduced by Jack Dongarra, they measure how fast a computer solves
- a dense n by n system of linear equations Ax = b, which is a common task in
- engineering.
-
- """
- package = 'com.greenecomputing.linpackpro'
- activity = '.Linpack'
- summary_metrics = ['Linpack ST', 'Linpack MT']
- regex = re.compile(r'LINPACK RESULT: (?P<type>\w+) (?P<value>\S+)')
-
- parameters = [
- Parameter('output_file', default=None,
- description='On-device output file path.'),
- ]
-
- def __init__(self, device, **kwargs):
- super(Linpack, self).__init__(device, **kwargs)
- if self.output_file is None:
- self.output_file = os.path.join(self.device.working_directory, 'linpack.txt')
- self.uiauto_params['output_file'] = self.output_file
-
- def update_result(self, context):
- super(Linpack, self).update_result(context)
- with open(self.logcat_log) as fh:
- for line in fh:
- match = self.regex.search(line)
- if match:
- metric = 'Linpack ' + match.group('type')
- value = float(match.group('value'))
- context.result.add_metric(metric, value, 'MFLOPS')
diff --git a/wlauto/workloads/linpack/com.arm.wlauto.uiauto.linpack.jar b/wlauto/workloads/linpack/com.arm.wlauto.uiauto.linpack.jar
deleted file mode 100644
index 8835bdee..00000000
--- a/wlauto/workloads/linpack/com.arm.wlauto.uiauto.linpack.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/linpack/uiauto/build.sh b/wlauto/workloads/linpack/uiauto/build.sh
deleted file mode 100755
index 5ff5da2e..00000000
--- a/wlauto/workloads/linpack/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.linpack.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.linpack.jar ..
-fi
diff --git a/wlauto/workloads/linpack/uiauto/build.xml b/wlauto/workloads/linpack/uiauto/build.xml
deleted file mode 100644
index a532fd35..00000000
--- a/wlauto/workloads/linpack/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.linpack" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/linpack/uiauto/project.properties b/wlauto/workloads/linpack/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/linpack/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/linpack/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/linpack/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index de6c39ef..00000000
--- a/wlauto/workloads/linpack/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,59 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.linpack;
-
-import java.util.concurrent.TimeUnit;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "linpack";
-
- public void runUiAutomation() throws Exception{
- UiSelector selector = new UiSelector();
- UiObject runSingleButton = new UiObject(selector.text("Run Single Thread"));
- runSingleButton.click();
- runSingleButton.waitUntilGone(500);
- runSingleButton.waitForExists(TimeUnit.SECONDS.toMillis(30));
-
- UiObject mflops = new UiObject(new UiSelector().className("android.widget.TextView").instance(2));
- Log.v(TAG, String.format("LINPACK RESULT: ST %s", mflops.getText()));
-
- UiObject runMultiButton = new UiObject(selector.text("Run Multi-Thread"));
- runMultiButton.click();
- runMultiButton.waitUntilGone(500);
- runMultiButton.waitForExists(TimeUnit.SECONDS.toMillis(30));
-
- Log.v(TAG, String.format("LINPACK RESULT: MT %s", mflops.getText()));
-
- Bundle status = new Bundle();
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
-}
diff --git a/wlauto/workloads/linpack_cli/LICENSE b/wlauto/workloads/linpack_cli/LICENSE
deleted file mode 100644
index 918f3f43..00000000
--- a/wlauto/workloads/linpack_cli/LICENSE
+++ /dev/null
@@ -1,6 +0,0 @@
-The source for the linpack binaries included with this workload may
-be viewed here:
-
-http://www.netlib.org/benchmark/linpackc.new
-
-This code is in the public domain.
diff --git a/wlauto/workloads/linpack_cli/__init__.py b/wlauto/workloads/linpack_cli/__init__.py
deleted file mode 100644
index e11b41fa..00000000
--- a/wlauto/workloads/linpack_cli/__init__.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# pylint: disable=attribute-defined-outside-init
-import os
-
-from wlauto import Workload, Parameter, Executable
-
-
-class LinpackCliWorkload(Workload):
-
- name = 'linpack-cli'
- description = """
- linpack benchmark with a command line interface
-
- Benchmarks FLOPS (floating point operations per second).
-
- This is the oldschool version of the bencmark. Source may be viewed here:
-
- http://www.netlib.org/benchmark/linpackc.new
-
- """
-
- parameters = [
- Parameter('array_size', kind=int, default=200,
- description='size of arrays to be used by the benchmark.'),
- ]
-
- binary = None # set during initialization
-
- def initialize(self, context):
- host_exe = context.resolver.get(Executable(self, self.device.abi, 'linpack'))
- LinpackCliWorkload.binary = self.device.install(host_exe)
-
- def setup(self, context):
- self.command = '(echo {}; echo q) | {}'.format(self.array_size, self.binary)
-
- def run(self, context):
- self.raw_output = self.device.execute(self.command,
- timeout=(self.array_size / 10) ** 2,
- check_exit_code=False)
-
- def update_result(self, context):
- raw_outfile = os.path.join(context.output_directory, 'linpack-raw.txt')
- with open(raw_outfile, 'w') as wfh:
- wfh.write(self.raw_output)
- context.add_artifact('linpack-raw', raw_outfile, kind='raw')
-
- marker = '--------------------'
- lines = iter(self.raw_output.split('\n'))
- for line in lines:
- if marker in line:
- break
-
- for line in lines:
- line = line.strip()
- if not line:
- break
- parts = line.split()
- classifiers = {'reps': int(parts[0])}
- context.add_metric('time', float(parts[1]), 'seconds',
- lower_is_better=True, classifiers=classifiers)
- context.add_metric('KFLOPS', float(parts[5]), 'KFLOPS',
- lower_is_better=True, classifiers=classifiers)
-
- def finalize(self, context):
- self.device.uninstall(self.binary)
diff --git a/wlauto/workloads/linpack_cli/bin/arm64/linpack b/wlauto/workloads/linpack_cli/bin/arm64/linpack
deleted file mode 100755
index 4b4c1c34..00000000
--- a/wlauto/workloads/linpack_cli/bin/arm64/linpack
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/linpack_cli/bin/armeabi/linpack b/wlauto/workloads/linpack_cli/bin/armeabi/linpack
deleted file mode 100755
index 66362be6..00000000
--- a/wlauto/workloads/linpack_cli/bin/armeabi/linpack
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/lmbench/__init__.py b/wlauto/workloads/lmbench/__init__.py
deleted file mode 100644
index a342ab93..00000000
--- a/wlauto/workloads/lmbench/__init__.py
+++ /dev/null
@@ -1,162 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#pylint: disable=E1101,W0201
-
-import os
-
-from wlauto import Workload, Parameter, Executable
-from wlauto.utils.types import list_or_integer, list_or_string
-
-
-class lmbench(Workload):
-
- name = 'lmbench'
-
- # Define supported tests. Each requires a _setup_{name} routine below
- test_names = ['lat_mem_rd', 'bw_mem']
-
- description = """
- Run a subtest from lmbench, a suite of portable ANSI/C microbenchmarks for
- UNIX/POSIX.
-
- In general, lmbench measures two key features: latency and bandwidth. This
- workload supports a subset of lmbench tests. lat_mem_rd can be used to
- measure latencies to memory (including caches). bw_mem can be used to
- measure bandwidth to/from memory over a range of operations.
-
- Further details, and source code are available from:
-
- http://sourceforge.net/projects/lmbench/.
-
- See lmbench/bin/README for license details.
- """
-
- parameters = [
- Parameter('test', default='lat_mem_rd', allowed_values=test_names,
- description='''
- Specifies an lmbench test to run.
- '''),
- Parameter('stride', kind=list_or_integer, default=[128],
- description='''
- Stride for lat_mem_rd test. Workload will iterate over one or
- more integer values.
- '''),
- Parameter('thrash', kind=bool, default=True,
- description='Sets -t flag for lat_mem_rd_test'),
- Parameter('size', kind=list_or_string, default="4m",
- description='Data set size for lat_mem_rd bw_mem tests.'),
- Parameter('mem_category', kind=list_or_string,
- default=('rd', 'wr', 'cp', 'frd', 'fwr', 'fcp', 'bzero', 'bcopy'),
- description='List of memory catetories for bw_mem test.'),
- Parameter('parallelism', kind=int, default=None,
- description='Parallelism flag for tests that accept it.'),
- Parameter('warmup', kind=int, default=None,
- description='Warmup flag for tests that accept it.'),
- Parameter('repetitions', kind=int, default=None,
- description='Repetitions flag for tests that accept it.'),
- Parameter('force_abi', kind=str, default=None,
- description='''
- Override device abi with this value. Can be used to force
- arm32 on 64-bit devices.
- '''),
- Parameter('run_timeout', kind=int, default=900,
- description="""
- Timeout for execution of the test.
- """),
- Parameter('times', kind=int, default=1, constraint=lambda x: x > 0,
- description="""
- Specifies the number of times the benchmark will be run in a
- "tight loop", i.e. without performaing setup/teardown
- inbetween. This parameter is distinct from "repetitions", as
- the latter takes place within the benchmark and produces a
- single result.
- """),
- Parameter('taskset_mask', kind=int,
- description="""
- Specifies the CPU mask the benchmark process will be pinned to.
- """),
- ]
-
- def setup(self, context):
-
- abi = self.device.abi
- if self.force_abi:
- abi = self.force_abi
-
- # self.test has been pre-validated, so this _should_ only fail if there's an abi mismatch
- host_exe = context.resolver.get(Executable(self, abi, self.test))
- self.device_exe = self.device.install(host_exe)
- self.commands = []
-
- setup_test = getattr(self, '_setup_{}'.format(self.test))
- setup_test()
-
- def run(self, context):
- self.output = []
-
- for time in xrange(self.times):
- for command in self.commands:
- self.output.append("Output for time #{}, {}: ".format(time + 1, command))
- self.output.append(self.device.execute(command, timeout=self.run_timeout, check_exit_code=False))
-
- def update_result(self, context):
- for output in self.output:
- self.logger.debug(output)
- outfile = os.path.join(context.output_directory, 'lmbench.output')
- with open(outfile, 'w') as wfh:
- for output in self.output:
- wfh.write(output)
- context.add_artifact('lmbench', 'lmbench.output', 'data')
-
- def teardown(self, context):
- self.device.uninstall(self.test)
-
- #
- # Test setup routines
- #
- def _setup_lat_mem_rd(self):
- command_stub = self._setup_common()
- if self.thrash:
- command_stub = command_stub + '-t '
-
- for size in self.size:
- command = command_stub + size + ' '
- for stride in self.stride:
- self.commands.append(command + str(stride))
-
- def _setup_bw_mem(self):
- command_stub = self._setup_common()
-
- for size in self.size:
- command = command_stub + size + ' '
- for what in self.mem_category:
- self.commands.append(command + what)
-
- def _setup_common(self):
- parts = []
- if self.taskset_mask:
- parts.append('{} taskset 0x{:x} {}'.format(self.device.busybox,
- self.taskset_mask,
- self.device_exe))
- else:
- parts.append(self.device_exe)
- if self.parallelism is not None:
- parts.append('-P {}'.format(self.parallelism))
- if self.warmup is not None:
- parts.append('-W {}'.format(self.warmup))
- if self.repetitions is not None:
- parts.append('-N {}'.format(self.repetitions))
- return ' '.join(parts) + ' '
diff --git a/wlauto/workloads/lmbench/bin/COPYING b/wlauto/workloads/lmbench/bin/COPYING
deleted file mode 100644
index a43ea212..00000000
--- a/wlauto/workloads/lmbench/bin/COPYING
+++ /dev/null
@@ -1,339 +0,0 @@
- GNU GENERAL PUBLIC LICENSE
- Version 2, June 1991
-
- Copyright (C) 1989, 1991 Free Software Foundation, Inc.
- 675 Mass Ave, Cambridge, MA 02139, USA
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
- Preamble
-
- The licenses for most software are designed to take away your
-freedom to share and change it. By contrast, the GNU General Public
-License is intended to guarantee your freedom to share and change free
-software--to make sure the software is free for all its users. This
-General Public License applies to most of the Free Software
-Foundation's software and to any other program whose authors commit to
-using it. (Some other Free Software Foundation software is covered by
-the GNU Library General Public License instead.) You can apply it to
-your programs, too.
-
- When we speak of free software, we are referring to freedom, not
-price. Our General Public Licenses are designed to make sure that you
-have the freedom to distribute copies of free software (and charge for
-this service if you wish), that you receive source code or can get it
-if you want it, that you can change the software or use pieces of it
-in new free programs; and that you know you can do these things.
-
- To protect your rights, we need to make restrictions that forbid
-anyone to deny you these rights or to ask you to surrender the rights.
-These restrictions translate to certain responsibilities for you if you
-distribute copies of the software, or if you modify it.
-
- For example, if you distribute copies of such a program, whether
-gratis or for a fee, you must give the recipients all the rights that
-you have. You must make sure that they, too, receive or can get the
-source code. And you must show them these terms so they know their
-rights.
-
- We protect your rights with two steps: (1) copyright the software, and
-(2) offer you this license which gives you legal permission to copy,
-distribute and/or modify the software.
-
- Also, for each author's protection and ours, we want to make certain
-that everyone understands that there is no warranty for this free
-software. If the software is modified by someone else and passed on, we
-want its recipients to know that what they have is not the original, so
-that any problems introduced by others will not reflect on the original
-authors' reputations.
-
- Finally, any free program is threatened constantly by software
-patents. We wish to avoid the danger that redistributors of a free
-program will individually obtain patent licenses, in effect making the
-program proprietary. To prevent this, we have made it clear that any
-patent must be licensed for everyone's free use or not licensed at all.
-
- The precise terms and conditions for copying, distribution and
-modification follow.
-
- GNU GENERAL PUBLIC LICENSE
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-
- 0. This License applies to any program or other work which contains
-a notice placed by the copyright holder saying it may be distributed
-under the terms of this General Public License. The "Program", below,
-refers to any such program or work, and a "work based on the Program"
-means either the Program or any derivative work under copyright law:
-that is to say, a work containing the Program or a portion of it,
-either verbatim or with modifications and/or translated into another
-language. (Hereinafter, translation is included without limitation in
-the term "modification".) Each licensee is addressed as "you".
-
-Activities other than copying, distribution and modification are not
-covered by this License; they are outside its scope. The act of
-running the Program is not restricted, and the output from the Program
-is covered only if its contents constitute a work based on the
-Program (independent of having been made by running the Program).
-Whether that is true depends on what the Program does.
-
- 1. You may copy and distribute verbatim copies of the Program's
-source code as you receive it, in any medium, provided that you
-conspicuously and appropriately publish on each copy an appropriate
-copyright notice and disclaimer of warranty; keep intact all the
-notices that refer to this License and to the absence of any warranty;
-and give any other recipients of the Program a copy of this License
-along with the Program.
-
-You may charge a fee for the physical act of transferring a copy, and
-you may at your option offer warranty protection in exchange for a fee.
-
- 2. You may modify your copy or copies of the Program or any portion
-of it, thus forming a work based on the Program, and copy and
-distribute such modifications or work under the terms of Section 1
-above, provided that you also meet all of these conditions:
-
- a) You must cause the modified files to carry prominent notices
- stating that you changed the files and the date of any change.
-
- b) You must cause any work that you distribute or publish, that in
- whole or in part contains or is derived from the Program or any
- part thereof, to be licensed as a whole at no charge to all third
- parties under the terms of this License.
-
- c) If the modified program normally reads commands interactively
- when run, you must cause it, when started running for such
- interactive use in the most ordinary way, to print or display an
- announcement including an appropriate copyright notice and a
- notice that there is no warranty (or else, saying that you provide
- a warranty) and that users may redistribute the program under
- these conditions, and telling the user how to view a copy of this
- License. (Exception: if the Program itself is interactive but
- does not normally print such an announcement, your work based on
- the Program is not required to print an announcement.)
-
-These requirements apply to the modified work as a whole. If
-identifiable sections of that work are not derived from the Program,
-and can be reasonably considered independent and separate works in
-themselves, then this License, and its terms, do not apply to those
-sections when you distribute them as separate works. But when you
-distribute the same sections as part of a whole which is a work based
-on the Program, the distribution of the whole must be on the terms of
-this License, whose permissions for other licensees extend to the
-entire whole, and thus to each and every part regardless of who wrote it.
-
-Thus, it is not the intent of this section to claim rights or contest
-your rights to work written entirely by you; rather, the intent is to
-exercise the right to control the distribution of derivative or
-collective works based on the Program.
-
-In addition, mere aggregation of another work not based on the Program
-with the Program (or with a work based on the Program) on a volume of
-a storage or distribution medium does not bring the other work under
-the scope of this License.
-
- 3. You may copy and distribute the Program (or a work based on it,
-under Section 2) in object code or executable form under the terms of
-Sections 1 and 2 above provided that you also do one of the following:
-
- a) Accompany it with the complete corresponding machine-readable
- source code, which must be distributed under the terms of Sections
- 1 and 2 above on a medium customarily used for software interchange; or,
-
- b) Accompany it with a written offer, valid for at least three
- years, to give any third party, for a charge no more than your
- cost of physically performing source distribution, a complete
- machine-readable copy of the corresponding source code, to be
- distributed under the terms of Sections 1 and 2 above on a medium
- customarily used for software interchange; or,
-
- c) Accompany it with the information you received as to the offer
- to distribute corresponding source code. (This alternative is
- allowed only for noncommercial distribution and only if you
- received the program in object code or executable form with such
- an offer, in accord with Subsection b above.)
-
-The source code for a work means the preferred form of the work for
-making modifications to it. For an executable work, complete source
-code means all the source code for all modules it contains, plus any
-associated interface definition files, plus the scripts used to
-control compilation and installation of the executable. However, as a
-special exception, the source code distributed need not include
-anything that is normally distributed (in either source or binary
-form) with the major components (compiler, kernel, and so on) of the
-operating system on which the executable runs, unless that component
-itself accompanies the executable.
-
-If distribution of executable or object code is made by offering
-access to copy from a designated place, then offering equivalent
-access to copy the source code from the same place counts as
-distribution of the source code, even though third parties are not
-compelled to copy the source along with the object code.
-
- 4. You may not copy, modify, sublicense, or distribute the Program
-except as expressly provided under this License. Any attempt
-otherwise to copy, modify, sublicense or distribute the Program is
-void, and will automatically terminate your rights under this License.
-However, parties who have received copies, or rights, from you under
-this License will not have their licenses terminated so long as such
-parties remain in full compliance.
-
- 5. You are not required to accept this License, since you have not
-signed it. However, nothing else grants you permission to modify or
-distribute the Program or its derivative works. These actions are
-prohibited by law if you do not accept this License. Therefore, by
-modifying or distributing the Program (or any work based on the
-Program), you indicate your acceptance of this License to do so, and
-all its terms and conditions for copying, distributing or modifying
-the Program or works based on it.
-
- 6. Each time you redistribute the Program (or any work based on the
-Program), the recipient automatically receives a license from the
-original licensor to copy, distribute or modify the Program subject to
-these terms and conditions. You may not impose any further
-restrictions on the recipients' exercise of the rights granted herein.
-You are not responsible for enforcing compliance by third parties to
-this License.
-
- 7. If, as a consequence of a court judgment or allegation of patent
-infringement or for any other reason (not limited to patent issues),
-conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot
-distribute so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you
-may not distribute the Program at all. For example, if a patent
-license would not permit royalty-free redistribution of the Program by
-all those who receive copies directly or indirectly through you, then
-the only way you could satisfy both it and this License would be to
-refrain entirely from distribution of the Program.
-
-If any portion of this section is held invalid or unenforceable under
-any particular circumstance, the balance of the section is intended to
-apply and the section as a whole is intended to apply in other
-circumstances.
-
-It is not the purpose of this section to induce you to infringe any
-patents or other property right claims or to contest validity of any
-such claims; this section has the sole purpose of protecting the
-integrity of the free software distribution system, which is
-implemented by public license practices. Many people have made
-generous contributions to the wide range of software distributed
-through that system in reliance on consistent application of that
-system; it is up to the author/donor to decide if he or she is willing
-to distribute software through any other system and a licensee cannot
-impose that choice.
-
-This section is intended to make thoroughly clear what is believed to
-be a consequence of the rest of this License.
-
- 8. If the distribution and/or use of the Program is restricted in
-certain countries either by patents or by copyrighted interfaces, the
-original copyright holder who places the Program under this License
-may add an explicit geographical distribution limitation excluding
-those countries, so that distribution is permitted only in or among
-countries not thus excluded. In such case, this License incorporates
-the limitation as if written in the body of this License.
-
- 9. The Free Software Foundation may publish revised and/or new versions
-of the General Public License from time to time. Such new versions will
-be similar in spirit to the present version, but may differ in detail to
-address new problems or concerns.
-
-Each version is given a distinguishing version number. If the Program
-specifies a version number of this License which applies to it and "any
-later version", you have the option of following the terms and conditions
-either of that version or of any later version published by the Free
-Software Foundation. If the Program does not specify a version number of
-this License, you may choose any version ever published by the Free Software
-Foundation.
-
- 10. If you wish to incorporate parts of the Program into other free
-programs whose distribution conditions are different, write to the author
-to ask for permission. For software which is copyrighted by the Free
-Software Foundation, write to the Free Software Foundation; we sometimes
-make exceptions for this. Our decision will be guided by the two goals
-of preserving the free status of all derivatives of our free software and
-of promoting the sharing and reuse of software generally.
-
- NO WARRANTY
-
- 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
-FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN
-OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
-PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS
-TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE
-PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
-REPAIR OR CORRECTION.
-
- 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
-WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
-REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
-INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
-OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
-TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
-YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
-PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGES.
-
- END OF TERMS AND CONDITIONS
-
- Appendix: How to Apply These Terms to Your New Programs
-
- If you develop a new program, and you want it to be of the greatest
-possible use to the public, the best way to achieve this is to make it
-free software which everyone can redistribute and change under these terms.
-
- To do so, attach the following notices to the program. It is safest
-to attach them to the start of each source file to most effectively
-convey the exclusion of warranty; and each file should have at least
-the "copyright" line and a pointer to where the full notice is found.
-
- <one line to give the program's name and a brief idea of what it does.>
- Copyright (C) 19yy <name of author>
-
- This program is free software; you can redistribute it and/or modify
- it under the terms of the GNU General Public License as published by
- the Free Software Foundation; either version 2 of the License, or
- (at your option) any later version.
-
- This program is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- GNU General Public License for more details.
-
- You should have received a copy of the GNU General Public License
- along with this program; if not, write to the Free Software
- Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
-
-Also add information on how to contact you by electronic and paper mail.
-
-If the program is interactive, make it output a short notice like this
-when it starts in an interactive mode:
-
- Gnomovision version 69, Copyright (C) 19yy name of author
- Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
- This is free software, and you are welcome to redistribute it
- under certain conditions; type `show c' for details.
-
-The hypothetical commands `show w' and `show c' should show the appropriate
-parts of the General Public License. Of course, the commands you use may
-be called something other than `show w' and `show c'; they could even be
-mouse-clicks or menu items--whatever suits your program.
-
-You should also get your employer (if you work as a programmer) or your
-school, if any, to sign a "copyright disclaimer" for the program, if
-necessary. Here is a sample; alter the names:
-
- Yoyodyne, Inc., hereby disclaims all copyright interest in the program
- `Gnomovision' (which makes passes at compilers) written by James Hacker.
-
- <signature of Ty Coon>, 1 April 1989
- Ty Coon, President of Vice
-
-This General Public License does not permit incorporating your program into
-proprietary programs. If your program is a subroutine library, you may
-consider it more useful to permit linking proprietary applications with the
-library. If this is what you want to do, use the GNU Library General
-Public License instead of this License.
diff --git a/wlauto/workloads/lmbench/bin/COPYING-2 b/wlauto/workloads/lmbench/bin/COPYING-2
deleted file mode 100644
index 3e1f7cc6..00000000
--- a/wlauto/workloads/lmbench/bin/COPYING-2
+++ /dev/null
@@ -1,108 +0,0 @@
-%M% %I% %E%
-
-The set of programs and documentation known as "lmbench" are distributed
-under the Free Software Foundation's General Public License with the
-following additional restrictions (which override any conflicting
-restrictions in the GPL):
-
-1. You may not distribute results in any public forum, in any publication,
- or in any other way if you have modified the benchmarks.
-
-2. You may not distribute the results for a fee of any kind. This includes
- web sites which generate revenue from advertising.
-
-If you have modifications or enhancements that you wish included in
-future versions, please mail those to me, Larry McVoy, at lm@bitmover.com.
-
-=========================================================================
-
-Rationale for the publication restrictions:
-
-In summary:
-
- a) LMbench is designed to measure enough of an OS that if you do well in
- all catagories, you've covered latency and bandwidth in networking,
- disks, file systems, VM systems, and memory systems.
- b) Multiple times in the past people have wanted to report partial results.
- Without exception, they were doing so to show a skewed view of whatever
- it was they were measuring (for example, one OS fit small processes into
- segments and used the segment register to switch them, getting good
- results, but did not want to report large process context switches
- because those didn't look as good).
- c) We insist that if you formally report LMbench results, you have to
- report all of them and make the raw results file easily available.
- Reporting all of them means in that same publication, a pointer
- does not count. Formally, in this context, means in a paper,
- on a web site, etc., but does not mean the exchange of results
- between OS developers who are tuning a particular subsystem.
-
-We have a lot of history with benchmarking and feel strongly that there
-is little to be gained and a lot to be lost if we allowed the results
-to be published in isolation, without the complete story being told.
-
-There has been a lot of discussion about this, with people not liking this
-restriction, more or less on the freedom principle as far as I can tell.
-We're not swayed by that, our position is that we are doing the right
-thing for the OS community and will stick to our guns on this one.
-
-It would be a different matter if there were 3 other competing
-benchmarking systems out there that did what LMbench does and didn't have
-the same reporting rules. There aren't and as long as that is the case,
-I see no reason to change my mind and lots of reasons not to do so. I'm
-sorry if I'm a pain in the ass on this topic, but I'm doing the right
-thing for you and the sooner people realize that the sooner we can get on
-to real work.
-
-Operating system design is a largely an art of balancing tradeoffs.
-In many cases improving one part of the system has negative effects
-on other parts of the system. The art is choosing which parts to
-optimize and which to not optimize. Just like in computer architecture,
-you can optimize the common instructions (RISC) or the uncommon
-instructions (CISC), but in either case there is usually a cost to
-pay (in RISC uncommon instructions are more expensive than common
-instructions, and in CISC common instructions are more expensive
-than required). The art lies in knowing which operations are
-important and optmizing those while minimizing the impact on the
-rest of the system.
-
-Since lmbench gives a good overview of many important system features,
-users may see the performance of the system as a whole, and can
-see where tradeoffs may have been made. This is the driving force
-behind the publication restriction: any idiot can optimize certain
-subsystems while completely destroying overall system performance.
-If said idiot publishes *only* the numbers relating to the optimized
-subsystem, then the costs of the optimization are hidden and readers
-will mistakenly believe that the optimization is a good idea. By
-including the publication restriction readers would be able to
-detect that the optimization improved the subsystem performance
-while damaging the rest of the system performance and would be able
-to make an informed decision as to the merits of the optimization.
-
-Note that these restrictions only apply to *publications*. We
-intend and encourage lmbench's use during design, development,
-and tweaking of systems and applications. If you are tuning the
-linux or BSD TCP stack, then by all means, use the networking
-benchmarks to evaluate the performance effects of various
-modifications; Swap results with other developers; use the
-networking numbers in isolation. The restrictions only kick
-in when you go to *publish* the results. If you sped up the
-TCP stack by a factor of 2 and want to publish a paper with the
-various tweaks or algorithms used to accomplish this goal, then
-you can publish the networking numbers to show the improvement.
-However, the paper *must* also include the rest of the standard
-lmbench numbers to show how your tweaks may (or may not) have
-impacted the rest of the system. The full set of numbers may
-be included in an appendix, but they *must* be included in the
-paper.
-
-This helps protect the community from adopting flawed technologies
-based on incomplete data. It also helps protect the community from
-misleading marketing which tries to sell systems based on partial
-(skewed) lmbench performance results.
-
-We have seen many cases in the past where partial or misleading
-benchmark results have caused great harm to the community, and
-we want to ensure that our benchmark is not used to perpetrate
-further harm and support false or misleading claims.
-
-
diff --git a/wlauto/workloads/lmbench/bin/README b/wlauto/workloads/lmbench/bin/README
deleted file mode 100644
index fa1f02f4..00000000
--- a/wlauto/workloads/lmbench/bin/README
+++ /dev/null
@@ -1,17 +0,0 @@
-This directory contains a subset of lmbench tests supported by Workload Automation.
-
-The binaries are provided under the terms of the GNU General Public License, Version 2,
-consistent with lmbench's additional restrictions.
-Refer to COPYING and COPYING-2 files for details.
-
-The binaries were built from lmbench-3.0-a9 source code, available publically from
- http://sourceforge.net/projects/lmbench/, specifically
- http://sourceforge.net/projects/lmbench/files/development/
-
-The binaries provided here are built and statically linked with the stable
-Linaro GNU 4.9 toolchain from November 2014.
-Source available from:
- http://releases.linaro.org/14.11/components/toolchain/gcc-linaro/4.9
-Binaries available from:
- https://releases.linaro.org/14.11/components/toolchain/binaries/aarch64-linux-gnu
- https://releases.linaro.org/14.11/components/toolchain/binaries/arm-linux-gnueabihf
diff --git a/wlauto/workloads/lmbench/bin/arm64/bw_mem b/wlauto/workloads/lmbench/bin/arm64/bw_mem
deleted file mode 100755
index 07b8dacf..00000000
--- a/wlauto/workloads/lmbench/bin/arm64/bw_mem
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/lmbench/bin/arm64/lat_mem_rd b/wlauto/workloads/lmbench/bin/arm64/lat_mem_rd
deleted file mode 100755
index 39587ed9..00000000
--- a/wlauto/workloads/lmbench/bin/arm64/lat_mem_rd
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/lmbench/bin/armeabi/bw_mem b/wlauto/workloads/lmbench/bin/armeabi/bw_mem
deleted file mode 100755
index 0496f077..00000000
--- a/wlauto/workloads/lmbench/bin/armeabi/bw_mem
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/lmbench/bin/armeabi/lat_mem_rd b/wlauto/workloads/lmbench/bin/armeabi/lat_mem_rd
deleted file mode 100755
index 67fe8fcf..00000000
--- a/wlauto/workloads/lmbench/bin/armeabi/lat_mem_rd
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/manual/__init__.py b/wlauto/workloads/manual/__init__.py
deleted file mode 100644
index 39c1f30a..00000000
--- a/wlauto/workloads/manual/__init__.py
+++ /dev/null
@@ -1,104 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=E1101,W0201,E0203
-import os
-import time
-
-from wlauto import Workload, Parameter
-from wlauto.exceptions import ConfigError
-from wlauto.utils.misc import getch
-from wlauto.utils.types import boolean
-
-
-class ManualWorkloadConfig(object):
-
- default_duration = 30
-
- def __init__(self,
- duration=None, # Seconds
- user_triggered=None,
- view=None,
- enable_logcat=True
- ):
- self.user_triggered = user_triggered if user_triggered is not None else (False if duration else True)
- self.duration = duration or (None if self.user_triggered else self.default_duration)
- self.view = view
- self.enable_logcat = enable_logcat
-
-
-class ManualWorkload(Workload):
-
- name = 'manual'
- description = """
- Yields control to the user, either for a fixed period or based on user input, to perform
- custom operations on the device, about which workload automation does not know of.
-
- """
-
- parameters = [
- Parameter('duration', kind=int, default=None,
- description=('Control of the devices is yielded for the duration (in seconds) specified. '
- 'If not specified, ``user_triggered`` is assumed.')),
- Parameter('user_triggered', kind=boolean, default=None,
- description="""If ``True``, WA will wait for user input after starting the workload;
- otherwise fixed duration is expected. Defaults to ``True`` if ``duration``
- is not specified, and ``False`` otherwise.
- """),
- Parameter('view', default='SurfaceView',
- description="""Specifies the View of the workload. This enables instruments that require a
- View to be specified, such as the ``fps`` instrument."""),
- Parameter('enable_logcat', kind=boolean,
- description='If ``True``, ``manual`` workload will collect logcat as part of the results.'),
- ]
-
- def setup(self, context):
- self.logger.info('Any setup required by your workload should be done now.')
- self.logger.info('As soon as you are done hit any key and wait for the message')
- self.logger.info('"START NOW!" to begin your manual workload.')
- self.logger.info('')
- self.logger.info('hit any key to finalize your setup...')
- getch()
-
- def run(self, context):
- self.logger.info('START NOW!')
- if self.duration:
- time.sleep(self.duration)
- elif self.user_triggered:
- self.logger.info('')
- self.logger.info('hit any key to end your workload execution...')
- getch()
- else:
- raise ConfigError('Illegal parameters for manual workload')
- self.logger.info('DONE! your results are now being collected!')
-
- def update_result(self, context):
- if self.enable_logcat:
- logcat_dir = os.path.join(context.output_directory, 'logcat')
- context.device_manager.dump_logcat(logcat_dir)
-
- def teardown(self, context):
- pass
-
- def validate(self):
- if self.duration is None:
- if self.user_triggered is None:
- self.user_triggered = True
- elif self.user_triggered is False:
- self.duration = self.default_duration
- if self.user_triggered and self.duration:
- message = 'Manual Workload can either specify duration or be user triggered, but not both'
- raise ConfigError(message)
- if not self.user_triggered and not self.duration:
- raise ConfigError('Either user_triggered must be ``True`` or duration must be > 0.')
diff --git a/wlauto/workloads/memcpy/__init__.py b/wlauto/workloads/memcpy/__init__.py
deleted file mode 100644
index 81249b31..00000000
--- a/wlauto/workloads/memcpy/__init__.py
+++ /dev/null
@@ -1,75 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,W0201
-
-import os
-import re
-
-from wlauto import Workload, Parameter, Executable
-
-
-THIS_DIR = os.path.dirname(__file__)
-
-
-RESULT_REGEX = re.compile('Total time: ([\d.]+) s.*Bandwidth: ([\d.]+) MB/s', re.S)
-
-
-class MemcpyTest(Workload):
-
- name = 'memcpy'
- description = """
- Runs memcpy in a loop.
-
- This will run memcpy in a loop for a specified number of times on a buffer
- of a specified size. Additionally, the affinity of the test can be set to one
- or more specific cores.
-
- This workload is single-threaded. It genrates no scores or metrics by itself.
-
- """
-
- parameters = [
- Parameter('buffer_size', kind=int, default=1024 * 1024 * 5,
- description='Specifies the size, in bytes, of the buffer to be copied.'),
- Parameter('iterations', kind=int, default=1000,
- description='Specfies the number of iterations that will be performed.'),
- Parameter('cpus', kind=list, default=[],
- description="""A list of integers specifying ordinals of cores to which the affinity
- of the test process should be set. If not specified, all avaiable cores
- will be used.
- """),
- ]
-
- def setup(self, context):
- self.binary_name = 'memcpy'
- host_binary = context.resolver.get(Executable(self, self.device.abi, self.binary_name))
- self.device_binary = self.device.install_if_needed(host_binary)
-
- self.command = '{} -i {} -s {}'.format(self.device_binary, self.iterations, self.buffer_size)
- if self.cpus:
- for c in self.cpus:
- self.command += ' -c {}'.format(c)
-
- def run(self, context):
- self.result = self.device.execute(self.command, timeout=300)
-
- def update_result(self, context):
- match = RESULT_REGEX.search(self.result)
- context.result.add_metric('time', float(match.group(1)), 'seconds', lower_is_better=True)
- context.result.add_metric('bandwidth', float(match.group(2)), 'MB/s')
-
- def teardown(self, context):
- pass
diff --git a/wlauto/workloads/memcpy/bin/arm64/memcpy b/wlauto/workloads/memcpy/bin/arm64/memcpy
deleted file mode 100755
index 39982df8..00000000
--- a/wlauto/workloads/memcpy/bin/arm64/memcpy
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/memcpy/bin/armeabi/memcpy b/wlauto/workloads/memcpy/bin/armeabi/memcpy
deleted file mode 100755
index 4af3239a..00000000
--- a/wlauto/workloads/memcpy/bin/armeabi/memcpy
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/memcpy/src/build.sh b/wlauto/workloads/memcpy/src/build.sh
deleted file mode 100755
index 3638949a..00000000
--- a/wlauto/workloads/memcpy/src/build.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-ndk-build
-
-if [[ $? -eq 0 ]]; then
- cp libs/armeabi/memcpy ..
-fi
diff --git a/wlauto/workloads/memcpy/src/jni/Android.mk b/wlauto/workloads/memcpy/src/jni/Android.mk
deleted file mode 100644
index 77d438e6..00000000
--- a/wlauto/workloads/memcpy/src/jni/Android.mk
+++ /dev/null
@@ -1,11 +0,0 @@
-LOCAL_PATH := $(call my-dir)
-include $(CLEAR_VARS)
-
-LOCAL_SRC_FILES := memcopy.c
-
-LOCAL_LD_LIBS := -lrt
-
-LOCAL_MODULE := memcpy
-
-
-include $(BUILD_EXECUTABLE)
diff --git a/wlauto/workloads/memcpy/src/jni/memcopy.c b/wlauto/workloads/memcpy/src/jni/memcopy.c
deleted file mode 100644
index 19f569d3..00000000
--- a/wlauto/workloads/memcpy/src/jni/memcopy.c
+++ /dev/null
@@ -1,114 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-#define _GNU_SOURCE
-#include <stdlib.h>
-#include <stdio.h>
-#include <string.h>
-#include <sched.h>
-#include <unistd.h>
-#include <sys/syscall.h>
-#include <pthread.h>
-#include <time.h>
-
-const int MAX_CPUS = 8;
-const int DEFAULT_ITERATIONS = 1000;
-const int DEFAULT_BUFFER_SIZE = 1024 * 1024 * 5;
-
-int set_affinity(size_t cpus_size, int* cpus)
-{
- int i;
- int mask = 0;
-
- for(i = 0; i < cpus_size; ++i)
- {
- mask |= 1 << cpus[i];
- }
-
- return syscall(__NR_sched_setaffinity, 0, sizeof(mask), &mask);
-}
-
-int main(int argc, char** argv)
-{
- int cpus[MAX_CPUS];
- int next_cpu = 0;
- int iterations = DEFAULT_ITERATIONS;
- int buffer_size = DEFAULT_BUFFER_SIZE;
-
- int c;
- while ((c = getopt(argc, argv, "i:c:s:")) != -1)
- switch (c)
- {
- case 'c':
- cpus[next_cpu++] = atoi(optarg);
- if (next_cpu == MAX_CPUS)
- {
- fprintf(stderr, "Max CPUs exceeded.");
- abort();
- }
- break;
- case 'i':
- iterations = atoi(optarg);
- break;
- case 's':
- buffer_size = atoi(optarg);
- break;
- default:
- abort();
- break;
- }
-
- int ret;
- if (next_cpu != 0)
- if (ret = set_affinity(next_cpu, cpus))
- {
- fprintf(stderr, "sched_setaffinity returnred %i.", ret);
- abort();
- }
-
- char* source = malloc(buffer_size);
- char* dest = malloc(buffer_size);
-
- struct timespec before, after;
- if (clock_gettime(CLOCK_MONOTONIC, &before))
- {
- fprintf(stderr, "Could not get start time.");
- abort();
- }
-
- int i;
- for (i = 0; i < iterations; ++i)
- {
- memcpy(dest, source, buffer_size);
- }
-
- if (clock_gettime(CLOCK_MONOTONIC, &after))
- {
- fprintf(stderr, "Could not get end time.");
- abort();
- }
-
- free(dest);
- free(source);
-
- long delta_sec = (long)(after.tv_sec - before.tv_sec);
- long delta_nsec = after.tv_nsec - before.tv_nsec;
- double delta = (double)delta_sec + delta_nsec / 1e9;
- printf("Total time: %f s\n", delta);
- printf("Bandwidth: %f MB/s\n", buffer_size / delta * iterations / 1e6);
-
- return 0;
-}
diff --git a/wlauto/workloads/nenamark/__init__.py b/wlauto/workloads/nenamark/__init__.py
deleted file mode 100644
index f8c5a4a9..00000000
--- a/wlauto/workloads/nenamark/__init__.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import os
-import re
-import time
-
-from wlauto import AndroidBenchmark, Parameter
-
-
-class Nenamark(AndroidBenchmark):
-
- name = 'nenamark'
- description = """
- NenaMark is an OpenGL-ES 2.0 graphics performance benchmark for Android
- devices.
-
- http://nena.se/nenamark_story
-
- From the website:
-
- The NenaMark2 benchmark scene averages about 45k triangles, with a span
- between 26k and 68k triangles. It averages 96 batches per frame and contains
- about 15 Mb of texture data (non-packed).
- """
- package = 'se.nena.nenamark2'
- activity = 'se.nena.nenamark2.NenaMark2'
-
- parameters = [
- Parameter('duration', kind=int, default=120,
- description="""
- Number of seconds to wait before considering the benchmark
- finished
- """),
- ]
-
- regex = re.compile('.*NenaMark2.*Score.*?([0-9\.]*)fps')
-
- def run(self, context):
- time.sleep(5) # wait for nenamark menu to show up
- self.device.execute('input keyevent 23')
- time.sleep(self.duration)
-
- def update_result(self, context):
- super(Nenamark, self).update_result(context)
- with open(self.logcat_log) as fh:
- for line in fh:
- match = self.regex.search(line)
- if match:
- score = match.group(1)
- context.result.add_metric('nenamark score', score)
- break
-
diff --git a/wlauto/workloads/peacekeeper/__init__.py b/wlauto/workloads/peacekeeper/__init__.py
deleted file mode 100644
index 79b9949f..00000000
--- a/wlauto/workloads/peacekeeper/__init__.py
+++ /dev/null
@@ -1,129 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,W0201,E0203
-import os
-import urllib2
-from HTMLParser import HTMLParser
-
-from wlauto import AndroidUiAutoBenchmark, Parameter
-from wlauto.exceptions import WorkloadError
-
-
-BROWSER_MAP = {
- 'firefox': {
- 'package': 'org.mozilla.firefox',
- 'activity': '.App',
- },
- 'chrome': {
- 'package': 'com.android.chrome',
- 'activity': 'com.google.android.apps.chrome.Main',
- },
-}
-
-
-class Peacekeeper(AndroidUiAutoBenchmark):
-
- name = 'peacekeeper'
- description = """
- Peacekeeper is a free and fast browser test that measures a browser's speed.
-
- .. note::
-
- This workload requires a network connection as well as support for
- one of the two currently-supported browsers. Moreover, TC2 has
- compatibility issue with chrome
-
- """
- run_timeout = 15 * 60
-
- parameters = [
- Parameter('browser', default='firefox', allowed_values=['firefox', 'chrome'],
- description='The browser to be benchmarked.'),
- Parameter('output_file', default=None,
- description="""The result URL of peacekeeper benchmark will be written
- into this file on device after completion of peacekeeper benchmark.
- Defaults to peacekeeper.txt in the device's ``working_directory``.
- """),
- Parameter('peacekeeper_url', default='http://peacekeeper.futuremark.com/run.action',
- description='The URL to run the peacekeeper benchmark.'),
- ]
-
- def __init__(self, device, **kwargs):
- super(Peacekeeper, self).__init__(device, **kwargs)
- self.version = self.browser
-
- def update_result(self, context):
- super(Peacekeeper, self).update_result(context)
- url = None
-
- # Pull the result page url, which contains the results, from the
- # peacekeeper.txt file and process it
- self.device.pull(self.output_file, context.output_directory)
- result_file = os.path.join(context.output_directory, 'peacekeeper.txt')
- with open(result_file) as fh:
- for line in fh:
- url = line
-
- # Fetch the html page containing the results
- if not url:
- raise WorkloadError('The url is empty, error while running peacekeeper benchmark')
-
- req = urllib2.Request(url)
- response = urllib2.urlopen(req)
- result_page = response.read()
-
- # Parse the HTML content using HTML parser
- parser = PeacekeeperParser()
- parser.feed(result_page)
-
- # Add peacekeeper_score into results file
- context.result.add_metric('peacekeeper_score', parser.peacekeeper_score)
-
- def validate(self):
- if self.output_file is None:
- self.output_file = os.path.join(self.device.working_directory, 'peacekeeper.txt')
- if self.browser == 'chrome' and self.device == 'TC2':
- raise WorkloadError('Chrome not supported on TC2')
-
- self.uiauto_params['output_file'] = self.output_file
- self.uiauto_params['browser'] = self.browser
- self.uiauto_params['peacekeeper_url'] = self.peacekeeper_url
-
- self.package = BROWSER_MAP[self.browser]['package']
- self.activity = BROWSER_MAP[self.browser]['activity']
-
-
-class PeacekeeperParser(HTMLParser):
- def __init__(self):
- HTMLParser.__init__(self)
- self.flag = False
- self.peacekeeper_score = ''
-
- def handle_starttag(self, tag, attrs):
- if tag == 'div':
- for name, value in attrs:
- if name == 'class' and value == 'resultBarContainer clearfix resultBarSelected':
- self.flag = True
- elif self.flag and name == 'class' and value == 'resultBarComment':
- self.flag = False
- self.peacekeeper_score = self.peacekeeper_score.split('details')[1]
-
- def handle_endtag(self, tag):
- pass
-
- def handle_data(self, data):
- if self.flag:
- self.peacekeeper_score += data.strip()
diff --git a/wlauto/workloads/peacekeeper/com.arm.wlauto.uiauto.peacekeeper.jar b/wlauto/workloads/peacekeeper/com.arm.wlauto.uiauto.peacekeeper.jar
deleted file mode 100644
index 4dbd9465..00000000
--- a/wlauto/workloads/peacekeeper/com.arm.wlauto.uiauto.peacekeeper.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/peacekeeper/uiauto/build.sh b/wlauto/workloads/peacekeeper/uiauto/build.sh
deleted file mode 100755
index 96df2690..00000000
--- a/wlauto/workloads/peacekeeper/uiauto/build.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.peacekeeper.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.peacekeeper.jar ..
-fi
diff --git a/wlauto/workloads/peacekeeper/uiauto/build.xml b/wlauto/workloads/peacekeeper/uiauto/build.xml
deleted file mode 100644
index 7d60a557..00000000
--- a/wlauto/workloads/peacekeeper/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.peacekeeper" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/peacekeeper/uiauto/project.properties b/wlauto/workloads/peacekeeper/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/peacekeeper/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/peacekeeper/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/peacekeeper/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index 2384b800..00000000
--- a/wlauto/workloads/peacekeeper/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,115 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.peacekeeper;
-
-import java.io.File;
-import java.io.InputStreamReader;
-import java.io.Reader;
-import java.net.URL;
-import java.net.URLConnection;
-import java.util.regex.Matcher;
-import java.util.regex.Pattern;
-import java.io.PrintWriter;
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.KeyEvent;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "peacekeeper";
-
- public void runUiAutomation() throws Exception {
- // maximum time for running peacekeeper benchmark 80 * 10 sec
- final int TIMEOUT = 80;
-
- // reading the input parameter
- Bundle parameters = getParams();
- String browser = parameters.getString("browser");
- String outputFile = parameters.getString("output_file");
- String peacekeeperUrl = parameters.getString("peacekeeper_url");
-
- String urlAddress = "";
-
- PrintWriter writer = new PrintWriter(outputFile, "UTF-8");
-
- // firefox browser uiautomator code
- if (browser.equals("firefox")) {
-
- UiObject addressBar = new UiObject(new UiSelector()
- .className("android.widget.TextView")
- .text("Enter Search or Address"));
- addressBar.click();
- UiObject setUrl = new UiObject(new UiSelector()
- .className("android.widget.EditText"));
- setUrl.clearTextField();
- setUrl.setText(peacekeeperUrl);
- getUiDevice().pressEnter();
-
- UiObject currentUrl = new UiObject(new UiSelector()
- .className("android.widget.TextView").index(1));
- for (int i = 0; i < TIMEOUT; i++) {
-
- if (currentUrl.getText()
- .equals("Peacekeeper - free universal browser test for HTML5 from Futuremark")) {
-
- // write url address to peacekeeper.txt file
- currentUrl.click();
- urlAddress = setUrl.getText();
- writer.println(urlAddress);
- break;
- }
- sleep(10);
- }
- } else if (browser.equals("chrome")) { // Code for Chrome browser
- UiObject adressBar = new UiObject(new UiSelector()
- .className("android.widget.EditText")
- .description("Search or type url"));
-
- adressBar.clearTextField();
- adressBar.setText(peacekeeperUrl);
- getUiDevice().pressEnter();
- for (int i = 0; i < TIMEOUT; i++) {
-
- if (!adressBar.getText().contains("run.action")) {
-
- // write url address to peacekeeper.txt file
- urlAddress = adressBar.getText();
- if (!urlAddress.contains("http"))
- urlAddress = "http://" + urlAddress;
- writer.println(urlAddress);
- break;
- }
- sleep(10);
- }
- }
- writer.close();
- getUiDevice().pressHome();
- }
-}
diff --git a/wlauto/workloads/power_loadtest/__init__.py b/wlauto/workloads/power_loadtest/__init__.py
deleted file mode 100644
index 9faef57b..00000000
--- a/wlauto/workloads/power_loadtest/__init__.py
+++ /dev/null
@@ -1,122 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-# pylint: disable=attribute-defined-outside-init
-
-import os
-import re
-
-from wlauto import Workload, Parameter
-from wlauto.exceptions import WorkloadError
-from wlauto.utils.misc import which, check_output
-from wlauto.utils.types import arguments, numeric
-
-
-# Location of the power_LoadTest under the chroot
-#POWER_LOADTEST_DIR = '/mnt/host/source/src/third_party/autotest/files/client/site_tests/power_LoadTest'
-MARKER = '---------------------------'
-STATUS_REGEX = re.compile(r'^\S+\s+\[\s*(\S+)\s*\]')
-METRIC_REGEX = re.compile(r'^\S+\s+(\S+)\s*(\S+)')
-
-
-class PowerLoadtest(Workload):
-
- name = 'power_loadtest'
- description = '''
- power_LoadTest (part of ChromeOS autotest suite) continuously cycles through a set of
- browser-based activities and monitors battery drain on a device.
-
- .. note:: This workload *must* be run inside a CromeOS SDK chroot.
-
- See: https://www.chromium.org/chromium-os/testing/power-testing
-
- '''
- supported_platforms = ['chromeos']
-
- parameters = [
- Parameter('board', default=os.getenv('BOARD'),
- description='''
- The name of the board to be used for the test. If this is not specified,
- BOARD environment variable will be used.
- '''),
- Parameter('variant',
- description='''
- The variant of the test to run; If not specified, the full power_LoadTest will
- run (until the device battery is drained). The only other variant available in the
- vanilla test is "1hour", but further variants may be added by providing custom
- control files.
- '''),
- Parameter('test_that_args', kind=arguments, default='',
- description='''
- Extra arguments to be passed to test_that_invocation.
- '''),
- Parameter('run_timeout', kind=int, default=24 * 60 * 60,
- description='''
- Timeout, in seconds, for the test execution.
- '''),
- ]
-
- def setup(self, context):
- if self.device.os != 'chromeos':
- raise WorkloadError('{} only supports ChromeOS devices'.format(self.name))
- self.test_that = which('test_that')
- if not self.test_that:
- message = ('Could not find "test_that"; {} must be running in a ChromeOS SDK chroot '
- '(did you execute "cros_sdk"?)')
- raise WorkloadError(message.format(self.name))
- self.command = self._build_command()
- self.raw_output = None
- # make sure no other test is running
- self.device.execute('killall -9 autotest', check_exit_code=False)
-
- def run(self, context):
- self.logger.debug(self.command)
- self.raw_output, _ = check_output(self.command, timeout=self.run_timeout, shell=True)
-
- def update_result(self, context):
- if not self.raw_output:
- self.logger.warning('No power_LoadTest output detected; run failed?')
- return
- raw_outfile = os.path.join(context.output_directory, 'power_loadtest.raw')
- with open(raw_outfile, 'w') as wfh:
- wfh.write(self.raw_output)
- context.add_artifact('power_LoadTest_raw', raw_outfile, kind='raw')
- lines = iter(self.raw_output.split('\n'))
- # Results are delimitted from the rest of the output by MARKER
- for line in lines:
- if MARKER in line:
- break
- for line in lines:
- match = STATUS_REGEX.search(line)
- if match:
- status = match.group(1)
- if status != 'PASSED':
- self.logger.warning(line)
- match = METRIC_REGEX.search(line)
- if match:
- try:
- context.result.add_metric(match.group(1), numeric(match.group(2)), lower_is_better=True)
- except ValueError:
- pass # non-numeric metrics aren't supported
-
- def _build_command(self):
- test_name = 'power_LoadTest'
- if self.variant:
- test_name += '.' + self.variant
- parts = [self.test_that, self.device.host, test_name]
- if self.board:
- parts.append('-b {}'.format(self.board))
- parts.append(str(self.test_that_args))
- return ' '.join(parts)
-
diff --git a/wlauto/workloads/quadrant/__init__.py b/wlauto/workloads/quadrant/__init__.py
deleted file mode 100644
index 5670ceea..00000000
--- a/wlauto/workloads/quadrant/__init__.py
+++ /dev/null
@@ -1,112 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import re
-from collections import defaultdict
-
-from wlauto import AndroidUiAutoBenchmark
-
-
-TEST_TYPES = {
- 'benchmark_cpu_branching_logic': 'time',
- 'benchmark_cpu_matrix_int': 'time',
- 'benchmark_cpu_matrix_long': 'time',
- 'benchmark_cpu_matrix_short': 'time',
- 'benchmark_cpu_matrix_byte': 'time',
- 'benchmark_cpu_matrix_float': 'time',
- 'benchmark_cpu_matrix_double': 'time',
- 'benchmark_cpu_checksum': 'time',
- 'benchmark_cpu': 'aggregate',
- 'benchmark_memory_transfer': 'time',
- 'benchmark_memory': 'aggregate',
- 'benchmark_io_fs_write': 'time',
- 'benchmark_io_fs_read': 'time',
- 'benchmark_io_db_write': 'time',
- 'benchmark_io_db_read': 'time',
- 'benchmark_io': 'aggregate',
- 'benchmark_g2d_fractal': 'rate',
- 'benchmark_g2d': 'aggregate',
- 'benchmark_g3d_corridor': 'rate',
- 'benchmark_g3d_planet': 'rate',
- 'benchmark_g3d_dna': 'rate',
- 'benchmark_g3d': 'aggregate',
- 'benchmark': 'aggregate',
-}
-
-TYPE_TESTS = defaultdict(list)
-for k, v in TEST_TYPES.iteritems():
- TYPE_TESTS[v].append(k)
-
-TYPE_UNITS = {
- 'time': 'ms',
- 'rate': 'Hz',
-}
-
-REGEX_TEMPLATES = {
- 'aggregate': r'(?P<metric>{}) aggregate score is (?P<score>\d+)',
- 'time': r'(?P<metric>{}) executed in (?P<time>\d+) ms, '
- r'reference time: (?P<reference>\d+) ms, '
- r'score: (?P<score>\d+)',
- 'rate': r'(?P<metric>{}) executed with a rate of (?P<rate>[0-9.]+)/sec, '
- r'reference rate: (?P<reference>[0-9.]+)/sec, '
- r'score: (?P<score>\d+)',
-}
-
-TEST_REGEXES = {}
-for test_, type_ in TEST_TYPES.items():
- TEST_REGEXES[test_] = re.compile(REGEX_TEMPLATES[type_].format(test_))
-
-
-class Quadrant(AndroidUiAutoBenchmark):
-
- name = 'quadrant'
- description = """
- Quadrant is a benchmark for mobile devices, capable of measuring CPU, memory,
- I/O and 3D graphics performance.
-
- http://www.aurorasoftworks.com/products/quadrant
-
- From the website:
- Quadrant outputs a score for the following categories: 2D, 3D, Mem, I/O, CPU
- , Total.
- """
- package = 'com.aurorasoftworks.quadrant.ui.professional'
- activity = '.QuadrantProfessionalLauncherActivity'
- summary_metrics = ['benchmark_score']
-
- run_timeout = 10 * 60
-
- def __init__(self, device, **kwargs):
- super(Quadrant, self).__init__(device, **kwargs)
- self.uiauto_params['has_gpu'] = self.device.has_gpu
- self.regex = {}
-
- def update_result(self, context):
- super(Quadrant, self).update_result(context)
- with open(self.logcat_log) as fh:
- for line in fh:
- for test, regex in TEST_REGEXES.items():
- match = regex.search(line)
- if match:
- test_type = TEST_TYPES[test]
- data = match.groupdict()
- if test_type != 'aggregate':
- context.result.add_metric(data['metric'] + '_' + test_type,
- data[test_type],
- TYPE_UNITS[test_type])
- context.result.add_metric(data['metric'] + '_score', data['score'])
- break
-
diff --git a/wlauto/workloads/quadrant/com.arm.wlauto.uiauto.quadrant.jar b/wlauto/workloads/quadrant/com.arm.wlauto.uiauto.quadrant.jar
deleted file mode 100644
index 2c5aac3d..00000000
--- a/wlauto/workloads/quadrant/com.arm.wlauto.uiauto.quadrant.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/quadrant/uiauto/build.sh b/wlauto/workloads/quadrant/uiauto/build.sh
deleted file mode 100755
index eba2b1cc..00000000
--- a/wlauto/workloads/quadrant/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.quadrant.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.quadrant.jar ..
-fi
diff --git a/wlauto/workloads/quadrant/uiauto/build.xml b/wlauto/workloads/quadrant/uiauto/build.xml
deleted file mode 100644
index 113eccbe..00000000
--- a/wlauto/workloads/quadrant/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.quadrant" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/quadrant/uiauto/project.properties b/wlauto/workloads/quadrant/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/quadrant/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/quadrant/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/quadrant/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index f8fe8749..00000000
--- a/wlauto/workloads/quadrant/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,120 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.quadrant;
-
-import java.util.concurrent.TimeUnit;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "quadrant";
-
- public void runUiAutomation() throws Exception {
- Bundle status = new Bundle();
- Bundle params = getParams();
- boolean hasGpu = Boolean.parseBoolean(params.getString("has_gpu").toLowerCase());
-
- clearLogcat();
- handleFtuInfoDialogIfNecessary();
- goToRunCustomBenchmark();
- selectTestsToRun(hasGpu);
- hitStart();
- handleWarningIfNecessary();
- waitForResults();
-
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
- public void handleFtuInfoDialogIfNecessary() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject infoText = new UiObject(selector.text("Information"));
- if (infoText.waitForExists(TimeUnit.SECONDS.toMillis(10)))
- {
- UiObject okButton = new UiObject(selector.text("OK")
- .className("android.widget.Button"));
- okButton.click();
- }
- else
- {
- // FTU dialog didn't come up.
- }
- }
-
- public void goToRunCustomBenchmark() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject runCustom = new UiObject(selector.text("Run custom benchmark")
- .className("android.widget.TextView"));
- runCustom.clickAndWaitForNewWindow();
- }
-
- // By default, all tests are selected. However, if our device does not have a GPU, then
- // running graphics tests may cause a crash, so we disable those.
- public void selectTestsToRun(boolean hasGpu) throws Exception {
- if(!hasGpu) {
- UiSelector selector = new UiSelector();
- UiObject gfx2d = new UiObject(selector.text("2D graphics")
- .className("android.widget.CheckBox"));
- gfx2d.click();
-
- UiObject gfx3d = new UiObject(selector.text("3D graphics")
- .className("android.widget.CheckBox"));
- gfx3d.click();
- }
- }
-
- public void hitStart() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject startButton = new UiObject(selector.text("Start")
- .className("android.widget.Button")
- .packageName("com.aurorasoftworks.quadrant.ui.professional"));
- startButton.click();
- }
-
- // Even if graphics tests aren't selected, Quadrant will still show a warning about running
- // with software rendering.
- public void handleWarningIfNecessary() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject warning = new UiObject(selector.text("Warning"));
- if (warning.waitForExists(TimeUnit.SECONDS.toMillis(2))) {
- UiObject closeButton = new UiObject(selector.text("Close")
- .className("android.widget.Button"));
- if (closeButton.exists()) {
- closeButton.click();
- }
- }
- else
- {
- // Warning dialog didn't come up.
- }
- }
-
- public void waitForResults() throws Exception {
- waitForLogcatText("benchmark aggregate score is", TimeUnit.SECONDS.toMillis(200));
- }
-}
diff --git a/wlauto/workloads/real_linpack/__init__.py b/wlauto/workloads/real_linpack/__init__.py
deleted file mode 100644
index 2f8121ab..00000000
--- a/wlauto/workloads/real_linpack/__init__.py
+++ /dev/null
@@ -1,66 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,W0201,E0203
-
-import re
-
-from wlauto import AndroidUiAutoBenchmark, Parameter
-
-
-class RealLinpack(AndroidUiAutoBenchmark):
-
- name = 'real-linpack'
- description = """
- This version of `Linpack <http://en.wikipedia.org/wiki/LINPACK_benchmarks>`
- was developed by Dave Butcher. RealLinpack tries to find the number of threads
- that give you the maximum linpack score.
-
- RealLinpack runs 20 runs of linpack for each number of threads and
- calculates the mean and confidence. It stops when the
- score's confidence interval drops below the current best score
- interval. That is, when (current_score + confidence) < (best_score -
- best_score_confidence)
-
- """
- package = 'com.arm.RealLinpack'
- activity = '.RealLinpackActivity'
-
- parameters = [
- Parameter('max_threads', kind=int, default=16, constraint=lambda x: x > 0,
- description='The maximum number of threads that real linpack will try.'),
- ]
-
- def __init__(self, device, **kwargs):
- super(RealLinpack, self).__init__(device, **kwargs)
- self.uiauto_params['max_threads'] = self.max_threads
- self.run_timeout = 120 + 120 * self.max_threads # a base of 2 minutes plus 2 minutes for each thread
-
- def update_result(self, context):
- super(RealLinpack, self).update_result(context)
- score_regex = re.compile(r'Optimum.*threads:\s*([0-9])+.*score:\s*([0-9]+\.[0-9]+).*MFLOPS')
- match_found = False
- with open(self.logcat_log) as logcat_file:
- for line in logcat_file:
- match = re.search(score_regex, line)
- if match:
- number_of_threads = match.group(1)
- score = match.group(2)
- context.result.add_metric('optimal number of threads', number_of_threads, None)
- context.result.add_metric('score', score, 'MFLOPS')
- match_found = True
- break
- if not match_found:
- self.logger.warning('Failed To collect results for real linpack')
diff --git a/wlauto/workloads/real_linpack/com.arm.wlauto.uiauto.reallinpack.jar b/wlauto/workloads/real_linpack/com.arm.wlauto.uiauto.reallinpack.jar
deleted file mode 100644
index 133435d0..00000000
--- a/wlauto/workloads/real_linpack/com.arm.wlauto.uiauto.reallinpack.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/real_linpack/uiauto/build.sh b/wlauto/workloads/real_linpack/uiauto/build.sh
deleted file mode 100755
index 645f225a..00000000
--- a/wlauto/workloads/real_linpack/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.reallinpack.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.reallinpack.jar ..
-fi
diff --git a/wlauto/workloads/real_linpack/uiauto/build.xml b/wlauto/workloads/real_linpack/uiauto/build.xml
deleted file mode 100644
index 7771a5d3..00000000
--- a/wlauto/workloads/real_linpack/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.reallinpack" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/real_linpack/uiauto/project.properties b/wlauto/workloads/real_linpack/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/real_linpack/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/real_linpack/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/real_linpack/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index a24d9783..00000000
--- a/wlauto/workloads/real_linpack/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,51 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.reallinpack;
-
-import android.app.Activity;
-import android.os.Bundle;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public void runUiAutomation() throws Exception{
- Bundle status = new Bundle();
- status.putString("product", getUiDevice().getProductName());
- UiSelector selector = new UiSelector();
- // set the maximum number of threads
- String maxThreads = getParams().getString("max_threads");
- UiObject maxThreadNumberField = new UiObject(selector.index(3));
- maxThreadNumberField.clearTextField();
- maxThreadNumberField.setText(maxThreads);
- // start the benchamrk
- UiObject btn_st = new UiObject(selector.text("Run"));
- btn_st.click();
- btn_st.waitUntilGone(500);
- // set timeout for the benchmark
- btn_st.waitForExists(60 * 60 * 1000);
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
-}
diff --git a/wlauto/workloads/realracing3/__init__.py b/wlauto/workloads/realracing3/__init__.py
deleted file mode 100644
index cfeaa416..00000000
--- a/wlauto/workloads/realracing3/__init__.py
+++ /dev/null
@@ -1,35 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import time
-
-from wlauto.common.android.workload import GameWorkload
-from wlauto.exceptions import WorkloadError, DeviceError
-
-
-class RealRacing3(GameWorkload):
-
- name = 'realracing3'
- description = """
- Real Racing 3 game.
- """
- package = 'com.ea.games.r3_row'
- activity = 'com.firemint.realracing3.MainActivity'
- loading_time = 90
- asset_file = 'com.ea.games.r3_row.tar.gz'
- saved_state_file = 'rr3-save.tar.gz'
-
-
diff --git a/wlauto/workloads/recentfling/__init__.py b/wlauto/workloads/recentfling/__init__.py
deleted file mode 100644
index 152758e9..00000000
--- a/wlauto/workloads/recentfling/__init__.py
+++ /dev/null
@@ -1,100 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-#pylint: disable=E1101,W0201
-
-import os
-import re
-from collections import defaultdict
-
-from wlauto import Workload, Parameter, File
-from wlauto.utils.types import caseless_string
-from wlauto.exceptions import WorkloadError
-
-
-class Recentfling(Workload):
-
- name = 'recentfling'
- description = """
- Tests UI jank on android devices.
-
- For this workload to work, ``recentfling.sh`` and ``defs.sh`` must be placed
- in ``~/.workload_automation/dependencies/recentfling/``. These can be found
- in the [AOSP Git repository](https://android.googlesource.com/platform/system/extras/+/master/tests/).
-
- To change the apps that are opened at the start of the workload you will need
- to modify the ``defs.sh`` file. You will need to add your app to ``dfltAppList``
- and then add a variable called ``{app_name}Activity`` with the name of the
- activity to launch (where ``{add_name}`` is the name you put into ``dfltAppList``).
-
- You can get a list of activities available on your device by running
- ``adb shell pm list packages -f``
- """
- supported_platforms = ['android']
-
- parameters = [
- Parameter('loops', kind=int, default=3,
- description="The number of test iterations."),
- ]
-
- def initialise(self, context): # pylint: disable=no-self-use
- if context.device.get_sdk_version() < 23:
- raise WorkloadError("This workload relies on ``dumpsys gfxinfo`` \
- only present in Android M and onwards")
-
- def setup(self, context):
- self.defs_host = context.resolver.get(File(self, "defs.sh"))
- self.recentfling_host = context.resolver.get(File(self, "recentfling.sh"))
- self.device.push(self.recentfling_host, self.device.working_directory)
- self.device.push(self.defs_host, self.device.working_directory)
- self._kill_recentfling()
- self.device.ensure_screen_is_on()
-
- def run(self, context):
- cmd = "echo $$>{dir}/pidfile; exec {dir}/recentfling.sh -i {}; rm {dir}/pidfile"
- cmd = cmd.format(self.loops, dir=self.device.working_directory)
- try:
- self.output = self.device.execute(cmd, timeout=120)
- except KeyboardInterrupt:
- self._kill_recentfling()
- raise
-
- def update_result(self, context):
- group_names = ["90th Percentile", "95th Percentile", "99th Percentile", "Jank", "Jank%"]
- count = 0
- for line in self.output.strip().splitlines():
- p = re.compile("Frames: \d+ latency: (?P<pct90>\d+)/(?P<pct95>\d+)/(?P<pct99>\d+) Janks: (?P<jank>\d+)\((?P<jank_pct>\d+)%\)")
- match = p.search(line)
- if match:
- count += 1
- if line.startswith("AVE: "):
- group_names = ["Average " + g for g in group_names]
- count = 0
- for metric in zip(group_names, match.groups()):
- context.result.add_metric(metric[0],
- metric[1],
- None,
- classifiers={"loop": count or "Average"})
-
- def teardown(self, context):
- self.device.remove(self.device.path.join(self.device.working_directory,
- "recentfling.sh"))
- self.device.remove(self.device.path.join(self.device.working_directory,
- "defs.sh"))
-
- def _kill_recentfling(self):
- pid = self.device.execute('cat {}/pidfile'.format(self.device.working_directory))
- if pid:
- self.device.kill(pid.strip(), signal='SIGKILL')
diff --git a/wlauto/workloads/rt_app/LICENSE b/wlauto/workloads/rt_app/LICENSE
deleted file mode 100644
index e03ab67e..00000000
--- a/wlauto/workloads/rt_app/LICENSE
+++ /dev/null
@@ -1,8 +0,0 @@
-rt-app binaries and workgen script included with this workload are distributed
-under GPL version 2; The full text of the license may be viewed here:
-
-http://www.gnu.org/licenses/gpl-2.0.html
-
-Source for these binaries may be obtained from Linaro here:
-
-https://git.linaro.org/power/rt-app.git
diff --git a/wlauto/workloads/rt_app/__init__.py b/wlauto/workloads/rt_app/__init__.py
deleted file mode 100644
index 82253e6e..00000000
--- a/wlauto/workloads/rt_app/__init__.py
+++ /dev/null
@@ -1,281 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import re
-import json
-import tarfile
-from collections import OrderedDict
-from subprocess import CalledProcessError
-
-from wlauto import Workload, Parameter, Executable, File
-from wlauto.exceptions import WorkloadError, ResourceError
-from wlauto.instrumentation import instrument_is_enabled
-from wlauto.utils.misc import check_output
-
-RAW_OUTPUT_FILENAME = 'raw-output.txt'
-TARBALL_FILENAME = 'rtapp-logs.tar.gz'
-BINARY_NAME = 'rt-app'
-PACKAGED_USE_CASE_DIRECTORY = os.path.abspath(os.path.join(os.path.dirname(__file__), 'use_cases'))
-
-PLOAD_REGEX = re.compile(r'pLoad = (\d+)(\w+) : calib_cpu (\d+)')
-ERROR_REGEX = re.compile(r'error')
-CRIT_REGEX = re.compile(r'crit')
-
-
-class RtApp(Workload):
- # pylint: disable=no-member,attribute-defined-outside-init
-
- name = 'rt-app'
- description = """
- A test application that simulates cofigurable real-time periodic load.
-
- rt-app is a test application that starts multiple periodic threads in order to
- simulate a real-time periodic load. It supports SCHED_OTHER, SCHED_FIFO,
- SCHED_RR as well as the AQuoSA framework and SCHED_DEADLINE.
-
- The load is described using JSON-like config files. Below are a couple of simple
- examples.
-
- .. code-block:: json
-
- {
- /*
- * Simple use case which creates a thread that run 1ms then sleep 9ms
- * until the use case is stopped with Ctrl+C
- */
- "tasks" : {
- "thread0" : {
- "loop" : -1,
- "run" : 20000,
- "sleep" : 80000
- }
- },
- "global" : {
- "duration" : 2,
- "calibration" : "CPU0",
- "default_policy" : "SCHED_OTHER",
- "pi_enabled" : false,
- "lock_pages" : false,
- "logdir" : "./",
- "log_basename" : "rt-app1",
- "ftrace" : false,
- "gnuplot" : true,
- }
- }
-
- .. code-block:: json
-
- {
- /*
- * Simple use case with 2 threads that runs for 10 ms and wake up each
- * other until the use case is stopped with Ctrl+C
- */
- "tasks" : {
- "thread0" : {
- "loop" : -1,
- "run" : 10000,
- "resume" : "thread1",
- "suspend" : "thread0"
- },
- "thread1" : {
- "loop" : -1,
- "run" : 10000,
- "resume" : "thread0",
- "suspend" : "thread1"
- }
- }
- }
-
- Please refer to the exising configs in ``%s`` for more examples.
-
- The version of rt-app currently used with this workload contains enhancements and
- modifications done by Linaro. The source code for this version may be obtained here:
-
- http://git.linaro.org/power/rt-app.git
-
- The upstream version of rt-app is hosted here:
-
- https://github.com/scheduler-tools/rt-app
-
- """ % PACKAGED_USE_CASE_DIRECTORY
-
- parameters = [
- Parameter('config', kind=str, default='taskset',
- description='''
- Use case configuration file to run with rt-app. This may be
- either the name of one of the "standard" configuratons included
- with the workload. or a path to a custom JSON file provided by
- the user. Either way, the ".json" extension is implied and will
- be added automatically if not specified in the argument.
-
- The following is th list of standard configuraionts currently
- included with the workload: {}
-
- '''.format(', '.join(os.listdir(PACKAGED_USE_CASE_DIRECTORY)))),
- Parameter('duration', kind=int,
- description='''
- Duration of the workload execution in Seconds. If specified, this
- will override the corresponing parameter in the JSON config.
- '''),
- Parameter('taskset_mask', kind=int,
- description='Constrain execution to specific CPUs.'),
- Parameter('uninstall_on_exit', kind=bool, default=False,
- description="""
- If set to ``True``, rt-app binary will be uninstalled from the device
- at the end of the run.
- """),
- Parameter('force_install', kind=bool, default=False,
- description="""
- If set to ``True``, rt-app binary will always be deployed to the
- target device at the begining of the run, regardless of whether it
- was already installed there.
- """),
- ]
-
- def initialize(self, context):
- # initialize() runs once per run. setting a class variable to make it
- # available to other instances of the workload
- RtApp.device_working_directory = self.device.path.join(self.device.working_directory,
- 'rt-app-working')
- RtApp.host_binary = context.resolver.get(Executable(self,
- self.device.abi,
- BINARY_NAME), strict=False)
- RtApp.workgen_script = context.resolver.get(File(self, 'workgen'))
- if not self.device.is_rooted: # some use cases require root privileges
- raise WorkloadError('rt-app requires the device to be rooted.')
- self.device.execute('mkdir -p {}'.format(self.device_working_directory))
- self._deploy_rt_app_binary_if_necessary()
-
- def setup(self, context):
- self.log_basename = context.spec.label
- self.host_json_config = self._load_json_config(context)
- self.config_file_on_device = self.device.path.join(self.device_working_directory,
- os.path.basename(self.host_json_config))
- self.device.push(self.host_json_config, self.config_file_on_device, timeout=60)
- self.command = '{} {}'.format(self.device_binary, self.config_file_on_device)
-
- time_buffer = 30
- self.timeout = self.duration + time_buffer
-
- def run(self, context):
- self.output = self.device.invoke(self.command,
- on_cpus=self.taskset_mask,
- timeout=self.timeout,
- as_root=True)
-
- def update_result(self, context):
- self._pull_rt_app_logs(context)
- context.result.classifiers = dict(
- duration=self.duration,
- task_count=self.task_count,
- )
-
- outfile = os.path.join(context.output_directory, RAW_OUTPUT_FILENAME)
- with open(outfile, 'w') as wfh:
- wfh.write(self.output)
-
- error_count = 0
- crit_count = 0
- for line in self.output.split('\n'):
- match = PLOAD_REGEX.search(line)
- if match:
- pload_value = match.group(1)
- pload_unit = match.group(2)
- calib_cpu_value = match.group(3)
- context.result.add_metric('pLoad', float(pload_value), pload_unit)
- context.result.add_metric('calib_cpu', float(calib_cpu_value))
-
- error_match = ERROR_REGEX.search(line)
- if error_match:
- error_count += 1
-
- crit_match = CRIT_REGEX.search(line)
- if crit_match:
- crit_count += 1
-
- context.result.add_metric('error_count', error_count, 'count')
- context.result.add_metric('crit_count', crit_count, 'count')
-
- def finalize(self, context):
- if self.uninstall_on_exit:
- self.device.uninstall(self.device_binary)
- self.device.execute('rm -rf {}'.format(self.device_working_directory))
-
- def _deploy_rt_app_binary_if_necessary(self):
- # called from initialize() so gets invoked once per run
- RtApp.device_binary = self.device.get_installed("rt-app")
- if self.force_install or not RtApp.device_binary:
- if not self.host_binary:
- message = '''rt-app is not installed on the device and could not be
- found in workload resources'''
- raise ResourceError(message)
- RtApp.device_binary = self.device.install(self.host_binary)
-
- def _load_json_config(self, context):
- user_config_file = self._get_raw_json_config(context.resolver)
- config_file = self._generate_workgen_config(user_config_file,
- context.output_directory)
- with open(config_file) as fh:
- config_data = json.load(fh, object_pairs_hook=OrderedDict)
- self._update_rt_app_config(config_data)
- self.duration = config_data['global'].get('duration', 0)
- self.task_count = len(config_data.get('tasks', []))
- with open(config_file, 'w') as wfh:
- json.dump(config_data, wfh, indent=4)
- return config_file
-
- def _get_raw_json_config(self, resolver):
- if os.path.splitext(self.config)[1] != '.json':
- self.config += '.json'
- if os.path.isfile(self.config):
- return os.path.abspath(self.config)
- partial_path = os.path.join('use_cases', self.config)
- return resolver.get(File(self, partial_path))
-
- def _generate_workgen_config(self, user_file, output_directory):
- output_file = os.path.join(output_directory, 'unkind.json')
- # use workgen dry run option to generate a use case
- # file with proper JSON grammar on host first
- try:
- check_output('python {} -d -o {} {}'.format(self.workgen_script,
- output_file,
- user_file),
- shell=True)
- except CalledProcessError as e:
- message = 'Could not generate config using workgen, got "{}"'
- raise WorkloadError(message.format(e))
- return output_file
-
- def _update_rt_app_config(self, config_data):
- config_data['global'] = config_data.get('global', {})
- config_data['global']['logdir'] = self.device_working_directory
- config_data['global']['log_basename'] = self.log_basename
- if self.duration is not None:
- config_data['global']['duration'] = self.duration
-
- def _pull_rt_app_logs(self, context):
- tar_command = '{} tar czf {}/{} -C {} .'.format(self.device.busybox,
- self.device_working_directory,
- TARBALL_FILENAME,
- self.device_working_directory)
- self.device.execute(tar_command, timeout=300)
- device_path = self.device.path.join(self.device_working_directory, TARBALL_FILENAME)
- host_path = os.path.join(context.output_directory, TARBALL_FILENAME)
- self.device.pull(device_path, host_path, timeout=120)
- with tarfile.open(host_path, 'r:gz') as tf:
- tf.extractall(context.output_directory)
- os.remove(host_path)
- self.device.execute('rm -rf {}/*'.format(self.device_working_directory))
diff --git a/wlauto/workloads/rt_app/bin/arm64/rt-app b/wlauto/workloads/rt_app/bin/arm64/rt-app
deleted file mode 100755
index 0d251ee3..00000000
--- a/wlauto/workloads/rt_app/bin/arm64/rt-app
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/rt_app/bin/armeabi/rt-app b/wlauto/workloads/rt_app/bin/armeabi/rt-app
deleted file mode 100755
index ff862e7b..00000000
--- a/wlauto/workloads/rt_app/bin/armeabi/rt-app
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/rt_app/use_cases/browser-long.json b/wlauto/workloads/rt_app/use_cases/browser-long.json
deleted file mode 100644
index be8df0c4..00000000
--- a/wlauto/workloads/rt_app/use_cases/browser-long.json
+++ /dev/null
@@ -1,134 +0,0 @@
-{
- "tasks" : {
- "BrowserMain" : {
- "loop" : 3,
- "phases" : {
- "start" : {
- "loop" : 1,
- "sleep" : 400000,
- "run" : 15000,
- "resume" : "Browser",
- "run" : 7000,
- "sleep" : 8000
- },
- "render1" : {
- "loop" : 50,
- "resume" : "BrowserSub",
- "run" : 3000
- },
- "render2" : {
- "loop" : 1,
- "suspend" : "Browser",
- "run" : 10000,
- "resume" : "Browser",
- "run" : 5000
- },
- "render3" : {
- "loop" : 20,
- "resume" : "BrowserSub",
- "run" : 3000
- },
- "stop" : {
- "loop" : 1,
- "run" : 2000,
- "sleep" : 200000,
- "suspend" : "Browser",
- "sleep" : 600000
- },
- "scroll" : {
- "loop" : 4,
- "resume" : "Browser",
- "suspend" : "BrowserNext",
- "run" : 1000
- },
- "stop2" : {
- "loop" : 1,
- "suspend" : "Browser",
- "run" : 200,
- "sleep" : 800000
- }
- }
- },
- "BrowserSub1" : {
- "priority" : -6,
- "loop" : -1,
- "suspend" : "BrowserSub",
- "run" : 100
- },
- "BrowserSub2" : {
- "priority" : -6,
- "loop" : -1,
- "suspend" : "BrowserSub",
- "run" : 100
- },
- "BrowserDisplay" : {
- "priority" : -6,
- "loop" : -1,
- "suspend" : "Browser",
- "run" : 300,
- "resume" : "BrowserNext",
- "run" : 12000,
- "lock" : "mutex11",
- "sync" : { "ref" : "queue11", "mutex": "mutex11" },
- "unlock" : "mutex11",
- "run" : 300,
- "resume" : "Binder-display",
- "run" : 400
- },
- "Binder-dummy" : {
- "priority" : -6,
- "loop" : -1,
- "lock" : "mutex11",
- "wait" : { "ref" : "queue11", "mutex": "mutex11" },
- "unlock" : "mutex11",
- "run" : 200,
- "lock" : "mutex11",
- "signal" : "queue11",
- "unlock" : "mutex11",
- "run" : 100
- },
- "Binder-display" : {
- "priority" : -6,
- "loop" : -1,
- "suspend" : "Binder-display",
- "run" : 300,
- "resume" : "Event-Browser",
- "resume" : "Event-Display"
- },
- "Event-Browser" : {
- "priority" : -9,
- "loop" : -1,
- "suspend" : "Event-Browser",
- "run" : 50,
- "sleep" : 16000,
- "run" : 50,
- "resume" : "Browser"
- },
- "Event-Display" : {
- "priority" : -9,
- "loop" : -1,
- "suspend" : "Event-Display",
- "run" : 50,
- "sleep" : 16000,
- "run" : 50,
- "resume" : "Display"
- },
- "Display" : {
- "priority" : -8,
- "loop" : -1,
- "suspend" : "Display",
- "run" : 16000
- },
- },
- "global" : {
- "default_policy" : "SCHED_OTHER",
- "duration" : 600,
- "ftrace" : false,
- "gnuplot" : false,
- "logdir" : "./",
- "log_basename" : "web",
- "lock_pages" : true,
- "frag" : 1,
- "calibration" : "CPU0"
- }
-}
diff --git a/wlauto/workloads/rt_app/use_cases/browser-short.json b/wlauto/workloads/rt_app/use_cases/browser-short.json
deleted file mode 100644
index 46631040..00000000
--- a/wlauto/workloads/rt_app/use_cases/browser-short.json
+++ /dev/null
@@ -1,134 +0,0 @@
-{
- "tasks" : {
- "BrowserMain" : {
- "loop" : 3,
- "phases" : {
- "start" : {
- "loop" : 1,
- "sleep" : 400000,
- "run" : 15000,
- "resume" : "Browser",
- "run" : 7000,
- "sleep" : 8000
- },
- "render1" : {
- "loop" : 50,
- "resume" : "BrowserSub",
- "run" : 3000
- },
- "render2" : {
- "loop" : 1,
- "suspend" : "Browser",
- "run" : 10000,
- "resume" : "Browser",
- "run" : 5000
- },
- "render3" : {
- "loop" : 20,
- "resume" : "BrowserSub",
- "run" : 3000
- },
- "stop" : {
- "loop" : 1,
- "run" : 2000,
- "sleep" : 200000,
- "suspend" : "Browser",
- "sleep" : 600000
- },
- "scroll" : {
- "loop" : 4,
- "resume" : "Browser",
- "suspend" : "BrowserNext",
- "run" : 1000
- },
- "stop2" : {
- "loop" : 1,
- "suspend" : "Browser",
- "run" : 200,
- "sleep" : 800000
- }
- }
- },
- "BrowserSub1" : {
- "priority" : -6,
- "loop" : -1,
- "suspend" : "BrowserSub",
- "run" : 100
- },
- "BrowserSub2" : {
- "priority" : -6,
- "loop" : -1,
- "suspend" : "BrowserSub",
- "run" : 100
- },
- "BrowserDisplay" : {
- "priority" : -6,
- "loop" : -1,
- "suspend" : "Browser",
- "run" : 300,
- "resume" : "BrowserNext",
- "run" : 12000,
- "lock" : "mutex11",
- "sync" : { "ref" : "queue11", "mutex": "mutex11" },
- "unlock" : "mutex11",
- "run" : 300,
- "resume" : "Binder-display",
- "run" : 400
- },
- "Binder-dummy" : {
- "priority" : -6,
- "loop" : -1,
- "lock" : "mutex11",
- "wait" : { "ref" : "queue11", "mutex": "mutex11" },
- "unlock" : "mutex11",
- "run" : 200,
- "lock" : "mutex11",
- "signal" : "queue11",
- "unlock" : "mutex11",
- "run" : 100
- },
- "Binder-display" : {
- "priority" : -6,
- "loop" : -1,
- "suspend" : "Binder-display",
- "run" : 300,
- "resume" : "Event-Browser",
- "resume" : "Event-Display"
- },
- "Event-Browser" : {
- "priority" : -9,
- "loop" : -1,
- "suspend" : "Event-Browser",
- "run" : 50,
- "sleep" : 16000,
- "run" : 50,
- "resume" : "Browser"
- },
- "Event-Display" : {
- "priority" : -9,
- "loop" : -1,
- "suspend" : "Event-Display",
- "run" : 50,
- "sleep" : 16000,
- "run" : 50,
- "resume" : "Display"
- },
- "Display" : {
- "priority" : -8,
- "loop" : -1,
- "suspend" : "Display",
- "run" : 16000
- },
- },
- "global" : {
- "default_policy" : "SCHED_OTHER",
- "duration" : 6,
- "ftrace" : false,
- "gnuplot" : false,
- "logdir" : "./",
- "log_basename" : "web",
- "lock_pages" : true,
- "frag" : 1,
- "calibration" : "CPU0"
- }
-}
diff --git a/wlauto/workloads/rt_app/use_cases/mp3-long.json b/wlauto/workloads/rt_app/use_cases/mp3-long.json
deleted file mode 100644
index 7c179d0e..00000000
--- a/wlauto/workloads/rt_app/use_cases/mp3-long.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
- "tasks" : {
- "AudioTick" : {
- "priority" : -19,
- "loop" : -1,
- "cpus" : [0],
- "phases" : {
- "p1" : {
- "loop" : 1,
- "resume" : "AudioOut",
- "timer" : { "ref" : "tick", "period": 6000 }
- },
- "p2" : {
- "loop" : 4,
- "timer" : { "ref" : "tick", "period": 6000 }
- }
- }
- },
- "AudioOut" : {
- "priority" : -19,
- "loop" : -1,
- "run" : 275,
- "resume" : "AudioTrack",
- "run" : 4725,
- "suspend" : "AudioOut"
- },
- "AudioTrack" : {
- "priority" : -16,
- "loop" : -1,
- "suspend" : "AudioTrack",
- "run" : 300,
- "resume" : "mp3.decoder"
- },
- "mp3.decoder" : {
- "priority" : -2,
- "loop" : -1,
- "suspend" : "mp3.decoder",
- "run" : 1000,
- "lock" : "mutex",
- "signal" : "queue",
- "wait" : { "ref" : "queue", "mutex": "mutex" },
- "unlock" : "mutex",
- "run" : 150
- },
- "OMXCall" : {
- "priority" : -2,
- "loop" : -1,
- "lock" : "mutex",
- "wait" : { "ref" : "queue", "mutex": "mutex" },
- "unlock" : "mutex",
- "run" : 300,
- "lock" : "mutex",
- "signal" : "queue",
- "unlock" : "mutex"
- }
- },
- "global" : {
- "default_policy" : "SCHED_OTHER",
- "duration" : 600,
- "ftrace" : false,
- "gnuplot" : false,
- "logdir" : "./",
- "log_basename" : "mp3",
- "lock_pages" : true,
- "frag" : 1,
- "calibration" : "CPU0"
- }
-}
diff --git a/wlauto/workloads/rt_app/use_cases/mp3-short.json b/wlauto/workloads/rt_app/use_cases/mp3-short.json
deleted file mode 100644
index ad307233..00000000
--- a/wlauto/workloads/rt_app/use_cases/mp3-short.json
+++ /dev/null
@@ -1,68 +0,0 @@
-{
- "tasks" : {
- "AudioTick" : {
- "priority" : -19,
- "loop" : -1,
- "cpus" : [0],
- "phases" : {
- "p1" : {
- "loop" : 1,
- "resume" : "AudioOut",
- "timer" : { "ref" : "tick", "period": 6000 }
- },
- "p2" : {
- "loop" : 4,
- "timer" : { "ref" : "tick", "period": 6000 }
- }
- }
- },
- "AudioOut" : {
- "priority" : -19,
- "loop" : -1,
- "run" : 275,
- "resume" : "AudioTrack",
- "run" : 4725,
- "suspend" : "AudioOut"
- },
- "AudioTrack" : {
- "priority" : -16,
- "loop" : -1,
- "suspend" : "AudioTrack",
- "run" : 300,
- "resume" : "mp3.decoder"
- },
- "mp3.decoder" : {
- "priority" : -2,
- "loop" : -1,
- "suspend" : "mp3.decoder",
- "run" : 1000,
- "lock" : "mutex",
- "signal" : "queue",
- "wait" : { "ref" : "queue", "mutex": "mutex" },
- "unlock" : "mutex",
- "run" : 150
- },
- "OMXCall" : {
- "priority" : -2,
- "loop" : -1,
- "lock" : "mutex",
- "wait" : { "ref" : "queue", "mutex": "mutex" },
- "unlock" : "mutex",
- "run" : 300,
- "lock" : "mutex",
- "signal" : "queue",
- "unlock" : "mutex"
- }
- },
- "global" : {
- "default_policy" : "SCHED_OTHER",
- "duration" : 6,
- "ftrace" : false,
- "gnuplot" : false,
- "logdir" : "./",
- "log_basename" : "mp3",
- "lock_pages" : true,
- "frag" : 1,
- "calibration" : "CPU0"
- }
-}
diff --git a/wlauto/workloads/rt_app/use_cases/spreading-tasks.json b/wlauto/workloads/rt_app/use_cases/spreading-tasks.json
deleted file mode 100644
index be781187..00000000
--- a/wlauto/workloads/rt_app/use_cases/spreading-tasks.json
+++ /dev/null
@@ -1,52 +0,0 @@
-{
- "tasks" : {
- "thread1" : {
- "instance" : 1,
- "loop" : -1,
- "phases" : {
- "light" : {
- "loop" : 300,
- "run" : 1000,
- "timer" : { "ref" : "unique", "period" : 10000 }
- },
- "heavy" : {
- "loop" : 300,
- "run" : 7000,
- "timer" : { "ref" : "unique", "period" : 10000 }
- }
- }
- },
- "thread2" : {
- "instance" : 1,
- "loop" : -1,
- "phases" : {
- "light1" : {
- "loop" : 900,
- "run" : 1000,
- "timer" : { "ref" : "unique", "period" : 10000 }
- },
- "heavy1" : {
- "loop" : 600,
- "run" : 7000,
- "timer" : { "ref" : "unique", "period" : 10000 }
- },
- "light2" : {
- "loop" : 300,
- "run" : 1000,
- "timer" : { "ref" : "unique", "period" : 10000 }
- },
- "heavy1" : {
- "loop" : 600,
- "run" : 7000,
- "timer" : { "ref" : "unique", "period" : 10000 }
- },
- }
- }
- },
- "global" : {
- "duration" : 60,
- "default_policy" : "SCHED_OTHER",
- "calibration" : "CPU0"
- }
-}
-
diff --git a/wlauto/workloads/rt_app/use_cases/taskset.json b/wlauto/workloads/rt_app/use_cases/taskset.json
deleted file mode 100644
index ddcb389d..00000000
--- a/wlauto/workloads/rt_app/use_cases/taskset.json
+++ /dev/null
@@ -1,186 +0,0 @@
-{
- "tasks": {
- "ThreadA": {
- "exec": 5000,
- "period": 24000,
- "priority": -19,
- "cpus": [
- 0
- ],
- "lock_order": [
- "r0",
- "trig1"
- ],
- "resources": {
- "r0": {
- "duration": 1000
- },
- "trig1": {
- "duration": 0
- }
- }
- },
- "ThreadB": {
- "priority": -16,
- "phases": {
- "phase1": {
- "exec": 300,
- "period": 24000,
- "sleep": false,
- "loop": 1,
- "lock_order": [
- "wait1",
- "r0",
- "trig2"
- ],
- "resources": {
- "wait1": {
- "duration": 0,
- "access": [
- "trig1_mutex"
- ]
- },
- "r0": {
- "duration": 300
- },
- "trig2": {
- "duration": 0
- }
- }
- },
- "phase2": {
- "exec": 4000,
- "period": 24000,
- "loop": 2,
- "sleep": false,
- "lock_order": [
- "wait1",
- "r0",
- "trig2"
- ],
- "resources": {
- "wait1": {
- "duration": 0,
- "access": [
- "trig1_mutex"
- ]
- },
- "r0": {
- "duration": 300
- },
- "trig2": {
- "duration": 0
- }
- }
- }
- }
- },
- "ThreadC": {
- "exec": 1150,
- "period": 24000,
- "priority": -2,
- "sleep": false,
- "lock_order": [
- "wait2",
- "r0",
- "sync3"
- ],
- "resources": {
- "wait2": {
- "duration": 0,
- "access": [
- "trig2_mutex"
- ]
- },
- "r0": {
- "duration": 1000
- },
- "sync3": {
- "duration": 0,
- "access": [
- "trig3_mutex"
- ]
- }
- }
- },
- "ThreadD": {
- "exec": 300,
- "period": 24000,
- "deadline": 24000,
- "priority": -2,
- "sleep": false,
- "lock_order": [
- "wait3",
- "r0",
- "trig3"
- ],
- "resources": {
- "wait3": {
- "duration": 0,
- "access": [
- "trig3_mutex"
- ]
- },
- "r0": {
- "duration": 300
- },
- "trig3": {
- "duration": 0,
- "access": [
- "trig3_mutex"
- ]
- }
- }
- }
- },
- "resources": {
- "trig1_mutex": {
- "type": "mutex"
- },
- "wait1": {
- "type": "wait"
- },
- "trig1": {
- "type": "signal",
- "target": "wait1"
- },
- "trig2_mutex": {
- "type": "mutex"
- },
- "wait2": {
- "type": "wait"
- },
- "trig2": {
- "type": "signal",
- "target": "wait2"
- },
- "trig3_mutex": {
- "type": "mutex"
- },
- "wait3": {
- "type": "wait"
- },
- "trig3": {
- "type": "signal",
- "target": "wait3"
- },
- "sync3": {
- "type": "sync",
- "target": "wait3"
- },
- "r0": {
- "type": "run"
- }
- },
- "global": {
- "default_policy": "SCHED_OTHER",
- "duration": 5,
- "ftrace": true,
- "gnuplot": false,
- "logdir": "/root/wa",
- "log_basename": "rt-app",
- "lock_pages": true,
- "frag": 1,
- "calibration": "CPU1"
- }
-}
diff --git a/wlauto/workloads/rt_app/use_cases/video-long.json b/wlauto/workloads/rt_app/use_cases/video-long.json
deleted file mode 100644
index bca02d93..00000000
--- a/wlauto/workloads/rt_app/use_cases/video-long.json
+++ /dev/null
@@ -1,252 +0,0 @@
-{
- "tasks" : {
- "surfaceflinger" : {
- "priority" : -7,
- "loop" : -1,
- "suspend",
- "run" : 1500
- },
-
- "DispSync" : {
- "priority" : -7,
- "loop" : -1,
- "phases" : {
- "p1" : {
- "suspend",
- "run" : 35,
- "resume" : "EventThread",
- "run" : 40,
- },
-
- "p2" : {
- "loop" : 2,
- "suspend",
- "run" : 30
- }
- },
- },
-
- "hwc_eventmon" : {
- "priority" : -19,
- "loop" : -1,
- "resume" : "DispSync",
- "run" : 115,
- "timer" : { "ref" : "timerA", "period" : 16667 }
- },
-
- "EventThread1" : {
- "priority" : -8,
- "loop" : -1,
- "phases" : {
- "p1" : {
- "suspend" : "EventThread",
- "run" : 25,
- "resume" : "DispSync",
- "sleep" : 9650,
- "run" : 70,
- "resume" : "DispSync",
- "run" : 80
- },
-
- "p2" : {
- "suspend" : "EventThread",
- "run" : 90,
- "resume" : "DispSync"
- }
- }
- },
-
- "EventThread2" : {
- "priority" : -8,
- "loop" : -1,
- "phases" : {
- "p1" : {
- "suspend" : "EventThread",
- "run" : 30,
- "resume" : "surfaceflinger"
- },
-
- "p2" : {
- "suspend" : "EventThread",
- "run" : 35,
- "sleep" : 2000,
- "run" : 110,
- "resume" : "DispSync",
- "run" : 60
- }
- }
- },
-
- "waker" : {
- "priority" : -19,
- "loop" : -1,
- "resume" : "NuPlayerRenderer",
- "timer" : { "ref" : "timerB", "period" : 33333 }
- },
-
- "NuPlayerRenderer" : {
- "priority" : -15,
- "loop" : -1,
- "phases" : {
- "p1" : {
- "loop" : 3,
- "suspend" : "NuPlayerRenderer",
- "run" : 140,
- "resume" : "NuPlayerDriver1",
- "run" : 95
- },
-
- "p2" : {
- "sleep" : 27000,
- "run" : 580,
- "resume" : "NPDecoder",
- "resume" : "NPDecoder-CL",
- "resume" : "gle.aac.decoder"
- }
- }
- },
-
- "NuPlayerDriver1" : {
- "priority" : -15,
- "loop" : -1,
- "suspend",
- "run" : 100,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 50,
- "suspend" : "NuPlayerDriver",
- "run" : 80,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 370,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 135,
- "resume" : "NuPlayerDriver"
- },
-
- "NuPlayerDriver2" : {
- "priority" : -15,
- "loop" : -1,
- "suspend" : "NuPlayerDriver",
- "run" : 110,
- "resume" : "NuPlayerDriver",
- "resume" : "CodecLooper1",
- "sleep" : 2500,
- "run" : 80,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 50,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 70,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 35
- },
-
- "CodecLooper1" : {
- "priority" : -15,
- "loop" : -1,
- "suspend",
- "run" : 230,
- "sleep" : 80,
- "run" : 150,
- "sleep" : 210,
- "run" : 330,
- "resume" : "CodecLooper2",
- "sleep" : 900,
- "run" : 170,
- "sleep" : 670,
- "run" : 125,
- "resume" : "CodecLooper2"
- },
-
- "CodecLooper2" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 160,
- "resume" : "CodecLooper3",
- "sleep" : 590,
- "resume" : "OMXCallbackDisp2",
- "run" : 75,
- "suspend",
- "run" : 260
- },
-
- "OMXCallbackDisp2" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 180
- },
-
- "CodecLooper3" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 1000
- },
-
- "NPDecoder" : {
- "priority" : -15,
- "loop" : -1,
- "suspend",
- "run" : 500,
- "sleep" : 680,
- "resume" : "OMXCallbackDisp1",
- "run" : 2000
- },
-
- "NPDecoder-CL" : {
- "priority" : -15,
- "loop" : -1,
- "suspend",
- "run" : 570,
- "sleep" : 570,
- "run" : 2100
- },
-
- "gle.aac.decoder" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 2400,
- "sleep" : 430,
- "run" : 45
- },
-
- "OMXCallbackDisp1" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 135,
- "sleep" : 230,
- "run" : 140,
- "sleep" : 330,
- "run" : 190,
- "sleep" : 550,
- "run" : 160
- }
- },
-
- "global" : {
- "default_policy" : "SCHED_OTHER",
- "duration" : 600,
- "ftrace" : false,
- "gnuplot" : false,
- "logdir" : "./",
- "log_basename" : "video",
- "lock_pages" : true,
- "frag" : 1,
- "calibration" : "CPU0"
- }
-}
-
diff --git a/wlauto/workloads/rt_app/use_cases/video-short.json b/wlauto/workloads/rt_app/use_cases/video-short.json
deleted file mode 100644
index 45925250..00000000
--- a/wlauto/workloads/rt_app/use_cases/video-short.json
+++ /dev/null
@@ -1,252 +0,0 @@
-{
- "tasks" : {
- "surfaceflinger" : {
- "priority" : -7,
- "loop" : -1,
- "suspend",
- "run" : 1500
- },
-
- "DispSync" : {
- "priority" : -7,
- "loop" : -1,
- "phases" : {
- "p1" : {
- "suspend",
- "run" : 35,
- "resume" : "EventThread",
- "run" : 40,
- },
-
- "p2" : {
- "loop" : 2,
- "suspend",
- "run" : 30
- }
- },
- },
-
- "hwc_eventmon" : {
- "priority" : -19,
- "loop" : -1,
- "resume" : "DispSync",
- "run" : 115,
- "timer" : { "ref" : "timerA", "period" : 16667 }
- },
-
- "EventThread1" : {
- "priority" : -8,
- "loop" : -1,
- "phases" : {
- "p1" : {
- "suspend" : "EventThread",
- "run" : 25,
- "resume" : "DispSync",
- "sleep" : 9650,
- "run" : 70,
- "resume" : "DispSync",
- "run" : 80
- },
-
- "p2" : {
- "suspend" : "EventThread",
- "run" : 90,
- "resume" : "DispSync"
- }
- }
- },
-
- "EventThread2" : {
- "priority" : -8,
- "loop" : -1,
- "phases" : {
- "p1" : {
- "suspend" : "EventThread",
- "run" : 30,
- "resume" : "surfaceflinger"
- },
-
- "p2" : {
- "suspend" : "EventThread",
- "run" : 35,
- "sleep" : 2000,
- "run" : 110,
- "resume" : "DispSync",
- "run" : 60
- }
- }
- },
-
- "waker" : {
- "priority" : -19,
- "loop" : -1,
- "resume" : "NuPlayerRenderer",
- "timer" : { "ref" : "timerB", "period" : 33333 }
- },
-
- "NuPlayerRenderer" : {
- "priority" : -15,
- "loop" : -1,
- "phases" : {
- "p1" : {
- "loop" : 3,
- "suspend" : "NuPlayerRenderer",
- "run" : 140,
- "resume" : "NuPlayerDriver1",
- "run" : 95
- },
-
- "p2" : {
- "sleep" : 27000,
- "run" : 580,
- "resume" : "NPDecoder",
- "resume" : "NPDecoder-CL",
- "resume" : "gle.aac.decoder"
- }
- }
- },
-
- "NuPlayerDriver1" : {
- "priority" : -15,
- "loop" : -1,
- "suspend",
- "run" : 100,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 50,
- "suspend" : "NuPlayerDriver",
- "run" : 80,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 370,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 135,
- "resume" : "NuPlayerDriver"
- },
-
- "NuPlayerDriver2" : {
- "priority" : -15,
- "loop" : -1,
- "suspend" : "NuPlayerDriver",
- "run" : 110,
- "resume" : "NuPlayerDriver",
- "resume" : "CodecLooper1",
- "sleep" : 2500,
- "run" : 80,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 50,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 70,
- "lock" : "NuPlayerDriver",
- "sync" : { "ref" : "NuPlayerDriver", "mutex" : "NuPlayerDriver" },
- "unlock" : "NuPlayerDriver",
- "run" : 35
- },
-
- "CodecLooper1" : {
- "priority" : -15,
- "loop" : -1,
- "suspend",
- "run" : 230,
- "sleep" : 80,
- "run" : 150,
- "sleep" : 210,
- "run" : 330,
- "resume" : "CodecLooper2",
- "sleep" : 900,
- "run" : 170,
- "sleep" : 670,
- "run" : 125,
- "resume" : "CodecLooper2"
- },
-
- "CodecLooper2" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 160,
- "resume" : "CodecLooper3",
- "sleep" : 590,
- "resume" : "OMXCallbackDisp2",
- "run" : 75,
- "suspend",
- "run" : 260
- },
-
- "OMXCallbackDisp2" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 180
- },
-
- "CodecLooper3" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 1000
- },
-
- "NPDecoder" : {
- "priority" : -15,
- "loop" : -1,
- "suspend",
- "run" : 500,
- "sleep" : 680,
- "resume" : "OMXCallbackDisp1",
- "run" : 2000
- },
-
- "NPDecoder-CL" : {
- "priority" : -15,
- "loop" : -1,
- "suspend",
- "run" : 570,
- "sleep" : 570,
- "run" : 2100
- },
-
- "gle.aac.decoder" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 2400,
- "sleep" : 430,
- "run" : 45
- },
-
- "OMXCallbackDisp1" : {
- "priority" : -1,
- "loop" : -1,
- "suspend",
- "run" : 135,
- "sleep" : 230,
- "run" : 140,
- "sleep" : 330,
- "run" : 190,
- "sleep" : 550,
- "run" : 160
- }
- },
-
- "global" : {
- "default_policy" : "SCHED_OTHER",
- "duration" : 6,
- "ftrace" : false,
- "gnuplot" : false,
- "logdir" : "./",
- "log_basename" : "video",
- "lock_pages" : true,
- "frag" : 1,
- "calibration" : "CPU0"
- }
-}
-
diff --git a/wlauto/workloads/rt_app/workgen b/wlauto/workloads/rt_app/workgen
deleted file mode 100755
index c85d6b0a..00000000
--- a/wlauto/workloads/rt_app/workgen
+++ /dev/null
@@ -1,236 +0,0 @@
-#!/usr/bin/env python
-
-import os
-import sys
-import getopt
-import subprocess
-import signal
-import re
-
-def check_unikid_json(infile, outfile, verbose=0):
- if not os.path.exists(infile):
- print "WARN: %s does not exist", infile
-
- try:
- fi = open(infile, "r")
- except IOError:
- print "WARN: Unable to open %s", infile
- sys.exit(2)
-
- lines = fi.readlines()
- fi.close()
-
- try:
- fo = open(outfile, "w+")
- except IOError:
- print "WARN: Unable to open %s", f
- sys.exit(2)
-
- curid = 1
- refcount = 0
- idlist = {}
- myid = []
- for myline in lines:
- if "{" in myline:
- refcount += 1
- myid.append(curid)
- curid = 1
- idlist[refcount] = {}
-
- if "}" in myline:
- del idlist[refcount]
- curid = myid.pop()
- refcount -= 1
-
- try:
- key_id, value = myline.split(":", 1)
- except ValueError:
- fo.write(myline)
- continue
-
- key_id = key_id.strip('\"\t\n\r ')
- value = value.strip(',\"\t\n\r ')
-
- if key_id in idlist[refcount]:
- newkey_id = key_id + str(curid)
- while newkey_id in idlist[refcount]:
- curid += 1
- newkey_id = key_id + str(curid)
-
- if verbose:
- print "level ", refcount, " : key ", key_id, " changed into ", newkey_id
-
- myline = myline.replace(key_id, newkey_id, 1)
- key_id = newkey_id
-
- idlist[refcount][key_id] = value
- fo.write(myline)
-
- fo.close()
-
- return
-
-def check_suspend_json(infile, outfile, verbose=0):
- if not os.path.exists(infile):
- print "WARN: %s does not exist", infile
-
- try:
- fi = open(infile, "r")
- except IOError:
- print "WARN: Unable to open %s", infile
- sys.exit(2)
-
- lines = fi.readlines()
- fi.close()
-
- try:
- fo = open(outfile, "w+")
- except IOError:
- print "WARN: Unable to open %s", f
- sys.exit(2)
-
-
- taskobj = 0
- curid = ""
- for myline in lines:
-
- exception = 0
- key_id = "exception"
-
- try:
- key_id, value = myline.split(":", 1)
- except ValueError:
- if "suspend" in myline:
- key_id = "suspend"
- exception = 1
-
- key_id = key_id.strip('\"\t\n\r ')
-
- if not "tasks" in key_id and \
- taskobj == 0:
- fo.write(myline)
- continue
-
- if "{" in myline:
- taskobj += 1
- if taskobj == 2:
- curid = key_id
-
- if "}" in myline:
- taskobj -= 1
-
- if "suspend" in key_id and \
- exception == 1:
-
- if verbose:
- print "value ", curid, " added to suspend key"
-
- if "," in myline:
- myline = myline.replace(",", " : " + "\"" + curid + "\",", 1)
- else:
- myline = myline.replace("\n", " : " + "\"" + curid + "\"\n", 1)
-
- fo.write(myline)
-
- fo.close()
-
- return
-
-# remove trailing commas that may appear after closing
-# brackets and last entries in every section
-def remove_trailing_commas(outfile):
- try:
- f = open(outfile, 'r+')
- except IOError:
- print "WARN: Unable to open %s", f
- sys.exit(2)
-
- lines = f.read()
- check_last_entry_regex = r'(.),(\n\s*})'
- check_end_bracket_regex = r'(}),(\n\s*})'
-
- lines = re.sub(check_last_entry_regex, r'\g<1>\g<2>', lines)
- lines = re.sub(check_end_bracket_regex, r'\g<1>\g<2>', lines)
-
- f.seek(0)
- f.write(lines)
- f.truncate()
- f.close()
-
- return
-
-
-# Search for comments to remove
-def comment_remover(text):
- def replacer(match):
- s = match.group(0)
- if s.startswith('/'):
- return " " # note: a space and not an empty string
- else:
- return s
-
- pattern = re.compile(
- r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"',
- re.DOTALL | re.MULTILINE)
-
- return re.sub(pattern, replacer, text)
-
-# Remove all comments inside the file
-def remove_all_comments(outfile):
- try:
- f = open(outfile, 'r+')
- except IOError:
- print "WARN: Unable to open %s", f
- sys.exit(2)
-
- lines = f.read()
-
- lines = comment_remover(lines)
- f.seek(0)
- f.write(lines)
- f.truncate()
-
- f.close()
-
- return
-
-if __name__ == '__main__':
-
- def handleSigTERM(signum, frame):
- sys.exit()
-
- signal.signal(signal.SIGTERM, handleSigTERM)
- signal.signal(signal.SIGINT, handleSigTERM)
-
- outfile = "unikid.json"
- selfupdate = 0
- verbose = 0
- dry_run = False
-
- try:
- opts, args = getopt.getopt(sys.argv[1:], "o:avd")
- except getopt.GetoptError as err:
- print str(err) # will print something like "option -a not recognized"
- sys.exit(2)
-
- for o, a in opts:
- if o == "-o":
- outfile = a
- if o == "-a":
- selfupdate = 1
- if o == "-v":
- verbose = 1
- if o == "-d":
- dry_run = True
-
- for f in args:
- if selfupdate:
- outfile = f
-
- check_suspend_json(f, outfile)
- check_unikid_json(outfile, outfile)
- remove_trailing_commas(outfile)
- remove_all_comments(outfile)
-
- if not dry_run:
- subprocess.call(["rt-app", outfile])
diff --git a/wlauto/workloads/shellscript/__init__.py b/wlauto/workloads/shellscript/__init__.py
deleted file mode 100644
index 632eb52a..00000000
--- a/wlauto/workloads/shellscript/__init__.py
+++ /dev/null
@@ -1,65 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,W0201,E0203
-
-import os
-
-from wlauto import Workload, Parameter
-from wlauto.exceptions import ConfigError
-
-
-class ShellScript(Workload):
-
- name = 'shellscript'
- description = """
- Runs an arbitrary shellscript on the device.
-
- """
-
- parameters = [
- Parameter('script_file', mandatory=True,
- description=('The path (on the host) to the shell script file. This must be '
- 'an absolute path (though it may contain ~).')),
- Parameter('argstring', default='',
- description='A string that should contain arguments passed to the script.'),
- Parameter('timeout', kind=int, default=60,
- description='Timeout, in seconds, for the script run time.'),
- ]
-
- def __init__(self, device, **kwargs):
- super(ShellScript, self).__init__(device, **kwargs)
- self.script_file = os.path.expanduser(self.script_file)
- if not os.path.isfile(self.script_file):
- raise ConfigError('Can\'t access file (is the path correct?): {}'.format(self.script_file))
- self.output = None
- self.command = None
- self.on_device_script_file = None
-
- def setup(self, context):
- self.on_device_script_file = self.device.path.join(self.device.working_directory,
- os.path.basename(self.script_file))
- self.device.push(self.script_file, self.on_device_script_file)
- self.command = 'sh {} {}'.format(self.on_device_script_file, self.argstring)
-
- def run(self, context):
- self.output = self.device.execute(self.command, timeout=self.timeout)
-
- def update_result(self, context):
- with open(os.path.join(context.output_directory, 'output.txt'), 'w') as wfh:
- wfh.write(self.output)
-
- def teardown(self, context):
- self.device.remove(self.on_device_script_file)
diff --git a/wlauto/workloads/skypevideo/__init__.py b/wlauto/workloads/skypevideo/__init__.py
deleted file mode 100644
index 58959e1f..00000000
--- a/wlauto/workloads/skypevideo/__init__.py
+++ /dev/null
@@ -1,130 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,W0201,E0203
-
-import time
-
-from wlauto import UiAutomatorWorkload, Parameter
-from wlauto.utils.types import boolean
-
-
-class SkypeVideo(UiAutomatorWorkload):
-
- name = 'skypevideo'
- description = """
- Initiates Skype video call to a specified contact for a pre-determined duration.
- (Note: requires Skype to be set up appropriately).
-
- This workload is intended for monitoring the behaviour of a device while a Skype
- video call is in progress (a common use case). It does not produce any score or
- metric and the intention is that some addition instrumentation is enabled while
- running this workload.
-
- This workload, obviously, requires a network connection (ideally, wifi).
-
- This workload accepts the following parameters:
-
-
- **Skype Setup**
-
- - You should install Skype client from Google Play Store on the device
- (this was tested with client version 4.5.0.39600; other recent versions
- should also work).
- - You must have an account set up and logged into Skype on the device.
- - The contact to be called must be added (and has accepted) to the
- account. It's possible to have multiple contacts in the list, however
- the contact to be called *must* be visible on initial navigation to the
- list.
- - The contact must be able to received the call. This means that there
- must be a Skype client running (somewhere) with the contact logged in
- and that client must have been configured to auto-accept calls from the
- account on the device (how to set this varies between different versions
- of Skype and between platforms -- please search online for specific
- instructions).
- https://support.skype.com/en/faq/FA3751/can-i-automatically-answer-all-my-calls-with-video-in-skype-for-windows-desktop
-
- """
-
- package = 'com.skype.raider'
-
- parameters = [
- Parameter('duration', kind=int, default=300,
- description='Duration of the video call in seconds.'),
- Parameter('contact', mandatory=True,
- description="""
- The name of the Skype contact to call. The contact must be already
- added (see below). *If use_gui is set*, then this must be the skype
- ID of the contact, *otherwise*, this must be the name of the
- contact as it appears in Skype client's contacts list. In the latter case
- it *must not* contain underscore characters (``_``); it may, however, contain
- spaces. There is no default, you **must specify the name of the contact**.
-
- .. note:: You may alternatively specify the contact name as
- ``skype_contact`` setting in your ``config.py``. If this is
- specified, the ``contact`` parameter is optional, though
- it may still be specified (in which case it will override
- ``skype_contact`` setting).
- """),
- Parameter('use_gui', kind=boolean, default=False,
- description="""
- Specifies whether the call should be placed directly through a
- Skype URI, or by navigating the GUI. The URI is the recommended way
- to place Skype calls on a device, but that does not seem to work
- correctly on some devices (the URI seems to just start Skype, but not
- place the call), so an alternative exists that will start the Skype app
- and will then navigate the UI to place the call (incidentally, this method
- does not seem to work on all devices either, as sometimes Skype starts
- backgrounded...). Please note that the meaning of ``contact`` prameter
- is different depending on whether this is set. Defaults to ``False``.
-
- .. note:: You may alternatively specify this as ``skype_use_gui`` setting
- in your ``config.py``.
- """),
-
- ]
-
- def __init__(self, device, **kwargs):
- super(SkypeVideo, self).__init__(device, **kwargs)
- if self.use_gui:
- self.uiauto_params['name'] = self.contact.replace(' ', '_')
- self.uiauto_params['duration'] = self.duration
- self.run_timeout = self.duration + 30
-
- def setup(self, context):
- if self.use_gui:
- super(SkypeVideo, self).setup(context)
- self.device.execute('am force-stop {}'.format(self.package))
- self.device.execute('am start -W -a android.intent.action.VIEW -d skype:')
- else:
- self.device.execute('am force-stop {}'.format(self.package))
-
- def run(self, context):
- if self.use_gui:
- super(SkypeVideo, self).run(context)
- else:
- command = "am start -W -a android.intent.action.VIEW -d \"skype:{}?call&video=true\""
- self.logger.debug(self.device.execute(command.format(self.contact)))
- self.logger.debug('Call started; waiting for {} seconds...'.format(self.duration))
- time.sleep(self.duration)
- self.device.execute('am force-stop com.skype.raider')
-
- def update_result(self, context):
- pass
-
- def teardown(self, context):
- if self.use_gui:
- super(SkypeVideo, self).teardown(context)
- self.device.execute('am force-stop {}'.format(self.package))
diff --git a/wlauto/workloads/skypevideo/com.arm.wlauto.uiauto.skypevideo.jar b/wlauto/workloads/skypevideo/com.arm.wlauto.uiauto.skypevideo.jar
deleted file mode 100644
index dff2302a..00000000
--- a/wlauto/workloads/skypevideo/com.arm.wlauto.uiauto.skypevideo.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/skypevideo/uiauto/build.sh b/wlauto/workloads/skypevideo/uiauto/build.sh
deleted file mode 100755
index db6f8ff4..00000000
--- a/wlauto/workloads/skypevideo/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.skypevideo.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.skypevideo.jar ..
-fi
diff --git a/wlauto/workloads/skypevideo/uiauto/build.xml b/wlauto/workloads/skypevideo/uiauto/build.xml
deleted file mode 100644
index c2fdeb90..00000000
--- a/wlauto/workloads/skypevideo/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.skypevideo" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/skypevideo/uiauto/project.properties b/wlauto/workloads/skypevideo/uiauto/project.properties
deleted file mode 100644
index ce39f2d0..00000000
--- a/wlauto/workloads/skypevideo/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-18
diff --git a/wlauto/workloads/skypevideo/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/skypevideo/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index 0743372e..00000000
--- a/wlauto/workloads/skypevideo/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,72 +0,0 @@
-/* Copyright 2014-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.skypevideo;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.KeyEvent;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "skypevideo";
- public static String videoCallButtonResourceId = "com.skype.raider:id/chat_menu_item_call_video";
- public static String noContactMessage = "Could not find contact \"%s\" in the contacts list.";
-
- public void runUiAutomation() throws Exception {
- Bundle parameters = getParams();
- String contactName = parameters.getString("name").replace('_', ' ');
- int duration = Integer.parseInt(parameters.getString("duration"));
-
- selectContact(contactName);
- initiateCall(duration);
- }
-
- public void selectContact(String name) throws Exception {
- UiSelector selector = new UiSelector();
- UiObject peopleTab = new UiObject(selector.text("People"));
- peopleTab.click();
- sleep(1); // tab transition
-
- // Note: this assumes that the contact is in view and does not attempt to scroll to find it.
- // The expectation is that this automation will be used with a dedicated account that was set
- // up for the purpose and so would only have the intended target plus one or two other contacts
- // at most in the list. If that is not the case, then this needs to be re-written to scroll to
- // find the contact if necessary.
- UiObject contactCard = new UiObject(selector.text(name));
- if (!contactCard.exists()) {
- throw new UiObjectNotFoundException(String.format(noContactMessage, name));
- }
- contactCard.clickAndWaitForNewWindow();
- }
-
- public void initiateCall(int duration) throws Exception {
- UiSelector selector = new UiSelector();
- UiObject videoCallButton = new UiObject(selector.resourceId(videoCallButtonResourceId));
- videoCallButton.click();
- sleep(duration);
- }
-}
diff --git a/wlauto/workloads/smartbench/__init__.py b/wlauto/workloads/smartbench/__init__.py
deleted file mode 100644
index 4b7cbe3e..00000000
--- a/wlauto/workloads/smartbench/__init__.py
+++ /dev/null
@@ -1,59 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import os
-import re
-import time
-
-from wlauto import AndroidUiAutoBenchmark
-
-
-class Smartbench(AndroidUiAutoBenchmark):
-
- name = 'smartbench'
- description = """
- Smartbench is a multi-core friendly benchmark application that measures the
- overall performance of an android device. It reports both Productivity and
- Gaming Index.
-
- https://play.google.com/store/apps/details?id=com.smartbench.twelve&hl=en
-
- From the website:
-
- It will be better prepared for the quad-core world. Unfortunately this also
- means it will run slower on older devices. It will also run slower on
- high-resolution tablet devices. All 3D tests are now rendered in full native
- resolutions so naturally it will stress hardware harder on these devices.
- This also applies to higher resolution hand-held devices.
- """
- package = 'com.smartbench.twelve'
- activity = '.Smartbench2012'
- summary_metrics = ['Smartbench: valueGame', 'Smartbench: valueProd']
- run_timeout = 10 * 60
-
- prod_regex = re.compile('valueProd=(\d+)')
- game_regex = re.compile('valueGame=(\d+)')
-
- def update_result(self, context):
- super(Smartbench, self).update_result(context)
- with open(self.logcat_log) as fh:
- text = fh.read()
- match = self.prod_regex.search(text)
- prod = int(match.group(1))
- match = self.game_regex.search(text)
- game = int(match.group(1))
- context.result.add_metric('Smartbench: valueProd', prod)
- context.result.add_metric('Smartbench: valueGame', game)
diff --git a/wlauto/workloads/smartbench/com.arm.wlauto.uiauto.smartbench.jar b/wlauto/workloads/smartbench/com.arm.wlauto.uiauto.smartbench.jar
deleted file mode 100644
index f388cbe0..00000000
--- a/wlauto/workloads/smartbench/com.arm.wlauto.uiauto.smartbench.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/smartbench/uiauto/build.sh b/wlauto/workloads/smartbench/uiauto/build.sh
deleted file mode 100755
index bf76a67e..00000000
--- a/wlauto/workloads/smartbench/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.smartbench.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.smartbench.jar ..
-fi
diff --git a/wlauto/workloads/smartbench/uiauto/build.xml b/wlauto/workloads/smartbench/uiauto/build.xml
deleted file mode 100644
index ee913c41..00000000
--- a/wlauto/workloads/smartbench/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.smartbench" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/smartbench/uiauto/project.properties b/wlauto/workloads/smartbench/uiauto/project.properties
deleted file mode 100644
index a3ee5ab6..00000000
--- a/wlauto/workloads/smartbench/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-17
diff --git a/wlauto/workloads/smartbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/smartbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index e8c3aac1..00000000
--- a/wlauto/workloads/smartbench/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.smartbench;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "smartbench";
-
- public void runUiAutomation() throws Exception {
- Bundle status = new Bundle();
- status.putString("product", getUiDevice().getProductName());
- UiSelector selector = new UiSelector();
- sleep(3);
- UiObject text_bench = new UiObject(selector.text("Run SmartBench")
- .className("android.widget.TextView"));
- text_bench.click();
-
- try{
- UiObject complete_text = new UiObject(selector .textContains("Display Index Scores")
- .className("android.widget.TextView"));
-
- waitObject(complete_text);
-
- sleep(2);
- complete_text.click();
- } finally{
- //complete_text.click();
- }
-
- sleep(5);
- takeScreenshot("SmartBench");
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-
-}
diff --git a/wlauto/workloads/spec2000/__init__.py b/wlauto/workloads/spec2000/__init__.py
deleted file mode 100644
index df4e0da4..00000000
--- a/wlauto/workloads/spec2000/__init__.py
+++ /dev/null
@@ -1,350 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-#pylint: disable=E1101,W0201
-import os
-import re
-import string
-import tarfile
-from collections import defaultdict
-
-from wlauto import Workload, Parameter, Alias
-from wlauto.exceptions import ConfigError, WorkloadError
-from wlauto.common.resources import PluginAsset
-from wlauto.utils.misc import get_cpu_mask
-from wlauto.utils.types import boolean, list_or_string
-
-
-class Spec2000(Workload):
-
- name = 'spec2000'
- description = """
- SPEC2000 benchmarks measuring processor, memory and compiler.
-
- http://www.spec.org/cpu2000/
-
- From the web site:
-
- SPEC CPU2000 is the next-generation industry-standardized CPU-intensive benchmark suite. SPEC
- designed CPU2000 to provide a comparative measure of compute intensive performance across the
- widest practical range of hardware. The implementation resulted in source code benchmarks
- developed from real user applications. These benchmarks measure the performance of the
- processor, memory and compiler on the tested system.
-
- .. note:: At the moment, this workload relies on pre-built SPEC binaries (included in an
- asset bundle). These binaries *must* be built according to rules outlined here::
-
- http://www.spec.org/cpu2000/docs/runrules.html#toc_2.0
-
- in order for the results to be valid SPEC2000 results.
-
- .. note:: This workload does not attempt to generate results in an admissible SPEC format. No
- metadata is provided (though some, but not all, of the required metdata is colleted
- by WA elsewhere). It is upto the user to post-process results to generated
- SPEC-admissible results file, if that is their intention.
-
- *base vs peak*
-
- SPEC2000 defines two build/test configuration: base and peak. Base is supposed to use basic
- configuration (e.g. default compiler flags) with no tuning, and peak is specifically optimized for
- a system. Since this workload uses externally-built binaries, there is no way for WA to be sure
- what configuration is used -- the user is expected to keep track of that. Be aware that
- base/peak also come with specfic requirements for the way workloads are run (e.g. how many instances
- on multi-core systems)::
-
- http://www.spec.org/cpu2000/docs/runrules.html#toc_3
-
- These are not enforced by WA, so it is again up to the user to ensure that correct workload
- parameters are specfied inthe agenda, if they intend to collect "official" SPEC results. (Those
- interested in collecting official SPEC results should also note that setting runtime parameters
- would violate SPEC runs rules that state that no configuration must be done to the platform
- after boot).
-
- *bundle structure*
-
- This workload expects the actual benchmark binaries to be provided in a tarball "bundle" that has
- a very specific structure. At the top level of the tarball, there should be two directories: "fp"
- and "int" -- for each of the SPEC2000 categories. Under those, there is a sub-directory per benchmark.
- Each benchmark sub-directory contains three sub-sub-directorie:
-
- - "cpus" contains a subdirector for each supported cpu (e.g. a15) with a single executable binary
- for that cpu, in addition to a "generic" subdirectory that has not been optimized for a specific
- cpu and should run on any ARM system.
- - "data" contains all additional files (input, configuration, etc) that the benchmark executable
- relies on.
- - "scripts" contains one or more one-liner shell scripts that invoke the benchmark binary with
- appropriate command line parameters. The name of the script must be in the format
- <benchmark name>[.<variant name>].sh, i.e. name of benchmark, optionally followed by variant
- name, followed by ".sh" plugin. If there is more than one script, then all of them must
- have a variant; if there is only one script the it should not cotain a variant.
-
- A typical bundle may look like this::
-
- |- fp
- | |-- ammp
- | | |-- cpus
- | | | |-- generic
- | | | | |-- ammp
- | | | |-- a15
- | | | | |-- ammp
- | | | |-- a7
- | | | | |-- ammp
- | | |-- data
- | | | |-- ammp.in
- | | |-- scripts
- | | | |-- ammp.sh
- | |-- applu
- . . .
- . . .
- . . .
- |- int
- .
-
- """
-
- # TODO: This is a bit of a hack. Need to re-think summary metric indication
- # (also more than just summary/non-summary classification?)
- class _SPECSummaryMetrics(object):
- def __contains__(self, item):
- return item.endswith('_real')
-
- asset_file = 'spec2000-assets.tar.gz'
-
- aliases = [
- Alias('spec2k'),
- ]
-
- summary_metrics = _SPECSummaryMetrics()
-
- parameters = [
- Parameter('benchmarks', kind=list_or_string,
- description='Specfiles the SPEC benchmarks to run.'),
- Parameter('mode', kind=str, allowed_values=['speed', 'rate'], default='speed',
- description='SPEC benchmarks can report either speed to execute or throughput/rate. '
- 'In the latter case, several "threads" will be spawned.'),
- Parameter('number_of_threads', kind=int, default=None,
- description='Specify the number of "threads" to be used in \'rate\' mode. (Note: '
- 'on big.LITTLE systems this is the number of threads, for *each cluster*). '),
-
- Parameter('force_extract_assets', kind=boolean, default=False,
- description='if set to ``True``, will extract assets from the bundle, even if they are '
- 'already extracted. Note: this option implies ``force_push_assets``.'),
- Parameter('force_push_assets', kind=boolean, default=False,
- description='If set to ``True``, assets will be pushed to device even if they\'re already '
- 'present.'),
- Parameter('timeout', kind=int, default=20 * 60,
- description='Timemout, in seconds, for the execution of single spec test.'),
- ]
-
- speed_run_template = 'cd {datadir}; time ({launch_command})'
- rate_run_template = 'cd {datadir}; time ({loop}; wait)'
- loop_template = 'for i in $(busybox seq 1 {threads}); do {launch_command} 1>/dev/null 2>&1 & done'
- launch_template = 'busybox taskset {cpumask} {command} 1>/dev/null 2>&1'
-
- timing_regex = re.compile(r'(?P<minutes>\d+)m(?P<seconds>[\d.]+)s\s+(?P<category>\w+)')
-
- def init_resources(self, context):
- self._load_spec_benchmarks(context)
-
- def setup(self, context):
- cpus = self.device.core_names
- if not cpus:
- raise WorkloadError('Device has not specifed CPU cores configruation.')
- cpumap = defaultdict(list)
- for i, cpu in enumerate(cpus):
- cpumap[cpu.lower()].append(i)
- for benchspec in self.benchmarks:
- commandspecs = self._verify_and_deploy_benchmark(benchspec, cpumap)
- self._build_command(benchspec, commandspecs)
-
- def run(self, context):
- for name, command in self.commands:
- self.timings[name] = self.device.execute(command, timeout=self.timeout)
-
- def update_result(self, context):
- for benchmark, output in self.timings.iteritems():
- matches = self.timing_regex.finditer(output)
- found = False
- for match in matches:
- category = match.group('category')
- mins = float(match.group('minutes'))
- secs = float(match.group('seconds'))
- total = secs + 60 * mins
- context.result.add_metric('_'.join([benchmark, category]),
- total, 'seconds',
- lower_is_better=True)
- found = True
- if not found:
- self.logger.error('Could not get timings for {}'.format(benchmark))
-
- def validate(self):
- if self.force_extract_assets:
- self.force_push_assets = True
- if self.benchmarks is None: # pylint: disable=access-member-before-definition
- self.benchmarks = ['all']
- for benchname in self.benchmarks:
- if benchname == 'all':
- self.benchmarks = self.loaded_benchmarks.keys()
- break
- if benchname not in self.loaded_benchmarks:
- raise ConfigError('Unknown SPEC benchmark: {}'.format(benchname))
- if self.mode == 'speed':
- if self.number_of_threads is not None:
- raise ConfigError('number_of_threads cannot be specified in speed mode.')
- else:
- raise ValueError('Unexpected SPEC2000 mode: {}'.format(self.mode)) # Should never get here
- self.commands = []
- self.timings = {}
-
- def _load_spec_benchmarks(self, context):
- self.loaded_benchmarks = {}
- self.categories = set()
- if self.force_extract_assets or len(os.listdir(self.dependencies_directory)) < 2:
- bundle = context.resolver.get(PluginAsset(self, self.asset_file))
- with tarfile.open(bundle, 'r:gz') as tf:
- tf.extractall(self.dependencies_directory)
- for entry in os.listdir(self.dependencies_directory):
- entrypath = os.path.join(self.dependencies_directory, entry)
- if os.path.isdir(entrypath):
- for bench in os.listdir(entrypath):
- self.categories.add(entry)
- benchpath = os.path.join(entrypath, bench)
- self._load_benchmark(benchpath, entry)
-
- def _load_benchmark(self, path, category):
- datafiles = []
- cpus = []
- for df in os.listdir(os.path.join(path, 'data')):
- datafiles.append(os.path.join(path, 'data', df))
- for cpu in os.listdir(os.path.join(path, 'cpus')):
- cpus.append(cpu)
- commandsdir = os.path.join(path, 'commands')
- for command in os.listdir(commandsdir):
- bench = SpecBenchmark()
- bench.name = os.path.splitext(command)[0]
- bench.path = path
- bench.category = category
- bench.datafiles = datafiles
- bench.cpus = cpus
- with open(os.path.join(commandsdir, command)) as fh:
- bench.command_template = string.Template(fh.read().strip())
- self.loaded_benchmarks[bench.name] = bench
-
- def _verify_and_deploy_benchmark(self, benchspec, cpumap): # pylint: disable=R0914
- """Verifies that the supplied benchmark spec is valid and deploys the required assets
- to the device (if necessary). Returns a list of command specs (one for each CPU cluster)
- that can then be used to construct the final command."""
- bench = self.loaded_benchmarks[benchspec]
- basename = benchspec.split('.')[0]
- datadir = self.device.path.join(self.device.working_directory, self.name, basename)
- if self.force_push_assets or not self.device.file_exists(datadir):
- self.device.execute('mkdir -p {}'.format(datadir))
- for datafile in bench.datafiles:
- self.device.push(datafile, self.device.path.join(datadir, os.path.basename(datafile)))
-
- if self.mode == 'speed':
- cpus = [self._get_fastest_cpu().lower()]
- else:
- cpus = cpumap.keys()
-
- cmdspecs = []
- for cpu in cpus:
- try:
- host_bin_file = bench.get_binary(cpu)
- except ValueError, e:
- try:
- msg = e.message
- msg += ' Attempting to use generic binary instead.'
- self.logger.debug(msg)
- host_bin_file = bench.get_binary('generic')
- cpu = 'generic'
- except ValueError, e:
- raise ConfigError(e.message) # re-raising as user error
- binname = os.path.basename(host_bin_file)
- binary = self.device.install(host_bin_file, with_name='.'.join([binname, cpu]))
- commandspec = CommandSpec()
- commandspec.command = bench.command_template.substitute({'binary': binary})
- commandspec.datadir = datadir
- commandspec.cpumask = get_cpu_mask(cpumap[cpu])
- cmdspecs.append(commandspec)
- return cmdspecs
-
- def _build_command(self, name, commandspecs):
- if self.mode == 'speed':
- if len(commandspecs) != 1:
- raise AssertionError('Must be exactly one command spec specifed in speed mode.')
- spec = commandspecs[0]
- launch_command = self.launch_template.format(command=spec.command, cpumask=spec.cpumask)
- self.commands.append((name,
- self.speed_run_template.format(datadir=spec.datadir,
- launch_command=launch_command)))
- elif self.mode == 'rate':
- loops = []
- for spec in commandspecs:
- launch_command = self.launch_template.format(command=spec.command, cpumask=spec.cpumask)
- loops.append(self.loop_template.format(launch_command=launch_command, threads=spec.threads))
- self.commands.append((name,
- self.rate_run_template.format(datadir=spec.datadir,
- loop='; '.join(loops))))
- else:
- raise ValueError('Unexpected SPEC2000 mode: {}'.format(self.mode)) # Should never get here
-
- def _get_fastest_cpu(self):
- cpu_types = set(self.device.core_names)
- if len(cpu_types) == 1:
- return cpu_types.pop()
- fastest_cpu = None
- fastest_freq = 0
- for cpu_type in cpu_types:
- try:
- idx = self.device.get_core_online_cpu(cpu_type)
- freq = self.device.get_cpu_max_frequency(idx)
- if freq > fastest_freq:
- fastest_freq = freq
- fastest_cpu = cpu_type
- except ValueError:
- pass
- if not fastest_cpu:
- raise WorkloadError('No active CPUs found on device. Something is very wrong...')
- return fastest_cpu
-
-
-class SpecBenchmark(object):
-
- def __init__(self):
- self.name = None
- self.path = None
- self.category = None
- self.command_template = None
- self.cpus = []
- self.datafiles = []
-
- def get_binary(self, cpu):
- if cpu not in self.cpus:
- raise ValueError('CPU {} is not supported by {}.'.format(cpu, self.name))
- binpath = os.path.join(self.path, 'cpus', cpu, self.name.split('.')[0])
- if not os.path.isfile(binpath):
- raise ValueError('CPU {} is not supported by {}.'.format(cpu, self.name))
- return binpath
-
-
-class CommandSpec(object):
-
- def __init__(self):
- self.cpumask = None
- self.datadir = None
- self.command = None
- self.threads = None
diff --git a/wlauto/workloads/sqlite/__init__.py b/wlauto/workloads/sqlite/__init__.py
deleted file mode 100644
index 60a01276..00000000
--- a/wlauto/workloads/sqlite/__init__.py
+++ /dev/null
@@ -1,48 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-import re
-
-from wlauto import AndroidUiAutoBenchmark
-
-
-class Sqlite(AndroidUiAutoBenchmark):
-
- name = 'sqlitebm'
- description = """
- Measures the performance of the sqlite database. It determines within
- what time the target device processes a number of SQL queries.
-
- """
- package = 'com.redlicense.benchmark.sqlite'
- activity = '.Main'
- summary_metrics = ['Overall']
-
- score_regex = re.compile(r'V/sqlite.*:\s+([\w ]+) = ([\d\.]+) sec')
-
- def update_result(self, context):
- super(Sqlite, self).update_result(context)
- with open(self.logcat_log) as fh:
- text = fh.read()
- for match in self.score_regex.finditer(text):
- metric = match.group(1)
- value = match.group(2)
- try:
- value = float(value)
- except ValueError:
- self.logger.warn("Reported results do not match expected format (seconds)")
- context.result.add_metric(metric, value, 'Seconds', lower_is_better=True)
-
diff --git a/wlauto/workloads/sqlite/com.arm.wlauto.uiauto.sqlite.jar b/wlauto/workloads/sqlite/com.arm.wlauto.uiauto.sqlite.jar
deleted file mode 100644
index e8b77514..00000000
--- a/wlauto/workloads/sqlite/com.arm.wlauto.uiauto.sqlite.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/sqlite/uiauto/build.sh b/wlauto/workloads/sqlite/uiauto/build.sh
deleted file mode 100755
index b8bcdf89..00000000
--- a/wlauto/workloads/sqlite/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.sqlite.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.sqlite.jar ..
-fi
diff --git a/wlauto/workloads/sqlite/uiauto/build.xml b/wlauto/workloads/sqlite/uiauto/build.xml
deleted file mode 100644
index aa324270..00000000
--- a/wlauto/workloads/sqlite/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.sqlite" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/sqlite/uiauto/project.properties b/wlauto/workloads/sqlite/uiauto/project.properties
deleted file mode 100644
index ce39f2d0..00000000
--- a/wlauto/workloads/sqlite/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-18
diff --git a/wlauto/workloads/sqlite/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/sqlite/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index c11725e3..00000000
--- a/wlauto/workloads/sqlite/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,103 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.sqlite;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.KeyEvent;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "sqlite";
-
- public void runUiAutomation() throws Exception {
- Bundle status = new Bundle();
- status.putString("product", getUiDevice().getProductName());
- UiSelector selector = new UiSelector();
-
- UiObject text_start = new UiObject(selector.text("Start")
- .className("android.widget.Button"));
- text_start.click();
-
- try {
- UiObject stop_text = new UiObject(selector.textContains("Stop")
- .className("android.widget.Button"));
- waitUntilNoObject(stop_text, 600);
-
- sleep(2);
- this.extractResults();
- } finally {
- }
- }
-
- public void extractResults() throws UiObjectNotFoundException{
- UiSelector selector = new UiSelector();
- UiScrollable resultList = new UiScrollable(selector.className("android.widget.ScrollView"));
- resultList.scrollToBeginning(5);
- selector = resultList.getSelector();
- int index = 0;
- while (true){
- UiObject lastEntry = new UiObject(selector.childSelector(new UiSelector()
- .className("android.widget.LinearLayout")
- .childSelector(new UiSelector()
- .index(index)
- .childSelector(new UiSelector()
- .className("android.widget.LinearLayout")))));
- if (lastEntry.exists()){
- UiObject value = new UiObject(selector.childSelector(new UiSelector()
- .className("android.widget.LinearLayout")
- .childSelector(new UiSelector()
- .index(index)
- .childSelector(new UiSelector()
- .resourceIdMatches(".*test_result.*")))));
- Log.v("sqlite", "Overall = " + value.getText().replace("\n", " "));
- break;
- }
-
- UiObject label = new UiObject(selector.childSelector(new UiSelector()
- .className("android.widget.LinearLayout")
- .childSelector(new UiSelector()
- .index(index)
- .childSelector(new UiSelector()
- .index(0)))));
- UiObject value = new UiObject(selector.childSelector(new UiSelector()
- .className("android.widget.LinearLayout")
- .childSelector(new UiSelector()
- .index(index)
- .childSelector(new UiSelector()
- .index(1)))));
- index++;
- if (!label.exists()){
- resultList.scrollForward();
- index--;
- sleep(1);
- continue;
- }
- Log.v("sqlite", label.getText() + " = " + value.getText().replace("\n", " "));
- }
- }
-}
diff --git a/wlauto/workloads/stream/__init__.py b/wlauto/workloads/stream/__init__.py
deleted file mode 100644
index d9c39dc0..00000000
--- a/wlauto/workloads/stream/__init__.py
+++ /dev/null
@@ -1,93 +0,0 @@
-# Copyright 2012-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# pylint: disable=attribute-defined-outside-init
-import os
-import re
-
-from wlauto import Workload, Parameter, Executable
-
-
-stream_results_txt = 'stream_results.txt'
-system_array_regex = re.compile(r'^This system uses (\d)')
-
-regex_map = {
- "array_size": (re.compile(r'^Array size = (\d+)'), "elements"),
- "total_threads_requested": (re.compile(r'^Number of Threads requested = (\d+)'), "threads"),
- "total_thread_count": (re.compile(r'^Number of Threads counted = (\d+)'), "threads")
-}
-
-regex_map2 = {
- "memory_per_array": re.compile(r'^Memory per array = (\d*.\d*) (\w+)'),
- "total_memory_required": re.compile(r'^Total memory required = (\d*.\d*) (\w+)')
-}
-
-
-class Stream(Workload):
-
- name = 'stream'
- description = """
- Measures memory bandwidth.
-
- The original source code be found on:
- https://www.cs.virginia.edu/stream/FTP/Code/
- """
-
- parameters = [
- # Workload parameters go here e.g.
- Parameter('threads', kind=int, default=0,
- description='The number of threads to execute if OpenMP is enabled')
- ]
-
- def initialize(self, context):
- Stream.stream_noomp_binary = context.resolver.get(Executable(self, self.device.abi, 'stream_noomp'))
- Stream.stream_omp_binary = context.resolver.get(Executable(self, self.device.abi, 'stream_omp'))
-
- Stream.stream_default = self.device.install(Stream.stream_noomp_binary)
- Stream.stream_optional = self.device.install(Stream.stream_omp_binary)
-
- def setup(self, context):
- self.results = os.path.join(self.device.working_directory, stream_results_txt)
- self.timeout = 50
-
- if self.threads:
- self.command = 'OMP_NUM_THREADS={} {} > {}'.format(self.threads, self.stream_optional, self.results)
- else:
- self.command = '{} > {}'.format(self.stream_default, self.results)
-
- def run(self, context):
- self.output = self.device.execute(self.command, timeout=self.timeout)
-
- def update_result(self, context):
- self.device.pull(self.results, context.output_directory)
- outfile = os.path.join(context.output_directory, stream_results_txt)
-
- with open(outfile, 'r') as stream_file:
- for line in stream_file:
- match = system_array_regex.search(line)
- if match:
- context.result.add_metric('bytes_per_array_element', int(match.group(1)), 'bytes')
-
- for label, (regex, units) in regex_map.iteritems():
- match = regex.search(line)
- if match:
- context.result.add_metric(label, float(match.group(1)), units)
-
- for label, regex in regex_map2.iteritems():
- match = regex.search(line)
- if match:
- context.result.add_metric(label, float(match.group(1)), match.group(2))
-
- def finalize(self, context):
- self.device.uninstall(self.stream_default)
- self.device.uninstall(self.stream_optional)
diff --git a/wlauto/workloads/stream/bin/arm64/stream_noomp b/wlauto/workloads/stream/bin/arm64/stream_noomp
deleted file mode 100755
index 58f1de48..00000000
--- a/wlauto/workloads/stream/bin/arm64/stream_noomp
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/stream/bin/arm64/stream_omp b/wlauto/workloads/stream/bin/arm64/stream_omp
deleted file mode 100755
index ab94ad49..00000000
--- a/wlauto/workloads/stream/bin/arm64/stream_omp
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/stream/bin/armeabi/stream_noomp b/wlauto/workloads/stream/bin/armeabi/stream_noomp
deleted file mode 100755
index 3533c879..00000000
--- a/wlauto/workloads/stream/bin/armeabi/stream_noomp
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/stream/bin/armeabi/stream_omp b/wlauto/workloads/stream/bin/armeabi/stream_omp
deleted file mode 100755
index 3ad761e2..00000000
--- a/wlauto/workloads/stream/bin/armeabi/stream_omp
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/sysbench/LICENSE b/wlauto/workloads/sysbench/LICENSE
deleted file mode 100644
index 7de042c4..00000000
--- a/wlauto/workloads/sysbench/LICENSE
+++ /dev/null
@@ -1,24 +0,0 @@
-Included sysbench binary is Free Software ditributed under GPLv2:
-
-/* Copyright (C) 2004 MySQL AB
-This program is free software; you can redistribute it and/or modify
-it under the terms of the GNU General Public License as published by
-the Free Software Foundation; either version 2 of the License, or
-(at your option) any later version.
-This program is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-GNU General Public License for more details.
-You should have received a copy of the GNU General Public License
-along with this program; if not, write to the Free Software
-Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-*/
-
-The full text of the license may be viewed here:
-
-http://www.gnu.org/licenses/gpl-2.0.html
-
-Source code for trace-cmd may be obtained here:
-
-https://github.com/akopytov/sysbench
-
diff --git a/wlauto/workloads/sysbench/__init__.py b/wlauto/workloads/sysbench/__init__.py
deleted file mode 100644
index 1f9b74cf..00000000
--- a/wlauto/workloads/sysbench/__init__.py
+++ /dev/null
@@ -1,203 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,W0201,E0203
-
-import os
-
-from wlauto import Workload, Parameter, Executable
-from wlauto.exceptions import WorkloadError, ConfigError
-from wlauto.utils.misc import parse_value
-from wlauto.utils.types import numeric
-
-
-class Sysbench(Workload):
-
- name = 'sysbench'
- description = """
- SysBench is a modular, cross-platform and multi-threaded benchmark tool
- for evaluating OS parameters that are important for a system running a
- database under intensive load.
-
- The idea of this benchmark suite is to quickly get an impression about
- system performance without setting up complex database benchmarks or
- even without installing a database at all.
-
- **Features of SysBench**
-
- * file I/O performance
- * scheduler performance
- * memory allocation and transfer speed
- * POSIX threads implementation performance
- * database server performance
-
-
- See: https://github.com/akopytov/sysbench
-
- """
-
- parameters = [
- Parameter('timeout', kind=int, default=300,
- description='timeout for workload execution (adjust from default '
- 'if running on a slow device and/or specifying a large value for '
- '``max_requests``'),
- Parameter('test', kind=str, default='cpu',
- allowed_values=['fileio', 'cpu', 'memory', 'threads', 'mutex'],
- description='sysbench test to run'),
- Parameter('threads', kind=int, default=8,
- description='The number of threads sysbench will launch'),
- Parameter('num_threads', kind=int, default=None,
- description='The number of threads sysbench will launch, overrides '
- ' ``threads`` (old parameter name)'),
- Parameter('max_requests', kind=int, default=None,
- description='The limit for the total number of requests.'),
- Parameter('max_time', kind=int, default=None,
- description='''The limit for the total execution time. If neither this nor
- ``max_requests`` is specified, this will default to 30 seconds.'''),
- Parameter('file_test_mode', default=None,
- allowed_values=['seqwr', 'seqrewr', 'seqrd', 'rndrd', 'rndwr', 'rndrw'],
- description='File test mode to use. This should only be specified if ``test`` is '
- '``"fileio"``; if that is the case and ``file_test_mode`` is not specified, '
- 'it will default to ``"seqwr"`` (please see sysbench documentation for '
- 'explanation of various modes).'),
- Parameter('cmd_params', kind=str, default='',
- description='Additional parameters to be passed to sysbench as a single stiring'),
- Parameter('force_install', kind=bool, default=True,
- description='Always install binary found on the host, even if already installed on device'),
- Parameter('taskset_mask', kind=int, default=0,
- description='The processes spawned by sysbench will be pinned to cores as specified by this parameter'),
- ]
-
- def validate(self):
- if not self.num_threads:
- self.num_threads = self.threads
- if (self.max_requests is None) and (self.max_time is None):
- self.max_time = 30
- if self.max_time and (self.max_time + 10) > self.timeout:
- self.timeout = self.max_time + 10
- if self.test == 'fileio' and not self.file_test_mode:
- self.logger.debug('Test is "fileio" and no file_test_mode specified -- using default.')
- self.file_test_mode = 'seqwr'
- elif self.test != 'fileio' and self.file_test_mode:
- raise ConfigError('file_test_mode must not be specified unless test is "fileio"')
-
- def init_resources(self, context):
- self.on_host_binary = context.resolver.get(Executable(self, 'armeabi', 'sysbench'), strict=False)
-
- def setup(self, context):
- params = dict(test=self.test,
- num_threads=self.num_threads)
- if self.max_requests:
- params['max_requests'] = self.max_requests
- if self.max_time:
- params['max_time'] = self.max_time
- self.results_file = self.device.path.join(self.device.working_directory, 'sysbench_result.txt')
- self._check_executable()
- self.command = self._build_command(**params)
-
- def run(self, context):
- self.device.execute(self.command, timeout=self.timeout)
-
- def update_result(self, context):
- host_results_file = os.path.join(context.output_directory, 'sysbench_result.txt')
- self.device.pull(self.results_file, host_results_file)
- context.add_iteration_artifact('sysbench_output', kind='raw', path=host_results_file)
-
- with open(host_results_file) as fh:
- find_line_with('General statistics:', fh)
- extract_metric('total time', fh.next(), context.result)
- extract_metric('total number of events', fh.next(), context.result, lower_is_better=False)
- find_line_with('response time:', fh)
- extract_metric('min', fh.next(), context.result, 'response time ')
- extract_metric('avg', fh.next(), context.result, 'response time ')
- extract_metric('max', fh.next(), context.result, 'response time ')
- extract_metric('approx. 95 percentile', fh.next(), context.result)
- find_line_with('Threads fairness:', fh)
- extract_threads_fairness_metric('events', fh.next(), context.result)
- extract_threads_fairness_metric('execution time', fh.next(), context.result)
-
- def teardown(self, context):
- self.device.remove(self.results_file)
-
- def _check_executable(self):
- self.on_device_binary = self.device.get_installed("sysbench")
- if not self.on_device_binary and not self.on_host_binary:
- raise WorkloadError('sysbench binary is not installed on the device, and it is not found on the host.')
- if self.force_install:
- self.on_device_binary = self.device.install(self.on_host_binary)
- else:
- self.on_device_binary = self.device.install_if_needed(self.on_host_binary)
-
- def _build_command(self, **parameters):
- param_strings = ['--{}={}'.format(k.replace('_', '-'), v)
- for k, v in parameters.iteritems()]
- if self.file_test_mode:
- param_strings.append('--file-test-mode={}'.format(self.file_test_mode))
- sysbench_command = '{} {} {} run'.format(self.on_device_binary, ' '.join(param_strings), self.cmd_params)
- if self.taskset_mask:
- taskset_string = 'busybox taskset 0x{:x} '.format(self.taskset_mask)
- else:
- taskset_string = ''
- return 'cd {} && {} {} > sysbench_result.txt'.format(self.device.working_directory, taskset_string, sysbench_command)
-
-
-# Utility functions
-
-def find_line_with(text, fh):
- for line in fh:
- if text in line:
- return
- message = 'Could not extract sysbench results from {}; did not see "{}"'
- raise WorkloadError(message.format(fh.name, text))
-
-
-def extract_metric(metric, line, result, prefix='', lower_is_better=True):
- try:
- name, value_part = [part.strip() for part in line.split(':')]
- if name != metric:
- message = 'Name mismatch: expected "{}", got "{}"'
- raise WorkloadError(message.format(metric, name.strip()))
- if not value_part or not value_part[0].isdigit():
- raise ValueError('value part does not start with a digit: {}'.format(value_part))
- idx = -1
- if not value_part[idx].isdigit(): # units detected at the end of the line
- while not value_part[idx - 1].isdigit():
- idx -= 1
- value = numeric(value_part[:idx])
- units = value_part[idx:]
- else:
- value = numeric(value_part)
- units = None
- result.add_metric(prefix + metric,
- value, units, lower_is_better=lower_is_better)
- except Exception as e:
- message = 'Could not extract sysbench metric "{}"; got "{}"'
- raise WorkloadError(message.format(prefix + metric, e))
-
-
-def extract_threads_fairness_metric(metric, line, result):
- try:
- name_part, value_part = [part.strip() for part in line.split(':')]
- name = name_part.split('(')[0].strip()
- if name != metric:
- message = 'Name mismatch: expected "{}", got "{}"'
- raise WorkloadError(message.format(metric, name))
- avg, stddev = [numeric(v) for v in value_part.split('/')]
- result.add_metric('thread fairness {} avg'.format(metric), avg)
- result.add_metric('thread fairness {} stddev'.format(metric),
- stddev, lower_is_better=True)
- except Exception as e:
- message = 'Could not extract sysbench metric "{}"; got "{}"'
- raise WorkloadError(message.format(metric, e))
diff --git a/wlauto/workloads/sysbench/bin/armeabi/sysbench b/wlauto/workloads/sysbench/bin/armeabi/sysbench
deleted file mode 100644
index 7004dfba..00000000
--- a/wlauto/workloads/sysbench/bin/armeabi/sysbench
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/telemetry/__init__.py b/wlauto/workloads/telemetry/__init__.py
deleted file mode 100644
index 6de1e0d7..00000000
--- a/wlauto/workloads/telemetry/__init__.py
+++ /dev/null
@@ -1,306 +0,0 @@
-# Copyright 2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=attribute-defined-outside-init
-import os
-import re
-import csv
-import shutil
-import json
-import urllib
-import stat
-from zipfile import is_zipfile, ZipFile
-
-try:
- import pandas as pd
-except ImportError:
- pd = None
-
-from wlauto import Workload, Parameter
-from wlauto.exceptions import WorkloadError, ConfigError
-from wlauto.utils.misc import check_output, get_null, get_meansd
-from wlauto.utils.types import numeric
-
-
-RESULT_REGEX = re.compile(r'RESULT ([^:]+): ([^=]+)\s*=\s*' # preamble and test/metric name
- r'(\[([^\]]+)\]|(\S+))' # value
- r'\s*(\S+)') # units
-TRACE_REGEX = re.compile(r'Trace saved as ([^\n]+)')
-
-# Trace event that signifies rendition of a Frame
-FRAME_EVENT = 'SwapBuffersLatency'
-
-TELEMETRY_ARCHIVE_URL = 'http://storage.googleapis.com/chromium-telemetry/snapshots/telemetry.zip'
-
-
-class Telemetry(Workload):
-
- name = 'telemetry'
- description = """
- Executes Google's Telemetery benchmarking framework
-
- Url: https://www.chromium.org/developers/telemetry
-
- From the web site:
-
- Telemetry is Chrome's performance testing framework. It allows you to
- perform arbitrary actions on a set of web pages and report metrics about
- it. The framework abstracts:
-
- - Launching a browser with arbitrary flags on any platform.
- - Opening a tab and navigating to the page under test.
- - Fetching data via the Inspector timeline and traces.
- - Using Web Page Replay to cache real-world websites so they don't
- change when used in benchmarks.
-
- Design Principles
-
- - Write one performance test that runs on all platforms - Windows, Mac,
- Linux, Chrome OS, and Android for both Chrome and ContentShell.
- - Runs on browser binaries, without a full Chromium checkout, and without
- having to build the browser yourself.
- - Use WebPageReplay to get repeatable test results.
- - Clean architecture for writing benchmarks that keeps measurements and
- use cases separate.
- - Run on non-Chrome browsers for comparative studies.
-
- This instrument runs telemetry via its ``run_benchmark`` script (which
- must be in PATH or specified using ``run_benchmark_path`` parameter) and
- parses metrics from the resulting output.
-
- **device setup**
-
- The device setup will depend on whether you're running a test image (in
- which case little or no setup should be necessary)
-
-
- """
-
- supported_platforms = ['android', 'chromeos']
-
- parameters = [
- Parameter('run_benchmark_path', default=None,
- description="""
- This is the path to run_benchmark script which runs a
- Telemetry benchmark. If not specified, WA will look for Telemetry in its
- dependencies; if not found there, Telemetry will be downloaded.
- """),
- Parameter('test', default='page_cycler.top_10_mobile',
- description="""
- Specifies the telemetry test to run.
- """),
- Parameter('run_benchmark_params', default='',
- description="""
- Additional paramters to be passed to ``run_benchmark``.
- """),
- Parameter('run_timeout', kind=int, default=900,
- description="""
- Timeout for execution of the test.
- """),
- Parameter('extract_fps', kind=bool, default=False,
- description="""
- if ``True``, FPS for the run will be computed from the trace (must be enabled).
- """),
- Parameter('target_config', kind=str, default=None,
- description="""
- Manually specify target configuration for telemetry. This must contain
- --browser option plus any addition options Telemetry requires for a particular
- target (e.g. --device or --remote)
- """),
- ]
-
- def validate(self):
- ret = os.system('{} > {} 2>&1'.format(self.run_benchmark_path, get_null()))
- if ret > 255:
- pass # telemetry found and appears to be installed properly.
- elif ret == 127:
- raise WorkloadError('run_benchmark not found (did you specify correct run_benchmark_path?)')
- else:
- raise WorkloadError('Unexected error from run_benchmark: {}'.format(ret))
- if self.extract_fps and 'trace' not in self.run_benchmark_params:
- raise ConfigError('"trace" profiler must be enabled in order to extract FPS for Telemetry')
- self._resolve_run_benchmark_path()
-
- def setup(self, context):
- self.raw_output = None
- self.error_output = None
- self.command = self.build_command()
-
- def run(self, context):
- self.logger.debug(self.command)
- self.raw_output, self.error_output = check_output(self.command, shell=True, timeout=self.run_timeout, ignore='all')
-
- def update_result(self, context): # pylint: disable=too-many-locals
- if self.error_output:
- self.logger.error('run_benchmarks output contained errors:\n' + self.error_output)
- elif not self.raw_output:
- self.logger.warning('Did not get run_benchmark output.')
- return
- raw_outfile = os.path.join(context.output_directory, 'telemetry_raw.out')
- with open(raw_outfile, 'w') as wfh:
- wfh.write(self.raw_output)
- context.add_artifact('telemetry-raw', raw_outfile, kind='raw')
-
- results, artifacts = parse_telemetry_results(raw_outfile)
- csv_outfile = os.path.join(context.output_directory, 'telemetry.csv')
- with open(csv_outfile, 'wb') as wfh:
- writer = csv.writer(wfh)
- writer.writerow(['kind', 'url', 'iteration', 'value', 'units'])
- for result in results:
- writer.writerows(result.rows)
-
- for i, value in enumerate(result.values, 1):
- context.add_metric(result.kind, value, units=result.units,
- classifiers={'url': result.url, 'time': i})
-
- context.add_artifact('telemetry', csv_outfile, kind='data')
-
- for idx, artifact in enumerate(artifacts):
- if is_zipfile(artifact):
- zf = ZipFile(artifact)
- for item in zf.infolist():
- zf.extract(item, context.output_directory)
- zf.close()
- context.add_artifact('telemetry_trace_{}'.format(idx), path=item.filename, kind='data')
- else: # not a zip archive
- wa_path = os.path.join(context.output_directory,
- os.path.basename(artifact))
- shutil.copy(artifact, wa_path)
- context.add_artifact('telemetry_artifact_{}'.format(idx), path=wa_path, kind='data')
-
- if self.extract_fps:
- self.logger.debug('Extracting FPS...')
- _extract_fps(context)
-
- def build_command(self):
- device_opts = ''
- if self.target_config:
- device_opts = self.target_config
- else:
- if self.device.os == 'chromeos':
- if '--remote' not in self.run_benchmark_params:
- device_opts += '--remote={} '.format(self.device.host)
- if '--browser' not in self.run_benchmark_params:
- device_opts += '--browser=cros-chrome '
- elif self.device.os == 'android':
- if '--device' not in self.run_benchmark_params and self.device.adb_name:
- device_opts += '--device={} '.format(self.device.adb_name)
- if '--browser' not in self.run_benchmark_params:
- device_opts += '--browser=android-webview-shell '
- else:
- raise WorkloadError('Unless you\'re running Telemetry on a ChromeOS or Android device, '
- 'you mast specify target_config option')
- return '{} {} {} {}'.format(self.run_benchmark_path,
- self.test,
- device_opts,
- self.run_benchmark_params)
-
- def _resolve_run_benchmark_path(self):
- # pylint: disable=access-member-before-definition
- if self.run_benchmark_path:
- if not os.path.exists(self.run_benchmark_path):
- raise ConfigError('run_benchmark path "{}" does not exist'.format(self.run_benchmark_path))
- else:
- self.run_benchmark_path = os.path.join(self.dependencies_directory, 'telemetry', 'run_benchmark')
- self.logger.debug('run_benchmark_path not specified using {}'.format(self.run_benchmark_path))
- if not os.path.exists(self.run_benchmark_path):
- self.logger.debug('Telemetry not found locally; downloading...')
- local_archive = os.path.join(self.dependencies_directory, 'telemetry.zip')
- urllib.urlretrieve(TELEMETRY_ARCHIVE_URL, local_archive)
- zf = ZipFile(local_archive)
- zf.extractall(self.dependencies_directory)
- if not os.path.exists(self.run_benchmark_path):
- raise WorkloadError('Could not download and extract Telemetry')
- old_mode = os.stat(self.run_benchmark_path).st_mode
- os.chmod(self.run_benchmark_path, old_mode | stat.S_IXUSR)
-
-
-def _extract_fps(context):
- trace_files = [a.path for a in context.iteration_artifacts
- if a.name.startswith('telemetry_trace_')]
- for tf in trace_files:
- name = os.path.splitext(os.path.basename(tf))[0]
- fps_file = os.path.join(context.output_directory, name + '-fps.csv')
- with open(tf) as fh:
- data = json.load(fh)
- events = pd.Series([e['ts'] for e in data['traceEvents'] if
- FRAME_EVENT == e['name']])
- fps = (1000000 / (events - events.shift(1)))
- fps.index = events
- df = fps.dropna().reset_index()
- df.columns = ['timestamp', 'fps']
- with open(fps_file, 'w') as wfh:
- df.to_csv(wfh, index=False)
- context.add_artifact('{}_fps'.format(name), fps_file, kind='data')
- context.result.add_metric('{} FPS'.format(name), df.fps.mean(),
- units='fps')
- context.result.add_metric('{} FPS (std)'.format(name), df.fps.std(),
- units='fps', lower_is_better=True)
-
-
-class TelemetryResult(object):
-
- @property
- def average(self):
- return get_meansd(self.values)[0]
-
- @property
- def std(self):
- return get_meansd(self.values)[1]
-
- @property
- def rows(self):
- for i, v in enumerate(self.values):
- yield [self.kind, self.url, i, v, self.units]
-
- def __init__(self, kind=None, url=None, values=None, units=None):
- self.kind = kind
- self.url = url
- self.values = values or []
- self.units = units
-
- def __str__(self):
- return 'TR({kind},{url},{values},{units})'.format(**self.__dict__)
-
- __repr__ = __str__
-
-
-def parse_telemetry_results(filepath):
- results = []
- artifacts = []
- with open(filepath) as fh:
- for line in fh:
- match = RESULT_REGEX.search(line)
- if match:
- result = TelemetryResult()
- result.kind = match.group(1)
- result.url = match.group(2)
- if match.group(4):
- result.values = map(numeric, match.group(4).split(','))
- else:
- result.values = [numeric(match.group(5))]
- result.units = match.group(6)
- results.append(result)
- match = TRACE_REGEX.search(line)
- if match:
- artifacts.append(match.group(1))
- return results, artifacts
-
-
-if __name__ == '__main__':
- import sys # pylint: disable=wrong-import-order,wrong-import-position
- from pprint import pprint # pylint: disable=wrong-import-order,wrong-import-position
- path = sys.argv[1]
- pprint(parse_telemetry_results(path))
diff --git a/wlauto/workloads/templerun/__init__.py b/wlauto/workloads/templerun/__init__.py
deleted file mode 100644
index f967a49d..00000000
--- a/wlauto/workloads/templerun/__init__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import GameWorkload
-
-
-class Templerun(GameWorkload):
-
- name = 'templerun'
- description = """
- Templerun game.
-
- """
- package = 'com.imangi.templerun'
- activity = 'com.unity3d.player.UnityPlayerProxyActivity'
- install_timeout = 500
diff --git a/wlauto/workloads/templerun/revent_files/.empty b/wlauto/workloads/templerun/revent_files/.empty
deleted file mode 100644
index e69de29b..00000000
--- a/wlauto/workloads/templerun/revent_files/.empty
+++ /dev/null
diff --git a/wlauto/workloads/templerun/revent_files/Nexus10.run.revent b/wlauto/workloads/templerun/revent_files/Nexus10.run.revent
deleted file mode 100644
index c1019cf7..00000000
--- a/wlauto/workloads/templerun/revent_files/Nexus10.run.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/templerun/revent_files/Nexus10.setup.revent b/wlauto/workloads/templerun/revent_files/Nexus10.setup.revent
deleted file mode 100644
index 90b1cb43..00000000
--- a/wlauto/workloads/templerun/revent_files/Nexus10.setup.revent
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/thechase/__init__.py b/wlauto/workloads/thechase/__init__.py
deleted file mode 100755
index 48c94efa..00000000
--- a/wlauto/workloads/thechase/__init__.py
+++ /dev/null
@@ -1,46 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101
-import time
-
-from wlauto import ApkWorkload, Parameter
-
-
-class TheChase(ApkWorkload):
-
- name = 'thechase'
- description = """
- The Chase demo showcasing the capabilities of Unity game engine.
-
- This demo, is a static video-like game demo, that demonstrates advanced features
- of the unity game engine. It loops continuously until terminated.
-
- """
-
- package = 'com.unity3d.TheChase'
- activity = 'com.unity3d.player.UnityPlayerNativeActivity'
- install_timeout = 200
- view = 'SurfaceView'
-
- parameters = [
- Parameter('duration', kind=int, default=70,
- description=('Duration, in seconds, note that the demo loops the same (roughly) 60 '
- 'second sceene until stopped.')),
- ]
-
- def run(self, context):
- time.sleep(self.duration)
-
diff --git a/wlauto/workloads/truckerparking3d/__init__.py b/wlauto/workloads/truckerparking3d/__init__.py
deleted file mode 100644
index 8180d4fd..00000000
--- a/wlauto/workloads/truckerparking3d/__init__.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-from wlauto import GameWorkload
-
-
-class TruckerParking3D(GameWorkload):
-
- name = 'truckerparking3d'
- description = """
- Trucker Parking 3D game.
-
- (yes, apparently that's a thing...)
- """
- package = 'com.tapinator.truck.parking.bus3d'
- activity = 'com.tapinator.truck.parking.bus3d.GCMNotificationActivity'
diff --git a/wlauto/workloads/vellamo/__init__.py b/wlauto/workloads/vellamo/__init__.py
deleted file mode 100644
index 9abcd4a4..00000000
--- a/wlauto/workloads/vellamo/__init__.py
+++ /dev/null
@@ -1,214 +0,0 @@
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-import os
-import logging
-from HTMLParser import HTMLParser
-from collections import defaultdict, OrderedDict
-
-from wlauto import AndroidUiAutoBenchmark, Parameter
-from wlauto.utils.types import list_of_strs, numeric
-from wlauto.exceptions import WorkloadError
-
-
-#pylint: disable=no-member
-class Vellamo(AndroidUiAutoBenchmark):
-
- name = 'vellamo'
- description = """
- Android benchmark designed by Qualcomm.
-
- Vellamo began as a mobile web benchmarking tool that today has expanded
- to include three primary chapters. The Browser Chapter evaluates mobile
- web browser performance, the Multicore chapter measures the synergy of
- multiple CPU cores, and the Metal Chapter measures the CPU subsystem
- performance of mobile processors. Through click-and-go test suites,
- organized by chapter, Vellamo is designed to evaluate: UX, 3D graphics,
- and memory read/write and peak bandwidth performance, and much more!
-
- Note: Vellamo v3.0 fails to run on Juno
-
- """
- package = 'com.quicinc.vellamo'
- run_timeout = 15 * 60
- benchmark_types = {
- '2.0.3': ['html5', 'metal'],
- '3.0': ['Browser', 'Metal', 'Multi'],
- }
- valid_versions = benchmark_types.keys()
- summary_metrics = None
-
- parameters = [
- Parameter('version', kind=str, allowed_values=valid_versions, default=sorted(benchmark_types, reverse=True)[0],
- description=('Specify the version of Vellamo to be run. '
- 'If not specified, the latest available version will be used.')),
- Parameter('benchmarks', kind=list_of_strs, allowed_values=benchmark_types['3.0'], default=benchmark_types['3.0'],
- description=('Specify which benchmark sections of Vellamo to be run. Only valid on version 3.0 and newer.'
- '\nNOTE: Browser benchmark can be problematic and seem to hang,'
- 'just wait and it will progress after ~5 minutes')),
- Parameter('browser', kind=int, default=1,
- description=('Specify which of the installed browsers will be used for the tests. The number refers to '
- 'the order in which browsers are listed by Vellamo. E.g. ``1`` will select the first browser '
- 'listed, ``2`` -- the second, etc. Only valid for version ``3.0``.'))
- ]
-
- def __init__(self, device, **kwargs):
- super(Vellamo, self).__init__(device, **kwargs)
- if self.version == '2.0.3':
- self.activity = 'com.quicinc.vellamo.VellamoActivity'
- if self.version == '3.0':
- self.activity = 'com.quicinc.vellamo.main.MainActivity'
- self.summary_metrics = self.benchmark_types[self.version]
-
- def setup(self, context):
- self.uiauto_params['version'] = self.version
- self.uiauto_params['browserToUse'] = self.browser
- self.uiauto_params['metal'] = 'Metal' in self.benchmarks
- self.uiauto_params['browser'] = 'Browser' in self.benchmarks
- self.uiauto_params['multicore'] = 'Multi' in self.benchmarks
- super(Vellamo, self).setup(context)
-
- def validate(self):
- super(Vellamo, self).validate()
- if self.version == '2.0.3' or not self.benchmarks or self.benchmarks == []: # pylint: disable=access-member-before-definition
- self.benchmarks = self.benchmark_types[self.version] # pylint: disable=attribute-defined-outside-init
- else:
- for benchmark in self.benchmarks:
- if benchmark not in self.benchmark_types[self.version]:
- raise WorkloadError('Version {} does not support {} benchmarks'.format(self.version, benchmark))
-
- def update_result(self, context):
- super(Vellamo, self).update_result(context)
-
- # Get total scores from logcat
- self.non_root_update_result(context)
-
- if not self.device.is_rooted:
- return
-
- for test in self.benchmarks: # Get all scores from HTML files
- filename = None
- if test == "Browser":
- result_folder = self.device.path.join(context.device_manager.package_data_directory, self.package, 'files')
- for result_file in self.device.list_directory(result_folder, as_root=True):
- if result_file.startswith("Browser"):
- filename = result_file
- else:
- filename = '{}_results.html'.format(test)
-
- device_file = self.device.path.join(self.device.package_data_directory, self.package, 'files', filename)
- host_file = os.path.join(context.output_directory, filename)
- self.device.pull(device_file, host_file, as_root=True)
- with open(host_file) as fh:
- parser = VellamoResultParser()
- parser.feed(fh.read())
- for benchmark in parser.benchmarks:
- benchmark.name = benchmark.name.replace(' ', '_')
- context.result.add_metric('{}_Total'.format(benchmark.name), benchmark.score)
- for name, score in benchmark.metrics.items():
- name = name.replace(' ', '_')
- context.result.add_metric('{}_{}'.format(benchmark.name, name), score)
- context.add_iteration_artifact('vellamo_output', kind='raw', path=filename)
-
- def non_root_update_result(self, context):
- failed = []
- with open(self.logcat_log) as logcat:
- metrics = OrderedDict()
- for line in logcat:
- if 'VELLAMO RESULT:' in line:
- info = line.split(':')
- parts = info[2].split(" ")
- metric = parts[1].strip()
- value = int(parts[2].strip())
- metrics[metric] = value
- if 'VELLAMO ERROR:' in line:
- self.logger.warning("Browser crashed during benchmark, results may not be accurate")
- for key, value in metrics.iteritems():
- key = key.replace(' ', '_')
- context.result.add_metric(key, value)
- if value == 0:
- failed.append(key)
- if failed:
- raise WorkloadError("The following benchmark groups failed: {}".format(", ".join(failed)))
-
-
-class VellamoResult(object):
-
- def __init__(self, name):
- self.name = name
- self.score = None
- self.metrics = {}
-
- def add_metric(self, data):
- split_data = data.split(":")
- name = split_data[0].strip()
- score = split_data[1].strip()
-
- if name in self.metrics:
- raise KeyError("A metric of that name is already present")
- self.metrics[name] = float(score)
-
-
-class VellamoResultParser(HTMLParser):
-
- class StopParsingException(Exception):
- pass
-
- def __init__(self):
- HTMLParser.__init__(self)
- self.inside_div = False
- self.inside_span = 0
- self.inside_li = False
- self.got_data = False
- self.failed = False
- self.benchmarks = []
-
- def feed(self, text):
- try:
- HTMLParser.feed(self, text)
- except self.StopParsingException:
- pass
-
- def handle_starttag(self, tag, attrs):
- if tag == 'div':
- self.inside_div = True
- if tag == 'span':
- self.inside_span += 1
- if tag == 'li':
- self.inside_li = True
-
- def handle_endtag(self, tag):
- if tag == 'div':
- self.inside_div = False
- self.inside_span = 0
- self.got_data = False
- self.failed = False
- if tag == 'li':
- self.inside_li = False
-
- def handle_data(self, data):
- if self.inside_div and not self.failed:
- if "Problem" in data:
- self.failed = True
- elif self.inside_span == 1:
- self.benchmarks.append(VellamoResult(data))
- elif self.inside_span == 3 and not self.got_data:
- self.benchmarks[-1].score = int(data)
- self.got_data = True
- elif self.inside_li and self.got_data:
- if 'failed' not in data:
- self.benchmarks[-1].add_metric(data)
- else:
- self.failed = True
diff --git a/wlauto/workloads/vellamo/com.arm.wlauto.uiauto.vellamo.jar b/wlauto/workloads/vellamo/com.arm.wlauto.uiauto.vellamo.jar
deleted file mode 100644
index 9e71e7eb..00000000
--- a/wlauto/workloads/vellamo/com.arm.wlauto.uiauto.vellamo.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/vellamo/uiauto/build.sh b/wlauto/workloads/vellamo/uiauto/build.sh
deleted file mode 100755
index 4c3ad807..00000000
--- a/wlauto/workloads/vellamo/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2014-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.vellamo.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.vellamo.jar ..
-fi
diff --git a/wlauto/workloads/vellamo/uiauto/build.xml b/wlauto/workloads/vellamo/uiauto/build.xml
deleted file mode 100644
index c137d62d..00000000
--- a/wlauto/workloads/vellamo/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.vellamo" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/vellamo/uiauto/project.properties b/wlauto/workloads/vellamo/uiauto/project.properties
deleted file mode 100644
index ce39f2d0..00000000
--- a/wlauto/workloads/vellamo/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-18
diff --git a/wlauto/workloads/vellamo/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/vellamo/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index a49a18ee..00000000
--- a/wlauto/workloads/vellamo/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,260 +0,0 @@
-/* Copyright 2014-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.vellamo;
-
-import android.app.Activity;
-import android.os.Bundle;
-import android.util.Log;
-import android.view.KeyEvent;
-import java.util.concurrent.TimeUnit;
-import java.util.ArrayList;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.core.UiDevice;
-import com.android.uiautomator.core.UiWatcher;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "vellamo";
- public static ArrayList<String> scores = new ArrayList();
- public static Boolean wasError = false;
-
- public void runUiAutomation() throws Exception {
- Bundle parameters = getParams();
- String version = parameters.getString("version");
- Boolean browser = Boolean.parseBoolean(parameters.getString("browser"));
- Boolean metal = Boolean.parseBoolean(parameters.getString("metal"));
- Boolean multicore = Boolean.parseBoolean(parameters.getString("multicore"));
- Integer browserToUse = Integer.parseInt(parameters.getString("browserToUse")) - 1;
-
- dismissEULA();
-
- if (version.equals("2.0.3")) {
- dissmissWelcomebanner();
- startTest();
- dismissNetworkConnectionDialogIfNecessary();
- dismissExplanationDialogIfNecessary();
- waitForTestCompletion(15 * 60, "com.quicinc.vellamo:id/act_ba_results_btn_no");
- getScore("html5", "com.quicinc.vellamo:id/act_ba_results_img_0");
- getScore("metal", "com.quicinc.vellamo:id/act_ba_results_img_1");
- }
-
- else {
- dismissLetsRoll();
- if (browser) {
- startBrowserTest(browserToUse);
- proccessTest("Browser");
- }
- if (multicore) {
- startTestV3(1);
- proccessTest("Multicore");
-
- }
- if (metal) {
- startTestV3(2);
- proccessTest("Metal");
- }
- }
- for(String result : scores){
- Log.v(TAG, String.format("VELLAMO RESULT: %s", result));
- }
- if (wasError) Log.v("vellamoWatcher", "VELLAMO ERROR: Something crashed while running browser benchmark");
- }
-
- public void startTest() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject runButton = new UiObject(selector.textContains("Run All Chapters"));
-
- if (!runButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
- UiObject pager = new UiObject(selector.className("android.support.v4.view.ViewPager"));
- pager.swipeLeft(2);
- if (!runButton.exists()) {
- throw new UiObjectNotFoundException("Could not find \"Run All Chapters\" button.");
- }
- }
- runButton.click();
- }
-
- public void startBrowserTest(int browserToUse) throws Exception {
- //Ensure chrome is selected as "browser" fails to run the benchmark
- UiSelector selector = new UiSelector();
- UiObject browserToUseButton = new UiObject(selector.className("android.widget.ImageButton")
- .longClickable(true).instance(browserToUse));
- UiObject browserButton = new UiObject(selector.className("android.widget.ImageButton")
- .longClickable(true).selected(true));
- //Disable browsers
- while(browserButton.exists()) browserButton.click();
- if (browserToUseButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
- if (browserToUseButton.exists()) {
- browserToUseButton.click();
- }
- }
-
- //enable a watcher to dismiss browser dialogs
- UiWatcher stoppedWorkingDialogWatcher = new UiWatcher() {
- @Override
- public boolean checkForCondition() {
- UiObject stoppedWorkingDialog = new UiObject(new UiSelector().textStartsWith("Unfortunately"));
- if(stoppedWorkingDialog.exists()){
- wasError = true;
- UiObject okButton = new UiObject(new UiSelector().className("android.widget.Button").text("OK"));
- try {
- okButton.click();
- } catch (UiObjectNotFoundException e) {
- // TODO Auto-generated catch block
- e.printStackTrace();
- }
- return (stoppedWorkingDialog.waitUntilGone(25000));
- }
- return false;
- }
- };
- // Register watcher
- UiDevice.getInstance().registerWatcher("stoppedWorkingDialogWatcher", stoppedWorkingDialogWatcher);
-
- // Run watcher
- UiDevice.getInstance().runWatchers();
-
- startTestV3(0);
- }
-
- public void startTestV3(int run) throws Exception {
- UiSelector selector = new UiSelector();
-
- UiObject thirdRunButton = new UiObject(selector.resourceId("com.quicinc.vellamo:id/card_launcher_run_button").instance(run));
- if (!thirdRunButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
- if (!thirdRunButton.exists()) {
- throw new UiObjectNotFoundException("Could not find three \"Run\" buttons.");
- }
- }
-
- //Run benchmarks
- UiObject runButton = new UiObject(selector.resourceId("com.quicinc.vellamo:id/card_launcher_run_button").instance(run));
- if (!runButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
- if (!runButton.exists()) {
- throw new UiObjectNotFoundException("Could not find correct \"Run\" button.");
- }
- }
- runButton.click();
-
- //Skip tutorial screens
- UiObject swipeScreen = new UiObject(selector.textContains("Swipe left to continue"));
- if (!swipeScreen.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
- if (!swipeScreen.exists()) {
- throw new UiObjectNotFoundException("Could not find \"Swipe screen\".");
- }
- }
- sleep(1);
- swipeScreen.swipeLeft(2);
- sleep(1);
- swipeScreen.swipeLeft(2);
-
- }
-
- public void proccessTest(String metric) throws Exception{
- waitForTestCompletion(15 * 60, "com.quicinc.vellamo:id/button_no");
-
- //Remove watcher
- UiDevice.getInstance().removeWatcher("stoppedWorkingDialogWatcher");
-
- getScore(metric, "com.quicinc.vellamo:id/card_score_score");
- getUiDevice().pressBack();
- getUiDevice().pressBack();
- getUiDevice().pressBack();
- }
-
- public void getScore(String metric, String resourceID) throws Exception {
- UiSelector selector = new UiSelector();
- UiObject score = new UiObject(selector.resourceId(resourceID));
- if (!score.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
- if (!score.exists()) {
- throw new UiObjectNotFoundException("Could not find score on screen.");
- }
- }
- scores.add(metric + " " + score.getText().trim());
- }
-
- public void waitForTestCompletion(int timeout, String resourceID) throws Exception {
- UiSelector selector = new UiSelector();
- UiObject resultsNoButton = new UiObject(selector.resourceId(resourceID));
- if (!resultsNoButton.waitForExists(TimeUnit.SECONDS.toMillis(timeout))) {
- throw new UiObjectNotFoundException("Did not see results screen.");
- }
-
- }
-
- public void dismissEULA() throws Exception {
- UiSelector selector = new UiSelector();
- waitText("Vellamo EULA");
- UiObject acceptButton = new UiObject(selector.text("Accept")
- .className("android.widget.Button"));
- if (acceptButton.exists()) {
- acceptButton.click();
- }
- }
-
- public void dissmissWelcomebanner() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject welcomeBanner = new UiObject(selector.textContains("WELCOME"));
- if (welcomeBanner.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
- UiObject pager = new UiObject(selector.className("android.support.v4.view.ViewPager"));
- pager.swipeLeft(2);
- pager.swipeLeft(2);
- }
- }
-
- public void dismissLetsRoll() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject letsRollButton = new UiObject(selector.className("android.widget.Button")
- .textContains("Let's Roll"));
- if (!letsRollButton.waitForExists(TimeUnit.SECONDS.toMillis(5))) {
- if (!letsRollButton.exists()) {
- throw new UiObjectNotFoundException("Could not find \"Let's Roll\" button.");
- }
- }
- letsRollButton.click();
- }
-
- public void dismissNetworkConnectionDialogIfNecessary() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject dialog = new UiObject(selector.className("android.widget.TextView")
- .textContains("No Network Connection"));
- if (dialog.exists()) {
- UiObject yesButton = new UiObject(selector.className("android.widget.Button")
- .text("Yes"));
- yesButton.click();
- }
- }
-
- public void dismissExplanationDialogIfNecessary() throws Exception {
- UiSelector selector = new UiSelector();
- UiObject dialog = new UiObject(selector.className("android.widget.TextView")
- .textContains("Benchmarks Explanation"));
- if (dialog.exists()) {
- UiObject noButton = new UiObject(selector.className("android.widget.Button")
- .text("No"));
- noButton.click();
- }
- }
-}
diff --git a/wlauto/workloads/video/__init__.py b/wlauto/workloads/video/__init__.py
deleted file mode 100644
index 8f8ee6e7..00000000
--- a/wlauto/workloads/video/__init__.py
+++ /dev/null
@@ -1,138 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,E0203,W0201
-
-import os
-import time
-import urllib
-from collections import defaultdict
-
-from wlauto import Workload, settings, Parameter, Alias
-from wlauto.exceptions import ConfigError, WorkloadError
-from wlauto.utils.misc import ensure_directory_exists as _d
-from wlauto.utils.types import boolean
-
-DOWNLOAD_URLS = {
- '1080p': 'http://download.blender.org/peach/bigbuckbunny_movies/big_buck_bunny_1080p_surround.avi',
- '720p': 'http://download.blender.org/peach/bigbuckbunny_movies/big_buck_bunny_720p_surround.avi',
- '480p': 'http://download.blender.org/peach/bigbuckbunny_movies/big_buck_bunny_480p_surround-fix.avi'
-}
-
-
-class VideoWorkload(Workload):
- name = 'video'
- description = """
- Plays a video file using the standard android video player for a predetermined duration.
-
- The video can be specified either using ``resolution`` workload parameter, in which case
- `Big Buck Bunny`_ MP4 video of that resolution will be downloaded and used, or using
- ``filename`` parameter, in which case the video file specified will be used.
-
-
- .. _Big Buck Bunny: http://www.bigbuckbunny.org/
-
- """
- supported_platforms = ['android']
-
- parameters = [
- Parameter('play_duration', kind=int, default=20,
- description='Playback duration of the video file. This become the duration of the workload.'),
- Parameter('resolution', default='720p', allowed_values=['480p', '720p', '1080p'],
- description='Specifies which resolution video file to play.'),
- Parameter('filename',
- description="""
- The name of the video file to play. This can be either a path
- to the file anywhere on your file system, or it could be just a
- name, in which case, the workload will look for it in
- ``~/.workloads_automation/dependency/video``
- *Note*: either resolution or filename should be specified, but not both!
- """),
- Parameter('force_dependency_push', kind=boolean, default=False,
- description="""
- If true, video will always be pushed to device, regardless
- of whether the file is already on the device. Default is ``False``.
- """),
- ]
-
- aliases = [
- Alias('video_720p', resolution='720p'),
- Alias('video_1080p', resolution='1080p'),
- ]
-
- @property
- def host_video_file(self):
- if not self._selected_file:
- if self.filename:
- if self.filename[0] in './' or len(self.filename) > 1 and self.filename[1] == ':':
- filepath = os.path.abspath(self.filename)
- else:
- filepath = os.path.join(self.video_directory, self.filename)
- if not os.path.isfile(filepath):
- raise WorkloadError('{} does not exist.'.format(filepath))
- self._selected_file = filepath
- else:
- files = self.video_files[self.resolution]
- if not files:
- url = DOWNLOAD_URLS[self.resolution]
- filepath = os.path.join(self.video_directory, os.path.basename(url))
- self.logger.debug('Downloading {}...'.format(filepath))
- urllib.urlretrieve(url, filepath)
- self._selected_file = filepath
- else:
- self._selected_file = files[0]
- if len(files) > 1:
- self.logger.warn('Multiple files for 720p found. Using {}.'.format(self._selected_file))
- self.logger.warn('Use \'filename\'parameter instead of \'resolution\' to specify a different file.')
- return self._selected_file
-
- def init_resources(self, context):
- self.video_directory = _d(os.path.join(settings.dependencies_directory, 'video'))
- self.video_files = defaultdict(list)
- self.enum_video_files()
- self._selected_file = None
-
- def setup(self, context):
- on_device_video_file = os.path.join(self.device.working_directory, os.path.basename(self.host_video_file))
- if self.force_dependency_push or not self.device.file_exists(on_device_video_file):
- self.logger.debug('Copying {} to device.'.format(self.host_video_file))
- self.device.push(self.host_video_file, on_device_video_file, timeout=120)
- self.device.execute('am start -n com.android.browser/.BrowserActivity about:blank')
- time.sleep(5)
- self.device.execute('am force-stop com.android.browser')
- time.sleep(5)
- self.device.clear_logcat()
- command = 'am start -W -S -n com.android.gallery3d/.app.MovieActivity -d {}'.format(on_device_video_file)
- self.device.execute(command)
-
- def run(self, context):
- time.sleep(self.play_duration)
-
- def update_result(self, context):
- self.device.execute('am force-stop com.android.gallery3d')
-
- def teardown(self, context):
- pass
-
- def validate(self):
- if (self.resolution and self.filename) and (self.resolution != self.parameters['resolution'].default):
- raise ConfigError('Ether resolution *or* filename must be specified; but not both.')
-
- def enum_video_files(self):
- for filename in os.listdir(self.video_directory):
- for resolution in self.parameters['resolution'].allowed_values:
- if resolution in filename:
- self.video_files[resolution].append(os.path.join(self.video_directory, filename))
-
diff --git a/wlauto/workloads/videostreaming/__init__.py b/wlauto/workloads/videostreaming/__init__.py
deleted file mode 100644
index 1374e67b..00000000
--- a/wlauto/workloads/videostreaming/__init__.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# pylint: disable=E1101,E0203,W0201
-import os
-
-from wlauto import AndroidUiAutoBenchmark, Parameter
-import wlauto.common.android.resources
-
-
-class Videostreaming(AndroidUiAutoBenchmark):
- name = 'videostreaming'
- description = """
- Uses the FREEdi video player to search, stream and play the specified
- video content from YouTube.
-
- """
- name = 'videostreaming'
- package = 'tw.com.freedi.youtube.player'
- activity = '.MainActivity'
-
- parameters = [
- Parameter('video_name', kind=str,
- description='Name of the video to be played.'),
- Parameter('resolution', kind=str, default='320p', allowed_values=['320p', '720p', '1080p'],
- description='Resolution of the video to be played. If video_name is set'
- 'this setting will be ignored'),
- Parameter('sampling_interval', kind=int, default=20,
- description="""
- Time interval, in seconds, after which the status of the video playback to
- be monitoreThe elapsed time of the video playback is
- monitored after after every ``sampling_interval`` seconds and
- compared against the actual time elapsed and the previous
- sampling point. If the video elapsed time is less that
- (sampling time - ``tolerance``) , then the playback is aborted as
- the video has not been playing continuously.
- """),
- Parameter('tolerance', kind=int, default=3,
- description="""
- Specifies the amount, in seconds, by which sampling time is
- allowed to deviate from elapsed video playback time. If the delta
- is greater than this value (which could happen due to poor network
- connection), workload result will be invalidated.
- """),
- Parameter('run_timeout', kind=int, default=200,
- description='The duration in second for which to play the video'),
- ]
-
- def init_resources(self, context):
- self.uiauto_params['tolerance'] = self.tolerance
- self.uiauto_params['sampling_interval'] = self.sampling_interval
- if self.video_name and self.video_name != "":
- self.uiauto_params['video_name'] = self.video_name.replace(" ", "0space0") # hack to get around uiautomator limitation
- else:
- self.uiauto_params['video_name'] = "abkk sathe {}".format(self.resolution).replace(" ", "0space0")
- self.apk_file = context.resolver.get(wlauto.common.android.resources.ApkFile(self))
- self.uiauto_file = context.resolver.get(wlauto.common.android.resources.JarFile(self))
- self.device_uiauto_file = self.device.path.join(self.device.working_directory,
- os.path.basename(self.uiauto_file))
- if not self.uiauto_package:
- self.uiauto_package = os.path.splitext(os.path.basename(self.uiauto_file))[0]
diff --git a/wlauto/workloads/videostreaming/com.arm.wlauto.uiauto.videostreaming.jar b/wlauto/workloads/videostreaming/com.arm.wlauto.uiauto.videostreaming.jar
deleted file mode 100644
index beb6790b..00000000
--- a/wlauto/workloads/videostreaming/com.arm.wlauto.uiauto.videostreaming.jar
+++ /dev/null
Binary files differ
diff --git a/wlauto/workloads/videostreaming/uiauto/build.sh b/wlauto/workloads/videostreaming/uiauto/build.sh
deleted file mode 100755
index 07e2131e..00000000
--- a/wlauto/workloads/videostreaming/uiauto/build.sh
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/bin/bash
-# Copyright 2013-2015 ARM Limited
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-
-
-class_dir=bin/classes/com/arm/wlauto/uiauto
-base_class=`python -c "import os, wlauto; print os.path.join(os.path.dirname(wlauto.__file__), 'common', 'android', 'BaseUiAutomation.class')"`
-mkdir -p $class_dir
-cp $base_class $class_dir
-
-ant build
-
-if [[ -f bin/com.arm.wlauto.uiauto.videostreaming.jar ]]; then
- cp bin/com.arm.wlauto.uiauto.videostreaming.jar ..
-fi
diff --git a/wlauto/workloads/videostreaming/uiauto/build.xml b/wlauto/workloads/videostreaming/uiauto/build.xml
deleted file mode 100644
index e897fec2..00000000
--- a/wlauto/workloads/videostreaming/uiauto/build.xml
+++ /dev/null
@@ -1,92 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<project name="com.arm.wlauto.uiauto.videostreaming" default="help">
-
- <!-- The local.properties file is created and updated by the 'android' tool.
- It contains the path to the SDK. It should *NOT* be checked into
- Version Control Systems. -->
- <property file="local.properties" />
-
- <!-- The ant.properties file can be created by you. It is only edited by the
- 'android' tool to add properties to it.
- This is the place to change some Ant specific build properties.
- Here are some properties you may want to change/update:
-
- source.dir
- The name of the source directory. Default is 'src'.
- out.dir
- The name of the output directory. Default is 'bin'.
-
- For other overridable properties, look at the beginning of the rules
- files in the SDK, at tools/ant/build.xml
-
- Properties related to the SDK location or the project target should
- be updated using the 'android' tool with the 'update' action.
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems.
-
- -->
- <property file="ant.properties" />
-
- <!-- if sdk.dir was not set from one of the property file, then
- get it from the ANDROID_HOME env var.
- This must be done before we load project.properties since
- the proguard config can use sdk.dir -->
- <property environment="env" />
- <condition property="sdk.dir" value="${env.ANDROID_HOME}">
- <isset property="env.ANDROID_HOME" />
- </condition>
-
- <!-- The project.properties file is created and updated by the 'android'
- tool, as well as ADT.
-
- This contains project specific properties such as project target, and library
- dependencies. Lower level build properties are stored in ant.properties
- (or in .classpath for Eclipse projects).
-
- This file is an integral part of the build system for your
- application and should be checked into Version Control Systems. -->
- <loadproperties srcFile="project.properties" />
-
- <!-- quick check on sdk.dir -->
- <fail
- message="sdk.dir is missing. Make sure to generate local.properties using 'android update project' or to inject it through the ANDROID_HOME environment variable."
- unless="sdk.dir"
- />
-
- <!--
- Import per project custom build rules if present at the root of the project.
- This is the place to put custom intermediary targets such as:
- -pre-build
- -pre-compile
- -post-compile (This is typically used for code obfuscation.
- Compiled code location: ${out.classes.absolute.dir}
- If this is not done in place, override ${out.dex.input.absolute.dir})
- -post-package
- -post-build
- -pre-clean
- -->
- <import file="custom_rules.xml" optional="true" />
-
- <!-- Import the actual build file.
-
- To customize existing targets, there are two options:
- - Customize only one target:
- - copy/paste the target into this file, *before* the
- <import> task.
- - customize it to your needs.
- - Customize the whole content of build.xml
- - copy/paste the content of the rules files (minus the top node)
- into this file, replacing the <import> task.
- - customize to your needs.
-
- ***********************
- ****** IMPORTANT ******
- ***********************
- In all cases you must update the value of version-tag below to read 'custom' instead of an integer,
- in order to avoid having your file be overridden by tools such as "android update project"
- -->
- <!-- version-tag: VERSION_TAG -->
- <import file="${sdk.dir}/tools/ant/uibuild.xml" />
-
-</project>
diff --git a/wlauto/workloads/videostreaming/uiauto/project.properties b/wlauto/workloads/videostreaming/uiauto/project.properties
deleted file mode 100644
index ce39f2d0..00000000
--- a/wlauto/workloads/videostreaming/uiauto/project.properties
+++ /dev/null
@@ -1,14 +0,0 @@
-# This file is automatically generated by Android Tools.
-# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
-#
-# This file must be checked in Version Control Systems.
-#
-# To customize properties used by the Ant build system edit
-# "ant.properties", and override values to adapt the script to your
-# project structure.
-#
-# To enable ProGuard to shrink and obfuscate your code, uncomment this (available properties: sdk.dir, user.home):
-#proguard.config=${sdk.dir}/tools/proguard/proguard-android.txt:proguard-project.txt
-
-# Project target.
-target=android-18
diff --git a/wlauto/workloads/videostreaming/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java b/wlauto/workloads/videostreaming/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
deleted file mode 100644
index e532b499..00000000
--- a/wlauto/workloads/videostreaming/uiauto/src/com/arm/wlauto/uiauto/UiAutomation.java
+++ /dev/null
@@ -1,155 +0,0 @@
-/* Copyright 2013-2015 ARM Limited
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
-*/
-
-
-package com.arm.wlauto.uiauto.videostreaming;
-
-import android.app.Activity;
-import java.util.Date;
-import android.os.Bundle;
-import java.util.concurrent.TimeUnit;
-
-// Import the uiautomator libraries
-import com.android.uiautomator.core.UiObject;
-import com.android.uiautomator.core.UiObjectNotFoundException;
-import com.android.uiautomator.core.UiScrollable;
-import com.android.uiautomator.core.UiSelector;
-import com.android.uiautomator.testrunner.UiAutomatorTestCase;
-
-import com.arm.wlauto.uiauto.BaseUiAutomation;
-
-public class UiAutomation extends BaseUiAutomation {
-
- public static String TAG = "videostreaming";
-
- /*function to convert time in string to sec*/
- public int computeTimeInSec(String time) {
- final int seconds = 60;
- if (!time.contains(":"))
- return -1;
-
- int totalTime = 0, mulfactor = 1;
- String [] strArr = time.split(":");
-
- for (int j = strArr.length - 1; j >= 0; j--) {
- totalTime += Integer.parseInt(strArr[j]) * (mulfactor);
- mulfactor = mulfactor * seconds;
- }
- return totalTime;
- }
-
- public void runUiAutomation() throws Exception {
- final int timeout = 5;
- int currentTime = 0, timeAfter20Sec = 0, videoTime = 0;
- long timeBeforeGetText = 0, timeAfterGetText = 0, timeForGetText = 0;
- Bundle status = new Bundle();
-
- Bundle parameters = getParams();
- if (parameters.size() <= 0)
- return;
-
- int tolerance = Integer.parseInt(parameters.getString("tolerance"));
- int samplingInterval = Integer.parseInt(parameters
- .getString("sampling_interval"));
- String videoName = parameters.getString("video_name").replace("0space0", " "); //Hack to get around uiautomator limitation
-
- UiObject search = new UiObject(new UiSelector()
- .className("android.widget.ImageButton").index(0));
- if (search.exists()) {
- search.clickAndWaitForNewWindow(timeout);
- }
-
- UiObject clickVideoTab = new UiObject(new UiSelector()
- .className("android.widget.Button").text("Video"));
- clickVideoTab.click();
-
- UiObject enterKeyword = new UiObject(new UiSelector()
- .className("android.widget.EditText")
- .text("Please input the keywords"));
- enterKeyword.clearTextField();
- enterKeyword.setText(videoName);
-
- UiSelector selector = new UiSelector();
- UiObject clickSearch = new UiObject(selector.resourceId("tw.com.freedi.youtube.player:id/startSearchBtn"));
- clickSearch.clickAndWaitForNewWindow(timeout);
-
- UiObject clickVideo = new UiObject(new UiSelector().className("android.widget.TextView").textContains(videoName));
- if (!clickVideo.waitForExists(TimeUnit.SECONDS.toMillis(10))) {
- if (!clickVideo.exists()) {
- throw new UiObjectNotFoundException("Could not find video.");
- }
- }
-
- clickVideo.clickAndWaitForNewWindow(timeout);
-
- UiObject totalVideoTime = new UiObject(new UiSelector()
- .className("android.widget.TextView").index(2));
-
- UiObject rewind = new UiObject(new UiSelector()
- .className("android.widget.RelativeLayout")
- .index(0).childSelector(new UiSelector()
- .className("android.widget.LinearLayout")
- .index(1).childSelector(new UiSelector()
- .className("android.widget.LinearLayout")
- .index(1).childSelector(new UiSelector()
- .className("android.widget.ImageButton")
- .enabled(true).index(2)))));
- rewind.click();
-
- videoTime = computeTimeInSec(totalVideoTime.getText());
-
- /**
- * Measure the video elapsed time between sampling intervals and
- * compare it against the actual time elapsed minus tolerance.If the
- * video elapsed time is less than the (actual time elapsed -
- * tolerance), raise the message.
- */
- if (videoTime > samplingInterval) {
- for (int i = 0; i < (videoTime / samplingInterval); i++) {
- UiObject videoCurrentTime = new UiObject(new UiSelector()
- .className("android.widget.TextView").index(0));
-
- sleep(samplingInterval);
-
- // Handle the time taken by the getText function
- timeBeforeGetText = new Date().getTime() / 1000;
- timeAfter20Sec = computeTimeInSec(videoCurrentTime.getText());
- timeAfterGetText = new Date().getTime() / 1000;
- timeForGetText = timeAfterGetText - timeBeforeGetText;
-
- if (timeAfter20Sec == -1) {
- getUiDevice().pressHome();
- return;
- }
-
- if ((timeAfter20Sec - (currentTime + timeForGetText)) <
- (samplingInterval - tolerance)) {
- getUiDevice().pressHome();
-
- getAutomationSupport().sendStatus(Activity.RESULT_CANCELED,
- status);
- return;
- }
- currentTime = timeAfter20Sec;
-
- }
- } else {
- sleep(videoTime);
- }
- getUiDevice().pressBack();
- getUiDevice().pressHome();
- getAutomationSupport().sendStatus(Activity.RESULT_OK, status);
- }
-}