author    Charles Oliveira <charles.oliveira@linaro.org>  2020-03-11 16:48:21 -0300
committer Charles Oliveira <charles.oliveira@linaro.org>  2020-03-23 19:25:31 -0300
commit    381a89d7517cd7e4f70591d4ea053d72aceedb80 (patch)
tree      4393364fb012cbe4184cc204095a9b02fd4f4776 /tests/fixtures.py
parent    3a1aab9581b2ae714f7032d8450f355847a63c83 (diff)
tests: start local squad server to support testing
Start a local instance of SQUAD so that squad-client tests can run against it. Add a fixtures file, which provides all of the data needed by the squad-client tests. The mechanism is pretty simple: before every call to `./manage.py test`, a fresh SQUAD instance is started and this file is run against it, so that all of the data is available through the API.
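For illustration, a minimal sketch of that mechanism, assuming a SQUAD checkout with a standard Django `manage.py` at an assumed SQUAD_CHECKOUT location and running the squad-client tests with plain unittest; the scripts actually wired into `./manage.py test` may differ:

# Hypothetical wrapper illustrating the mechanism: bring up a fresh local
# SQUAD instance, load tests/fixtures.py into it, run the squad-client
# tests, then shut the server down. SQUAD_CHECKOUT and the use of plain
# Django management commands are assumptions, not the project's actual scripts.
import os
import subprocess
import time

SQUAD_CHECKOUT = os.environ.get('SQUAD_CHECKOUT', '/tmp/squad')  # assumed location

# Fresh database for the local SQUAD instance.
subprocess.run(['python', 'manage.py', 'migrate', '--no-input'],
               cwd=SQUAD_CHECKOUT, check=True)

# Feed tests/fixtures.py to the Django shell so all fixture objects get created.
with open('tests/fixtures.py') as fixtures:
    subprocess.run(['python', 'manage.py', 'shell'],
                   cwd=SQUAD_CHECKOUT, stdin=fixtures, check=True)

# Serve the API locally so squad-client tests can talk to it.
server = subprocess.Popen(['python', 'manage.py', 'runserver', '8000'],
                          cwd=SQUAD_CHECKOUT)
time.sleep(5)  # crude wait for the server to come up

try:
    # Run the squad-client test suite against the local instance
    # (assumed here to be discoverable with plain unittest).
    subprocess.run(['python', '-m', 'unittest', 'discover', 'tests'], check=True)
finally:
    server.terminate()
    server.wait()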
Diffstat (limited to 'tests/fixtures.py')
-rw-r--r--  tests/fixtures.py  38
1 file changed, 38 insertions, 0 deletions
diff --git a/tests/fixtures.py b/tests/fixtures.py
new file mode 100644
index 0000000..53da607
--- /dev/null
+++ b/tests/fixtures.py
@@ -0,0 +1,38 @@
+# This file is meant to be run inside a SQUAD instance to provide fixtures for squad-client tests
+#
+# Some guidance for maintaining this file:
+# - try to keep all fixture values in this file
+# - do not delete any of the data anywhere (if a deletion test is needed, create a dedicated object in that test)
+#
+
+from squad.core import models as m
+from squad.ci import models as mci
+
+group = m.Group.objects.create(slug='my_group')
+group2 = m.Group.objects.create(slug='my_other_group')
+
+project = group.projects.create(slug='my_project')
+
+build = project.builds.create(version='my_build')
+build2 = project.builds.create(version='my_build2')
+build3 = project.builds.create(version='my_build3')
+build4 = project.builds.create(version='my_build4')
+build5 = project.builds.create(version='my_build5')
+build6 = project.builds.create(version='my_build6')
+
+environment = project.environments.create(slug='my_env')
+suite = project.suites.create(slug='my_suite')
+
+testrun = build.test_runs.create(environment=environment)
+passed_test = testrun.tests.create(suite=suite, result=True, name='my_passed_test')
+failed_test = testrun.tests.create(suite=suite, result=False, name='my_failed_test')
+xfailed_test = testrun.tests.create(suite=suite, result=True, name='my_xfailed_test', has_known_issues=True)
+skipped_test = testrun.tests.create(suite=suite, result=None, name='my_skipped_test')
+
+backend = mci.Backend.objects.create()
+testjob = testrun.test_jobs.create(backend=backend, target=project, target_build=build)
+
+emailtemplate = m.EmailTemplate.objects.create(name='my_emailtemplate')
+suitemetadata = m.SuiteMetadata.objects.create(name='my_suitemetadata')
+metricthreshold = m.MetricThreshold.objects.create(project=project, value=42)
+report = build.delayed_reports.create()
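
With the server running and these fixtures loaded, a squad-client test can read the data back over the API. A minimal sketch, assuming the high-level squad_client accessors (SquadApi.configure plus the Squad/Group/Project/Build models) and that the local instance listens on port 8000:

# Sketch of a squad-client test reading the fixture data back through the
# local SQUAD API. The port and the exact accessor methods are assumptions
# about the squad_client high-level API, not part of this commit.
import unittest

from squad_client.core.api import SquadApi
from squad_client.core.models import Squad


class FixturesAreVisibleTest(unittest.TestCase):

    @classmethod
    def setUpClass(cls):
        # Point the client at the local SQUAD instance started for the tests.
        SquadApi.configure(url='http://localhost:8000')

    def test_group_and_build_exist(self):
        group = Squad().group('my_group')      # created in tests/fixtures.py
        self.assertIsNotNone(group)

        project = group.project('my_project')
        build = project.build('my_build')
        self.assertIsNotNone(build)


if __name__ == '__main__':
    unittest.main()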