diff --git a/.github/workflows/automatic-doc-checks.yml b/.github/workflows/automatic-doc-checks.yml
new file mode 100644
index 00000000..df8db9a0
--- /dev/null
+++ b/.github/workflows/automatic-doc-checks.yml
@@ -0,0 +1,21 @@
+name: Automatic doc checks
+
+on:
+  push:
+    branches-ignore:
+      - 'main'
+    paths:
+      - 'docs/**'
+      - '.github/workflows/automatic-doc-checks.yml'
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  documentation-checks:
+    uses: canonical/documentation-workflows/.github/workflows/documentation-checks.yaml@main
+    with:
+      working-directory: "docs"
+      fetch-depth: 0
diff --git a/.github/workflows/markdown-style-checks.yml b/.github/workflows/markdown-style-checks.yml
new file mode 100644
index 00000000..3ed8b268
--- /dev/null
+++ b/.github/workflows/markdown-style-checks.yml
@@ -0,0 +1,22 @@
+name: Markdown style checks
+
+on:
+  push:
+    branches-ignore:
+      - 'main'
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  markdown-lint:
+    runs-on: ubuntu-22.04
+    steps:
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - uses: DavidAnson/markdownlint-cli2-action@v16
+        with:
+          config: "docs/.sphinx/.markdownlint.json"
diff --git a/.github/workflows/sphinx-python-dependency-build-checks.yml b/.github/workflows/sphinx-python-dependency-build-checks.yml
new file mode 100644
index 00000000..31219107
--- /dev/null
+++ b/.github/workflows/sphinx-python-dependency-build-checks.yml
@@ -0,0 +1,52 @@
+# The purpose of this workflow file is to confirm that the Sphinx
+# virtual environment can be built from source, consequently documenting
+# the packages required in the build environment to do that.
+#
+# This is needed because some projects embed the documentation into built
+# artifacts, which involves rendering the documentation on the target
+# architecture.
+#
+# Depending on the architecture, pip may or may not have prebuilt wheels
+# available, and as such we need to make sure building wheels from source can
+# succeed.
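+#
+# To reproduce this check locally, run the same command the job below uses
+# (it builds the docs virtual environment with pip forced to compile every
+# wheel from source):
+#
+#   cd docs && make install PIPOPTS="--no-binary :all:"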
+name: Sphinx python dependency build checks
+on:
+  push:
+    branches-ignore:
+      - 'main'
+    paths:
+      - 'docs/**'
+      - '.github/workflows/sphinx-python-dependency-build-checks.yml'
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  build:
+    name: build
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Install dependencies
+        run: |
+          set -ex
+          sudo apt -y install \
+            cargo \
+            libpython3-dev \
+            libxml2-dev \
+            libxslt1-dev \
+            make \
+            python3-venv \
+            rustc \
+            libtiff5-dev libjpeg8-dev libopenjp2-7-dev zlib1g-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.6-dev tk8.6-dev python3-tk libharfbuzz-dev libfribidi-dev libxcb1-dev
+      - name: Build Sphinx venv
+        working-directory: "docs"
+        run: |
+          set -ex
+          make install \
+            PIPOPTS="--no-binary :all:" \
+            || ( cat .sphinx/venv/pip_install.log && exit 1 )
diff --git a/.gitignore b/.gitignore
index e5b91fc5..1da1ee9d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,4 +1,5 @@
 # Visual Studio Code configurations
 .vscode
-.idea
\ No newline at end of file
+.idea
+build
\ No newline at end of file
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 00000000..4b2ef360
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,40 @@
+# .readthedocs.yaml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the version of Python and other tools you might need
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.11"
+  jobs:
+    pre_install:
+      - git fetch --unshallow || true
+    post_checkout:
+      # Cancel building pull requests when there are no changes in the docs directory.
+      # If there are no changes (git diff exits with 0) we force the command to return with 183.
+      # This is a special exit code on Read the Docs that will cancel the build immediately.
+      # https://docs.readthedocs.io/en/stable/build-customization.html#cancel-build-based-on-a-condition
+      - |
+        if [ "$READTHEDOCS_VERSION_TYPE" = "external" ] && git diff --quiet origin/main -- docs/;
+        then
+          exit 183;
+        fi
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+  builder: dirhtml
+  configuration: docs/conf.py
+  fail_on_warning: true
+
+# If using Sphinx, optionally build your docs in additional formats such as PDF
+formats:
+- pdf
+
+# Optionally declare the Python requirements required to build your docs
+python:
+  install:
+    - requirements: docs/.sphinx/requirements.txt
diff --git a/.wokeignore b/.wokeignore
new file mode 100644
index 00000000..c64a6037
--- /dev/null
+++ b/.wokeignore
@@ -0,0 +1,4 @@
+# the cheat sheets contain a link to a repository with a block word which we
+# cannot avoid for now, i.e.
+# https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
+doc-cheat-sheet*
diff --git a/README.md b/README.md
index 5b74ebc3..2124742c 100644
--- a/README.md
+++ b/README.md
@@ -1,167 +1,11 @@
-# Juju deployment
+# Test Observer
 
-Local Juju and charm deployment via microk8s and terraform.
+Test Observer (TO) is a dashboard for viewing the results of tests run on different environments for a particular artefact. A user interested in testing an artefact (a deb, snap, charm or image) under different environments (particular machines or cloud setups) can use TO as a means of storing, viewing and comparing results with previous runs or versions of an artefact. The last use case is particularly useful for catching regressions. Additionally, TO provides a mechanism to assign reviewers that can look at results and mark artefacts as approved or failed to gate updates. It is important to note that TO does not run the tests itself, but provides an API through which users can report results.
 
-## Setup
+Certification currently deploys an instance of TO that they use for reviewing Stable Release Updates (SRUs). Other teams also use this instance for their tests. You can visit the [frontend](https://test-observer.canonical.com/) and view the [API docs](https://test-observer-api.canonical.com/docs), although this currently requires Canonical VPN access. There's also a staging deployment of [frontend](https://test-observer-staging.canonical.com/) and [API](https://test-observer-api-staging.canonical.com/docs) that teams can use to test their integration.
 
-It is recommended to install the pre-requisites on a VM rather than your host machine. To do so, first install multipass:
+## Run Locally
 
-```bash
-sudo snap install multipass
-```
+For development, look at the [backend](/backend/README.md) and [frontend](/frontend/README.md).
 
-Then launch the "charm-dev" VM blueprint that comes pre-setup with required tools (this will take a while):
-
-```bash
-multipass launch --mount $HOME charm-dev
-```
-
-Note the home mount to access the project files in the VM.
-
-Once the VM initialization has been completed, you will need to enable microk8s ingress there:
-
-```bash
-multipass exec charm-dev -- sudo microk8s enable ingress
-```
-
-Then install terraform:
-
-```bash
-multipass exec charm-dev -- sudo snap install terraform --classic
-```
-
-And initialize it:
-
-```bash
-multipass exec charm-dev -- terraform init
-```
-
-## Deploy
-
-You can deploy everything using terraform by running:
-
-```bash
-multipass exec charm-dev -- TF_VAR_environment=development TF_VAR_external_ingress_hostname=local terraform apply -auto-approve
-```
-
-Then wait for the deployment to settle and all the statuses to become active. You can watch the statuses via:
-
-```bash
-multipass exec charm-dev -- JUJU_MODEL=test-observer-development juju status --storage --relations --watch 5s
-```
-
-Look at the IPv4 addresses of your charm-dev vm through:
-
-```bash
-multipass info charm-dev
-```
-
-One of these connect to the ingress enabled inside the VM. To figure out which one try the following command on each IP address until you get response:
-
-```bash
-curl --connect-to ::<IP>: http://test-observer-api.local
-```
-
-Once you find the IP address add the following entry to your host machine's `/etc/hosts` file:
-
-```bash
-<IP> test-observer.local test-observer-api.local
-```
-
-After that you should be able to get to TO frontend on your host machine's browser through the url test-observer.local. You should also be able to access the API through test-observer-api.local.
-
-## Teardown
-
-To take everything down you can start with terraform:
-
-```bash
-multipass exec charm-dev -- TF_VAR_environment=development TF_VAR_external_ingress_hostname=local terraform destroy --auto-approve
-```
-
-The above step can take a while and may even get stuck with some applications in error state. You can watch it through:
-
-```bash
-multipass exec charm-dev -- JUJU_MODEL=test-observer-development juju status --storage --relations --watch 5s
-```
-
-To forcefully remove applications stuck in error state:
-
-```bash
-multipass exec charm-dev -- JUJU_MODEL=test-observer-development juju remove-application <application-name> --destroy-storage --force
-```
-
-Once everything is down and the juju model has been deleted you can stop the multipass VM:
-
-```bash
-multipass stop charm-dev
-```
-
-## Developing the charm
-
-To develop and test updates to the backend and frontend charms, you would typically want to first complete the above steps to deploy a working system. Once you have done that, proceed with the following steps.
-
-### Build and refresh the backend charm
-
-You can make edits to the backend charm and refresh it in the running system on the fly with:
-
-```bash
-cd backend/charm
-charmcraft pack
-juju refresh api --path ./test-observer-api_ubuntu-22.04-amd64.charm
-
-# to update the OCI image that runs the backend
-juju attach-resource api api-image=ghcr.io/canonical/test_observer/backend:[tag or sha]
-```
-
-### Build and refresh the frontend charm
-
-Same thing with the frontend:
-
-```bash
-cd frontend/charm
-charmcraft pack
-
-juju refresh frontend ./test-observer-frontend_ubuntu-22.04-amd64.charm
-
-# to update the OCI image that runs the backend
-juju attach-resource frontend frontend-image=ghcr.io/canonical/test_observer/frontend:[tag or sha]
-```
-
-Note that the frontend app is made aware of the backend URL to connect to using the global `window.testObserverAPIBaseURI`, which is set at runtime with some nginx config level trickery based on...
-
-- the `test-observer-api` charm's `hostname` config value.
-- the frontend charm's `test-observer-api-scheme` config value.
-
-These in turn can be set using the terraform plan (`terraform/test-observer.tf` and associated variables).
-
-## Running tests
-
-To run the unit and integration tests for the frontend charms, do the following:
-
-```bash
-cd frontend/charm
-tox -e unit
-tox -e integration
-```
-
-## Releasing the charms
-
-Charms are released through GitHub actions on push to main. If however you need to release charms on your branch before merging with main you could always just add your branch as a trigger to those same GitHub actions.
-
-## VS Code & charm libraries
-
-VS Code fails to find (for autocompletions and code navigation purposes) the charm libraries under `lib` in each of `backend/charm` and `frontend/charm`. There is a .vscode-settings-default.json found under each of these directories which you can copy to the `.gitignore`d path `.vscode/settings.json` to make them fly. Taking the backend charm as an example:
-
-```bash
-mkdir -p backend/charm/.vscode
-cp backend/charm/.vscode-settings-default.json backend/charm/.vscode/settings.json
-
-mkdir -p frontend/charm/.vscode
-cp frontend/charm/.vscode-settings-default.json frontend/charm/.vscode/settings.json
-```
-
-Now if you use as your project the directory `backend/charm` and `frontend/charm` respectively (which you'll want to do also for them to keep their own virtual environments), VS Code should be happy.
-
-## Handy documentation pointers about charming
-
-- [Integrations (how to provide and require relations)](https://juju.is/docs/sdk/integration)
+To run via Terraform, Juju and charms simulating production and staging environments, look at [terraform](terraform/README.md)
diff --git a/backend/schemata/openapi.json b/backend/schemata/openapi.json
index 562687ad..6ad039ba 100644
--- a/backend/schemata/openapi.json
+++ b/backend/schemata/openapi.json
@@ -261,7 +261,7 @@
           "content": {
             "application/json": {
               "schema": {
-                "$ref": "#/components/schemas/TestExecutionDTO"
+                "$ref": "#/components/schemas/TestExecutionResponse"
               }
             }
           }
@@ -458,7 +458,7 @@
                 "schema": {
                   "type": "array",
                   "items": {
-                    "$ref": "#/components/schemas/TestEventDTO"
+                    "$ref": "#/components/schemas/TestEventResponse"
                   },
                   "title": "Response Get Status Update V1 Test Executions Id Status Update Get"
                 }
@@ -505,7 +505,7 @@
                 "schema": {
                   "type": "array",
                   "items": {
-                    "$ref": "#/components/schemas/ArtefactBuildEnvironmentReviewDTO"
+                    "$ref": "#/components/schemas/ArtefactBuildEnvironmentReviewResponse"
                   },
                   "title": "Response Get Environment Reviews V1 Artefacts Artefact Id Environment Reviews Get"
                 }
@@ -569,7 +569,7 @@
           "content": {
             "application/json": {
               "schema": {
-                "$ref": "#/components/schemas/ArtefactBuildEnvironmentReviewDTO"
+                "$ref": "#/components/schemas/ArtefactBuildEnvironmentReviewResponse"
               }
             }
           }
@@ -615,7 +615,7 @@
                 "schema": {
                   "type": "array",
                   "items": {
-                    "$ref": "#/components/schemas/ArtefactBuildDTO"
+                    "$ref": "#/components/schemas/ArtefactBuildResponse"
                   },
                   "title": "Response Get Artefact Builds V1 Artefacts Artefact Id Builds Get"
                 }
@@ -669,7 +669,7 @@
                 "schema": {
                   "type": "array",
                   "items": {
-                    "$ref": "#/components/schemas/ArtefactDTO"
+                    "$ref": "#/components/schemas/ArtefactResponse"
                   },
                   "title": "Response Get Artefacts V1 Artefacts Get"
                 }
@@ -713,7 +713,7 @@
           "content": {
             "application/json": {
               "schema": {
-                "$ref": "#/components/schemas/ArtefactDTO"
+                "$ref": "#/components/schemas/ArtefactResponse"
               }
             }
          }
@@ -763,7 +763,7 @@
           "content": {
             "application/json": {
               "schema": {
-                "$ref": "#/components/schemas/ArtefactDTO"
+                "$ref": "#/components/schemas/ArtefactResponse"
               }
             }
           }
@@ -807,7 +807,7 @@
                 "schema": {
                   "type": "array",
                   "items": {
-                    "$ref": "#/components/schemas/ArtefactVersionDTO"
+                    "$ref": "#/components/schemas/ArtefactVersionResponse"
                   },
                   "title": "Response Get Artefact Versions V1 Artefacts Artefact Id Versions Get"
                 }
@@ -1352,45 +1352,19 @@
   },
   "components": {
     "schemas": {
-      "ArtefactBuildDTO": {
-        "properties": {
-          "id": {
-            "type": "integer",
-            "title": "Id"
-          },
-          "architecture": {
-            "type": "string",
-            "title": "Architecture"
-          },
-          "revision": {
-            "anyOf": [
-              {
-                "type": "integer"
-              },
-              {
-                "type": "null"
-              }
-            ],
-            "title": "Revision"
-          },
-          "test_executions": {
-            "items": {
-              "$ref": "#/components/schemas/TestExecutionDTO"
-            },
-            "type": "array",
-            "title": "Test Executions"
-          }
-        },
-        "type": "object",
-        "required": [
-          "id",
-          "architecture",
-          "revision",
-          "test_executions"
+      "ArtefactBuildEnvironmentReviewDecision": {
+        "type": "string",
+        "enum": [
+          "REJECTED",
+          "APPROVED_INCONSISTENT_TEST",
+          "APPROVED_UNSTABLE_PHYSICAL_INFRA",
+          "APPROVED_CUSTOMER_PREREQUISITE_FAIL",
+          "APPROVED_FAULTY_HARDWARE",
+          "APPROVED_ALL_TESTS_PASS"
         ],
-        "title": "ArtefactBuildDTO"
+        "title": "ArtefactBuildEnvironmentReviewDecision"
      },
-      "ArtefactBuildEnvironmentReviewDTO": {
+      "ArtefactBuildEnvironmentReviewResponse": {
         "properties": {
           "id": {
             "type": "integer",
@@ -1408,10 +1382,10 @@
             "title": "Review Comment"
           },
           "environment": {
-
"$ref": "#/components/schemas/EnvironmentDTO" + "$ref": "#/components/schemas/EnvironmentResponse" }, "artefact_build": { - "$ref": "#/components/schemas/ArtefactBuildMinimalDTO" + "$ref": "#/components/schemas/ArtefactBuildMinimalResponse" } }, "type": "object", @@ -1422,21 +1396,39 @@ "environment", "artefact_build" ], - "title": "ArtefactBuildEnvironmentReviewDTO" + "title": "ArtefactBuildEnvironmentReviewResponse" }, - "ArtefactBuildEnvironmentReviewDecision": { - "type": "string", - "enum": [ - "REJECTED", - "APPROVED_INCONSISTENT_TEST", - "APPROVED_UNSTABLE_PHYSICAL_INFRA", - "APPROVED_CUSTOMER_PREREQUISITE_FAIL", - "APPROVED_FAULTY_HARDWARE", - "APPROVED_ALL_TESTS_PASS" + "ArtefactBuildMinimalResponse": { + "properties": { + "id": { + "type": "integer", + "title": "Id" + }, + "architecture": { + "type": "string", + "title": "Architecture" + }, + "revision": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "title": "Revision" + } + }, + "type": "object", + "required": [ + "id", + "architecture", + "revision" ], - "title": "ArtefactBuildEnvironmentReviewDecision" + "title": "ArtefactBuildMinimalResponse" }, - "ArtefactBuildMinimalDTO": { + "ArtefactBuildResponse": { "properties": { "id": { "type": "integer", @@ -1456,17 +1448,37 @@ } ], "title": "Revision" + }, + "test_executions": { + "items": { + "$ref": "#/components/schemas/TestExecutionResponse" + }, + "type": "array", + "title": "Test Executions" } }, "type": "object", "required": [ "id", "architecture", - "revision" + "revision", + "test_executions" + ], + "title": "ArtefactBuildResponse" + }, + "ArtefactPatch": { + "properties": { + "status": { + "$ref": "#/components/schemas/ArtefactStatus" + } + }, + "type": "object", + "required": [ + "status" ], - "title": "ArtefactBuildMinimalDTO" + "title": "ArtefactPatch" }, - "ArtefactDTO": { + "ArtefactResponse": { "properties": { "id": { "type": "integer", @@ -1520,13 +1532,17 @@ "type": "string", "title": "Stage" }, + "family": { + "type": "string", + "title": "Family" + }, "status": { "$ref": "#/components/schemas/ArtefactStatus" }, "assignee": { "anyOf": [ { - "$ref": "#/components/schemas/UserDTO" + "$ref": "#/components/schemas/UserResponse" }, { "type": "null" @@ -1573,6 +1589,7 @@ "sha256", "image_url", "stage", + "family", "status", "assignee", "due_date", @@ -1580,19 +1597,7 @@ "all_environment_reviews_count", "completed_environment_reviews_count" ], - "title": "ArtefactDTO" - }, - "ArtefactPatch": { - "properties": { - "status": { - "$ref": "#/components/schemas/ArtefactStatus" - } - }, - "type": "object", - "required": [ - "status" - ], - "title": "ArtefactPatch" + "title": "ArtefactResponse" }, "ArtefactStatus": { "type": "string", @@ -1603,7 +1608,7 @@ ], "title": "ArtefactStatus" }, - "ArtefactVersionDTO": { + "ArtefactVersionResponse": { "properties": { "version": { "type": "string", @@ -1619,7 +1624,7 @@ "version", "artefact_id" ], - "title": "ArtefactVersionDTO" + "title": "ArtefactVersionResponse" }, "C3TestResult": { "properties": { @@ -1733,29 +1738,6 @@ ], "title": "EndTestExecutionRequest" }, - "EnvironmentDTO": { - "properties": { - "id": { - "type": "integer", - "title": "Id" - }, - "name": { - "type": "string", - "title": "Name" - }, - "architecture": { - "type": "string", - "title": "Architecture" - } - }, - "type": "object", - "required": [ - "id", - "name", - "architecture" - ], - "title": "EnvironmentDTO" - }, "EnvironmentReportedIssueRequest": { "properties": { "environment_name": { @@ -1848,6 +1830,29 @@ ], "title": 
"EnvironmentReportedIssueResponse" }, + "EnvironmentResponse": { + "properties": { + "id": { + "type": "integer", + "title": "Id" + }, + "name": { + "type": "string", + "title": "Name" + }, + "architecture": { + "type": "string", + "title": "Architecture" + } + }, + "type": "object", + "required": [ + "id", + "name", + "architecture" + ], + "title": "EnvironmentResponse" + }, "EnvironmentReviewPatch": { "properties": { "review_decision": { @@ -1923,13 +1928,13 @@ "$ref": "#/components/schemas/FamilyName" }, "test_execution": { - "$ref": "#/components/schemas/TestExecutionDTO" + "$ref": "#/components/schemas/TestExecutionResponse" }, "artefact": { - "$ref": "#/components/schemas/ArtefactDTO" + "$ref": "#/components/schemas/ArtefactResponse" }, "artefact_build": { - "$ref": "#/components/schemas/ArtefactBuildMinimalDTO" + "$ref": "#/components/schemas/ArtefactBuildMinimalResponse" } }, "type": "object", @@ -2337,7 +2342,7 @@ "properties": { "events": { "items": { - "$ref": "#/components/schemas/TestEventDTO" + "$ref": "#/components/schemas/TestEventResponse" }, "type": "array", "title": "Events" @@ -2349,7 +2354,7 @@ ], "title": "StatusUpdateRequest" }, - "TestEventDTO": { + "TestEventResponse": { "properties": { "event_name": { "type": "string", @@ -2371,9 +2376,9 @@ "timestamp", "detail" ], - "title": "TestEventDTO" + "title": "TestEventResponse" }, - "TestExecutionDTO": { + "TestExecutionResponse": { "properties": { "id": { "type": "integer", @@ -2402,7 +2407,7 @@ "title": "C3 Link" }, "environment": { - "$ref": "#/components/schemas/EnvironmentDTO" + "$ref": "#/components/schemas/EnvironmentResponse" }, "status": { "$ref": "#/components/schemas/TestExecutionStatus" @@ -2427,7 +2432,7 @@ "test_plan", "is_rerun_requested" ], - "title": "TestExecutionDTO" + "title": "TestExecutionResponse" }, "TestExecutionStatus": { "type": "string", @@ -2669,7 +2674,7 @@ ], "title": "TestResultStatus" }, - "UserDTO": { + "UserResponse": { "properties": { "id": { "type": "integer", @@ -2695,7 +2700,7 @@ "launchpad_email", "name" ], - "title": "UserDTO" + "title": "UserResponse" }, "ValidationError": { "properties": { diff --git a/backend/test_observer/controllers/artefacts/artefacts.py b/backend/test_observer/controllers/artefacts/artefacts.py index 07f89ff5..3852d99a 100644 --- a/backend/test_observer/controllers/artefacts/artefacts.py +++ b/backend/test_observer/controllers/artefacts/artefacts.py @@ -34,9 +34,9 @@ is_there_a_rejected_environment, ) from .models import ( - ArtefactDTO, + ArtefactResponse, ArtefactPatch, - ArtefactVersionDTO, + ArtefactVersionResponse, ) router = APIRouter(tags=["artefacts"]) @@ -44,7 +44,7 @@ router.include_router(builds.router) -@router.get("", response_model=list[ArtefactDTO]) +@router.get("", response_model=list[ArtefactResponse]) def get_artefacts(family: FamilyName | None = None, db: Session = Depends(get_db)): """Get latest artefacts optionally by family""" artefacts = [] @@ -69,7 +69,7 @@ def get_artefacts(family: FamilyName | None = None, db: Session = Depends(get_db return artefacts -@router.get("/{artefact_id}", response_model=ArtefactDTO) +@router.get("/{artefact_id}", response_model=ArtefactResponse) def get_artefact( artefact: Artefact = Depends( ArtefactRetriever( @@ -82,7 +82,7 @@ def get_artefact( return artefact -@router.patch("/{artefact_id}", response_model=ArtefactDTO) +@router.patch("/{artefact_id}", response_model=ArtefactResponse) def patch_artefact( request: ArtefactPatch, db: Session = Depends(get_db), @@ -121,7 +121,7 @@ def 
_validate_artefact_status( ) -@router.get("/{artefact_id}/versions", response_model=list[ArtefactVersionDTO]) +@router.get("/{artefact_id}/versions", response_model=list[ArtefactVersionResponse]) def get_artefact_versions( artefact: Artefact = Depends(ArtefactRetriever()), db: Session = Depends(get_db) ): diff --git a/backend/test_observer/controllers/artefacts/builds.py b/backend/test_observer/controllers/artefacts/builds.py index 8cce4152..4b192471 100644 --- a/backend/test_observer/controllers/artefacts/builds.py +++ b/backend/test_observer/controllers/artefacts/builds.py @@ -26,13 +26,13 @@ ) from .models import ( - ArtefactBuildDTO, + ArtefactBuildResponse, ) router = APIRouter(tags=["artefact-builds"]) -@router.get("/{artefact_id}/builds", response_model=list[ArtefactBuildDTO]) +@router.get("/{artefact_id}/builds", response_model=list[ArtefactBuildResponse]) def get_artefact_builds( artefact: Artefact = Depends( ArtefactRetriever( diff --git a/backend/test_observer/controllers/artefacts/environment_reviews.py b/backend/test_observer/controllers/artefacts/environment_reviews.py index fd48dfa2..856b1e97 100644 --- a/backend/test_observer/controllers/artefacts/environment_reviews.py +++ b/backend/test_observer/controllers/artefacts/environment_reviews.py @@ -28,7 +28,7 @@ from test_observer.data_access.setup import get_db from .models import ( - ArtefactBuildEnvironmentReviewDTO, + ArtefactBuildEnvironmentReviewResponse, EnvironmentReviewPatch, ) @@ -37,7 +37,7 @@ @router.get( "/{artefact_id}/environment-reviews", - response_model=list[ArtefactBuildEnvironmentReviewDTO], + response_model=list[ArtefactBuildEnvironmentReviewResponse], ) def get_environment_reviews( artefact: Artefact = Depends( @@ -57,7 +57,7 @@ def get_environment_reviews( @router.patch( "/{artefact_id}/environment-reviews/{review_id}", - response_model=ArtefactBuildEnvironmentReviewDTO, + response_model=ArtefactBuildEnvironmentReviewResponse, ) def update_environment_review( artefact_id: int, diff --git a/backend/test_observer/controllers/artefacts/models.py b/backend/test_observer/controllers/artefacts/models.py index e3298c37..8ca18f65 100644 --- a/backend/test_observer/controllers/artefacts/models.py +++ b/backend/test_observer/controllers/artefacts/models.py @@ -34,7 +34,7 @@ ) -class UserDTO(BaseModel): +class UserResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int @@ -43,7 +43,7 @@ class UserDTO(BaseModel): name: str -class ArtefactDTO(BaseModel): +class ArtefactResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int @@ -59,15 +59,16 @@ class ArtefactDTO(BaseModel): sha256: str image_url: str stage: str + family: str status: ArtefactStatus - assignee: UserDTO | None + assignee: UserResponse | None due_date: date | None bug_link: str all_environment_reviews_count: int completed_environment_reviews_count: int -class EnvironmentDTO(BaseModel): +class EnvironmentResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int @@ -75,7 +76,7 @@ class EnvironmentDTO(BaseModel): architecture: str -class TestExecutionDTO(BaseModel): +class TestExecutionResponse(BaseModel): __test__ = False model_config = ConfigDict(from_attributes=True) @@ -83,7 +84,7 @@ class TestExecutionDTO(BaseModel): id: int ci_link: str | None c3_link: str | None - environment: EnvironmentDTO + environment: EnvironmentResponse status: TestExecutionStatus rerun_request: Any = Field(exclude=True) test_plan: str @@ -93,25 +94,25 @@ def is_rerun_requested(self) -> bool: return 
bool(self.rerun_request) -class ArtefactBuildDTO(BaseModel): +class ArtefactBuildResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int architecture: str revision: int | None - test_executions: list[TestExecutionDTO] + test_executions: list[TestExecutionResponse] class ArtefactPatch(BaseModel): status: ArtefactStatus -class ArtefactVersionDTO(BaseModel): +class ArtefactVersionResponse(BaseModel): version: str artefact_id: int = Field(validation_alias=AliasPath("id")) -class ArtefactBuildMinimalDTO(BaseModel): +class ArtefactBuildMinimalResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int @@ -119,12 +120,12 @@ class ArtefactBuildMinimalDTO(BaseModel): revision: int | None -class ArtefactBuildEnvironmentReviewDTO(BaseModel): +class ArtefactBuildEnvironmentReviewResponse(BaseModel): id: int review_decision: list[ArtefactBuildEnvironmentReviewDecision] review_comment: str - environment: EnvironmentDTO - artefact_build: ArtefactBuildMinimalDTO + environment: EnvironmentResponse + artefact_build: ArtefactBuildMinimalResponse class EnvironmentReviewPatch(BaseModel): diff --git a/backend/test_observer/controllers/test_executions/models.py b/backend/test_observer/controllers/test_executions/models.py index 878033a7..f01ea71e 100644 --- a/backend/test_observer/controllers/test_executions/models.py +++ b/backend/test_observer/controllers/test_executions/models.py @@ -30,9 +30,9 @@ from test_observer.common.constants import PREVIOUS_TEST_RESULT_COUNT from test_observer.controllers.artefacts.models import ( - ArtefactBuildMinimalDTO, - ArtefactDTO, - TestExecutionDTO, + ArtefactBuildMinimalResponse, + ArtefactResponse, + TestExecutionResponse, ) from test_observer.data_access.models_enums import ( FamilyName, @@ -175,13 +175,13 @@ class PendingRerun(BaseModel): "test_execution", "artefact_build", "artefact", "family" ) ) - test_execution: TestExecutionDTO = Field( + test_execution: TestExecutionResponse = Field( validation_alias=AliasPath("test_execution") ) - artefact: ArtefactDTO = Field( + artefact: ArtefactResponse = Field( validation_alias=AliasPath("test_execution", "artefact_build", "artefact") ) - artefact_build: ArtefactBuildMinimalDTO = Field( + artefact_build: ArtefactBuildMinimalResponse = Field( validation_alias=AliasPath("test_execution", "artefact_build") ) @@ -190,11 +190,11 @@ class DeleteReruns(BaseModel): test_execution_ids: set[int] -class TestEventDTO(BaseModel): +class TestEventResponse(BaseModel): event_name: str timestamp: datetime detail: str class StatusUpdateRequest(BaseModel): - events: list[TestEventDTO] + events: list[TestEventResponse] diff --git a/backend/test_observer/controllers/test_executions/patch.py b/backend/test_observer/controllers/test_executions/patch.py index 6bcd5800..b31f2253 100644 --- a/backend/test_observer/controllers/test_executions/patch.py +++ b/backend/test_observer/controllers/test_executions/patch.py @@ -18,7 +18,7 @@ from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.orm import Session -from test_observer.controllers.artefacts.models import TestExecutionDTO +from test_observer.controllers.artefacts.models import TestExecutionResponse from test_observer.data_access.models import TestExecution from test_observer.data_access.models_enums import TestExecutionStatus, TestResultStatus from test_observer.data_access.setup import get_db @@ -28,7 +28,7 @@ router = APIRouter() -@router.patch("/{id}", response_model=TestExecutionDTO) +@router.patch("/{id}", 
response_model=TestExecutionResponse) def patch_test_execution( id: int, request: TestExecutionsPatchRequest, diff --git a/backend/test_observer/controllers/test_executions/status_update.py b/backend/test_observer/controllers/test_executions/status_update.py index ff25f66d..c0b6048f 100644 --- a/backend/test_observer/controllers/test_executions/status_update.py +++ b/backend/test_observer/controllers/test_executions/status_update.py @@ -26,7 +26,7 @@ from test_observer.data_access.setup import get_db from .logic import delete_previous_test_events -from .models import StatusUpdateRequest, TestEventDTO +from .models import StatusUpdateRequest, TestEventResponse from .testflinger_event_parser import TestflingerEventParser router = APIRouter() @@ -67,7 +67,7 @@ def put_status_update( db.commit() -@router.get("/{id}/status_update", response_model=list[TestEventDTO]) +@router.get("/{id}/status_update", response_model=list[TestEventResponse]) def get_status_update(id: int, db: Session = Depends(get_db)): test_execution = db.get( TestExecution, diff --git a/backend/tests/controllers/artefacts/test_artefacts.py b/backend/tests/controllers/artefacts/test_artefacts.py index 2c7b8e6f..9209c502 100644 --- a/backend/tests/controllers/artefacts/test_artefacts.py +++ b/backend/tests/controllers/artefacts/test_artefacts.py @@ -279,6 +279,7 @@ def _assert_get_artefact_response(response: dict[str, Any], artefact: Artefact) "sha256": artefact.sha256, "image_url": artefact.image_url, "status": artefact.status, + "family": artefact.family, "assignee": None, "due_date": ( artefact.due_date.strftime("%Y-%m-%d") if artefact.due_date else None diff --git a/backend/tests/controllers/test_executions/test_reruns.py b/backend/tests/controllers/test_executions/test_reruns.py index 4f2753b2..1df1f927 100644 --- a/backend/tests/controllers/test_executions/test_reruns.py +++ b/backend/tests/controllers/test_executions/test_reruns.py @@ -103,6 +103,7 @@ def test_execution_to_pending_rerun(test_execution: TestExecution) -> dict: "completed_environment_reviews_count": ( test_execution.artefact_build.artefact.completed_environment_reviews_count ), + "family": test_execution.artefact_build.artefact.family, }, "artefact_build": { "id": test_execution.artefact_build.id, diff --git a/docs/.custom_wordlist.txt b/docs/.custom_wordlist.txt new file mode 100644 index 00000000..08a51ee4 --- /dev/null +++ b/docs/.custom_wordlist.txt @@ -0,0 +1,33 @@ +# Leave a blank line at the end of this file to support concatenation +backend +backend's +Backend +cjk +cryptographically +dvipng +fonts +freefont +frontend +frontend's +Frontend +github +GPG +GPLv +gyre +https +lang +latexmk +md +otf +plantuml +schemas +SRU +SRUs +tex +texlive +TOC +utils +VPN +WCAG +xetex +xindy diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 00000000..be332da3 --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,5 @@ +_build +venv +.doctrees +warnings.txt +.wordlist.dic \ No newline at end of file diff --git a/docs/.sphinx/.markdownlint.json b/docs/.sphinx/.markdownlint.json new file mode 100644 index 00000000..536f9ea9 --- /dev/null +++ b/docs/.sphinx/.markdownlint.json @@ -0,0 +1,21 @@ +{ + "default": false, + "MD003": { + "style": "atx" + }, + "MD014": true, + "MD018": true, + "MD022": true, + "MD023": true, + "MD026": { + "punctuation": ".,;。,;" + }, + "MD031": { + "list_items": false + }, + "MD032": true, + "MD035": true, + "MD042": true, + "MD045": true, + "MD052": true +} \ No newline at end of file diff --git a/docs/.sphinx/.wordlist.txt 
b/docs/.sphinx/.wordlist.txt
new file mode 100644
index 00000000..be5021a1
--- /dev/null
+++ b/docs/.sphinx/.wordlist.txt
@@ -0,0 +1,64 @@
+ACME
+ACME's
+addons
+AGPLv
+API
+APIs
+balancer
+Charmhub
+CLI
+DCO
+Diátaxis
+Dqlite
+dropdown
+EBS
+EKS
+enablement
+favicon
+Furo
+Git
+GitHub
+Grafana
+IAM
+installable
+JSON
+Juju
+Kubeflow
+Kubernetes
+Launchpad
+linter
+LTS
+LXD
+Makefile
+Makefiles
+Matrix
+Mattermost
+MicroCeph
+MicroCloud
+MicroOVN
+MyST
+namespace
+namespaces
+NodePort
+Numbat
+observability
+OEM
+OLM
+Permalink
+pre
+Quickstart
+ReadMe
+reST
+reStructuredText
+roadmap
+RTD
+subdirectories
+subfolders
+subtree
+TODO
+Ubuntu
+UI
+UUID
+VM
+webhook
+YAML
diff --git a/docs/.sphinx/_static/project_specific.css b/docs/.sphinx/_static/project_specific.css
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/.sphinx/get_vale_conf.py b/docs/.sphinx/get_vale_conf.py
new file mode 100644
index 00000000..9ee2d0b5
--- /dev/null
+++ b/docs/.sphinx/get_vale_conf.py
@@ -0,0 +1,53 @@
+#! /usr/bin/env python
+
+import requests
+import os
+
+DIR = os.getcwd()
+
+
+def main():
+    if os.path.exists(f"{DIR}/.sphinx/styles/Canonical"):
+        print("Vale directory exists")
+    else:
+        os.makedirs(f"{DIR}/.sphinx/styles/Canonical")
+
+    url = (
+        "https://api.github.com/repos/canonical/praecepta/"
+        + "contents/styles/Canonical"
+    )
+    r = requests.get(url)
+    for item in r.json():
+        download = requests.get(item["download_url"])
+        file = open(".sphinx/styles/Canonical/" + item["name"], "w")
+        file.write(download.text)
+        file.close()
+
+    if os.path.exists(f"{DIR}/.sphinx/styles/config/vocabularies/Canonical"):
+        print("Vocab directory exists")
+    else:
+        os.makedirs(f"{DIR}/.sphinx/styles/config/vocabularies/Canonical")
+
+    url = (
+        "https://api.github.com/repos/canonical/praecepta/"
+        + "contents/styles/config/vocabularies/Canonical"
+    )
+    r = requests.get(url)
+    for item in r.json():
+        download = requests.get(item["download_url"])
+        file = open(
+            ".sphinx/styles/config/vocabularies/Canonical/" + item["name"],
+            "w"
+        )
+        file.write(download.text)
+        file.close()
+    config = requests.get(
+        "https://raw.githubusercontent.com/canonical/praecepta/main/vale.ini"
+    )
+    file = open(".sphinx/vale.ini", "w")
+    file.write(config.text)
+    file.close()
+
+
+if __name__ == "__main__":
+    main()
diff --git a/docs/.sphinx/metrics/build_metrics.sh b/docs/.sphinx/metrics/build_metrics.sh
new file mode 100755
index 00000000..bd1ff1cb
--- /dev/null
+++ b/docs/.sphinx/metrics/build_metrics.sh
@@ -0,0 +1,15 @@
+#!/bin/bash
+# shellcheck disable=all
+
+links=0
+images=0
+
+# count number of links
+links=$(find . -type d -path './.sphinx' -prune -o -name '*.html' -exec cat {} + | grep -o "<a " | wc -l)
+
+# count number of images
+images=$(find . -type d -path './.sphinx' -prune -o -name '*.html' -exec cat {} + | grep -o "<img " | wc -l)
+
+echo "Number of links: $links"
+echo "Number of images: $images"
diff --git a/docs/Makefile b/docs/Makefile
new file mode 100644
--- /dev/null
+++ b/docs/Makefile
+# You can set these variables from the command line.
+SPHINXDIR     = .sphinx
+SPHINXOPTS    ?= -c . -d $(SPHINXDIR)/.doctrees -j auto
+SPHINXBUILD   = $(VENVDIR)/bin/sphinx-build
+SOURCEDIR     = .
+BUILDDIR      = _build
+VENVDIR       = $(SPHINXDIR)/venv
+PA11Y         = $(SPHINXDIR)/node_modules/pa11y/bin/pa11y.js --config $(SPHINXDIR)/pa11y.json
+VENV          = $(VENVDIR)/bin/activate
+TARGET        = *
+ALLFILES      = *.rst **/*.rst
+METRICSDIR    = $(SOURCEDIR)/.sphinx/metrics
+REQPDFPACKS   = latexmk fonts-freefont-otf texlive-latex-recommended texlive-latex-extra texlive-fonts-recommended texlive-font-utils texlive-lang-cjk texlive-xetex plantuml xindy tex-gyre dvipng
+CONFIRM_SUDO ?= N
+
+.DEFAULT_GOAL := help
+
+help:
+	@echo "\n" \
+	"------------------------------------------------------------- \n" \
+	"* watch, build and serve the documentation:  make run \n" \
+	"* only build:                                make html \n" \
+	"* clean up build artefacts:                  make clean \n" \
+	"* see the full list of targets:              make full-help \n" \
+	"------------------------------------------------------------- \n"
+
+.PHONY: full-help woke-install spellcheck-install pa11y-install install run html \
+	epub serve clean clean-doc spelling spellcheck linkcheck woke \
+	allmetrics pa11y pdf-prep-force pdf-prep pdf Makefile.sp vale bash
+
+full-help: $(VENVDIR)
+	@. $(VENV); $(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+	@echo "\n\033[1;31mNOTE: This help text shows unsupported targets!\033[0m"
+	@echo "Run 'make help' to see supported targets."
+
+# If requirements are updated, venv should be rebuilt and timestamped.
+$(VENVDIR):
+	python3 -c "import venv" || \
+        (echo "You must install python3-venv before you can build the documentation."; exit 1)
+	@echo "... setting up virtualenv"
+	python3 -m venv $(VENVDIR)
+	. $(VENV); pip install $(PIPOPTS) --require-virtualenv \
+	    --upgrade -r $(SPHINXDIR)/requirements.txt \
+	    --log $(VENVDIR)/pip_install.log
+	@test ! -f $(VENVDIR)/pip_list.txt || \
+	    mv $(VENVDIR)/pip_list.txt $(VENVDIR)/pip_list.txt.bak
+	@. $(VENV); pip list --local --format=freeze > $(VENVDIR)/pip_list.txt
+	@touch $(VENVDIR)
+
+woke-install:
+	@type woke >/dev/null 2>&1 || \
+	{ \
+		echo "Installing system-wide \"woke\" snap..."; \
+		confirm_sudo=$(CONFIRM_SUDO); \
+		if [ "$$confirm_sudo" != "y" ] && [ "$$confirm_sudo" != "Y" ]; then \
+			read -p "This requires sudo privileges. Proceed? [y/N]: " confirm_sudo; \
+		fi; \
+		if [ "$$confirm_sudo" = "y" ] || [ "$$confirm_sudo" = "Y" ]; then \
+			sudo snap install woke; \
+		else \
+			echo "Installation cancelled."; \
+		fi \
+	}
+
+spellcheck-install:
+	@type aspell >/dev/null 2>&1 || \
+	{ \
+		echo "Installing system-wide \"aspell\" packages..."; \
+		confirm_sudo=$(CONFIRM_SUDO); \
+		if [ "$$confirm_sudo" != "y" ] && [ "$$confirm_sudo" != "Y" ]; then \
+			read -p "This requires sudo privileges. Proceed? [y/N]: " confirm_sudo; \
+		fi; \
+		if [ "$$confirm_sudo" = "y" ] || [ "$$confirm_sudo" = "Y" ]; then \
+			sudo apt-get install aspell aspell-en; \
+		else \
+			echo "Installation cancelled."; \
+		fi \
+	}
+
+pa11y-install:
+	@type $(PA11Y) >/dev/null 2>&1 || { \
+		echo "Installing \"pa11y\" from npm... \n"; \
+		mkdir -p $(SPHINXDIR)/node_modules/ ; \
+		npm install --prefix $(SPHINXDIR) pa11y; \
+	}
+
+install: $(VENVDIR)
+
+run: install
+	. $(VENV); $(VENVDIR)/bin/sphinx-autobuild -b dirhtml "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS)
+
+# Doesn't depend on $(BUILDDIR) to rebuild properly at every run.
+html: install
+	. $(VENV); $(SPHINXBUILD) -W --keep-going -b dirhtml "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS)
+
+epub: install
+	. $(VENV); $(SPHINXBUILD) -b epub "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS)
+
+serve: html
+	cd "$(BUILDDIR)"; python3 -m http.server --bind 127.0.0.1 8000
+
+clean: clean-doc
+	@test ! -e "$(VENVDIR)" -o -d "$(VENVDIR)" -a "$(abspath $(VENVDIR))" != "$(VENVDIR)"
+	rm -rf $(VENVDIR)
+	rm -rf $(SPHINXDIR)/node_modules/
+	rm -rf $(SPHINXDIR)/styles
+	rm -rf $(SPHINXDIR)/vale.ini
+
+clean-doc:
+	git clean -fx "$(BUILDDIR)"
+	rm -rf $(SPHINXDIR)/.doctrees
+
+spellcheck: spellcheck-install
+	. $(VENV) ; python3 -m pyspelling -c $(SPHINXDIR)/spellingcheck.yaml -j $(shell nproc)
+
+spelling: html spellcheck
+
+linkcheck: install
+	. $(VENV) ; $(SPHINXBUILD) -b linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) || { grep --color -F "[broken]" "$(BUILDDIR)/output.txt"; exit 1; }
+	exit 0
+
+woke: woke-install
+	woke $(ALLFILES) --exit-1-on-failure \
+	    -c https://raw.githubusercontent.com/canonical/Inclusive-naming/main/config.yml
+
+pa11y: pa11y-install html
+	find $(BUILDDIR) -name '*.html' -print0 | xargs -n 1 -0 $(PA11Y)
+
+vale: install
+	@. $(VENV); test -d $(SPHINXDIR)/venv/lib/python*/site-packages/vale || pip install vale
+	@. $(VENV); test -f $(SPHINXDIR)/vale.ini || python3 $(SPHINXDIR)/get_vale_conf.py
+	@. $(VENV); find $(SPHINXDIR)/venv/lib/python*/site-packages/vale/vale_bin -size 195c -exec vale --config "$(SPHINXDIR)/vale.ini" $(TARGET) > /dev/null \;
+	@cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt
+	@cat $(SPHINXDIR)/.wordlist.txt $(SOURCEDIR)/.custom_wordlist.txt >> $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt
+	@echo ""
+	@echo "Running Vale against $(TARGET). To change target set TARGET= with make command"
+	@echo ""
+	@. $(VENV); vale --config "$(SPHINXDIR)/vale.ini" --glob='*.{md,txt,rst}' $(TARGET) || true
+	@cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt && rm $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt
+
+pdf-prep: install
+	@for packageName in $(REQPDFPACKS); do (dpkg-query -W -f='$${Status}' $$packageName 2>/dev/null | \
+	    grep -c "ok installed" >/dev/null && echo "Package $$packageName is installed") && continue || \
+	    (echo "\nPDF generation requires the installation of the following packages: $(REQPDFPACKS)" && \
+	    echo "" && echo "Run 'sudo make pdf-prep-force' to install these packages" && echo "" && echo \
+	    "Please be aware these packages will be installed to your system") && exit 1 ; done
+
+pdf-prep-force:
+	apt-get update
+	apt-get upgrade -y
+	apt-get install --no-install-recommends -y $(REQPDFPACKS)
+
+pdf: pdf-prep
+	@. $(VENV); sphinx-build -M latexpdf "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS)
+	@rm ./$(BUILDDIR)/latex/front-page-light.pdf || true
+	@rm ./$(BUILDDIR)/latex/normal-page-footer.pdf || true
+	@find ./$(BUILDDIR)/latex -name "*.pdf" -exec mv -t ./$(BUILDDIR) {} +
+	@rm -r $(BUILDDIR)/latex
+	@echo "\nOutput can be found in ./$(BUILDDIR)\n"
+
+allmetrics: html
+	@echo "Recording documentation metrics..."
+	@echo "Checking for existence of vale..."
+	@. $(VENV); test -d $(SPHINXDIR)/venv/lib/python*/site-packages/vale || pip install vale
+	@. $(VENV); test -f $(SPHINXDIR)/vale.ini || python3 $(SPHINXDIR)/get_vale_conf.py
+	@. $(VENV); find $(SPHINXDIR)/venv/lib/python*/site-packages/vale/vale_bin -size 195c -exec vale --config "$(SPHINXDIR)/vale.ini" $(TARGET) > /dev/null \;
+	@eval '$(METRICSDIR)/source_metrics.sh $(PWD)'
+	@eval '$(METRICSDIR)/build_metrics.sh $(PWD) $(METRICSDIR)'
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%:
+	. $(VENV); $(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
diff --git a/docs/conf.py b/docs/conf.py
new file mode 100644
index 00000000..aaac6609
--- /dev/null
+++ b/docs/conf.py
@@ -0,0 +1,293 @@
+import datetime
+
+# Configuration for the Sphinx documentation builder.
+# All configuration specific to your project should be done in this file.
+#
+# A complete list of built-in Sphinx configuration values:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html
+#
+# Our starter pack uses the custom Canonical Sphinx extension
+# to keep all documentation based on it consistent and on brand:
+# https://github.com/canonical/canonical-sphinx
+
+
+#######################
+# Project information #
+#######################
+
+# Project name
+
+project = "Test Observer"
+author = "Canonical Ltd."
+
+
+# Sidebar documentation title; best kept reasonably short
+#
+# To include a version number, add it here (hardcoded or automated).
+#
+# To disable the title, set to an empty string.
+
+html_title = project + " documentation"
+
+
+# Copyright string; shown at the bottom of the page
+#
+# Now, the starter pack uses CC-BY-SA as the license
+# and the current year as the copyright year.
+#
+# If your docs need another license, specify it instead of 'CC-BY-SA'.
+#
+# If your documentation is a part of the code repository of your project,
+# it inherits the code license instead; specify it instead of 'CC-BY-SA'.
+#
+# For static works, it is common to provide the first publication year.
+# Another option is to provide both the first year of publication
+# and the current year, especially for docs that frequently change,
+# e.g. 2022–2023 (note the en-dash).
+#
+# A way to check a repo's creation date is to get a classic GitHub token
+# with 'repo' permissions; see https://github.com/settings/tokens
+# Next, use 'curl' and 'jq' to extract the date from the API's output:
+#
+# curl -H 'Authorization: token <TOKEN>' \
+#   -H 'Accept: application/vnd.github.v3.raw' \
+#   https://api.github.com/repos/canonical/<REPO> | jq '.created_at'
+
+copyright = "%s CC-BY-SA, %s" % (datetime.date.today().year, author)
+
+
+# Documentation website URL
+#
+# TODO: Update with the official URL of your docs or leave empty if unsure.
+#
+# The Open Graph Protocol (OGP) enhances page display in a social graph
+# and is used by social media platforms; see https://ogp.me/
+
+ogp_site_url = ""
+
+
+# Preview name of the documentation website
+#
+# To use a different name for the project in previews, update as needed.
+
+ogp_site_name = project
+
+
+# Preview image URL
+#
+# To customise the preview image, update as needed.
+
+ogp_image = "https://assets.ubuntu.com/v1/253da317-image-document-ubuntudocs.svg"
+
+
+# Product favicon; shown in bookmarks, browser tabs, etc.
+
+# To customise the favicon, uncomment and update as needed.
+
+# html_favicon = '.sphinx/_static/favicon.png'
+
+
+# Dictionary of values to pass into the Sphinx context for all pages:
+# https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-html_context
+
+html_context = {
+    # Product page URL; can be different from product docs URL
+    #
+    # Change to your product website URL,
+    # dropping the 'https://' prefix, e.g. 'ubuntu.com/lxd'.
+    #
+    # If there's no such website,
+    # remove the {{ product_page }} link from the page header template
+    # (usually .sphinx/_templates/header.html; also, see README.rst).
+    "product_page": "test-observer.canonical.com",
+    # Product tag image; the orange part of your logo, shown in the page header
+    #
+    # To add a tag image, uncomment and update as needed.
+    # 'product_tag': '_static/tag.png',
+    # Your Discourse instance URL
+    #
+    # Change to your Discourse instance URL or leave empty.
+    #
+    # If set, adding ':discourse: 123' to an .rst file
+    # will add a link to Discourse topic 123 at the bottom of the page.
+    "discourse": "",
+    # Your Mattermost channel URL
+    #
+    # Change to your Mattermost channel URL or leave empty.
+    "mattermost": "https://chat.canonical.com/canonical/channels/test-observer",
+    # Your Matrix channel URL
+    #
+    # Change to your Matrix channel URL or leave empty.
+    "matrix": "",
+    # Your documentation GitHub repository URL
+    #
+    # Change to your documentation GitHub repository URL or leave empty.
+    #
+    # If set, links for viewing the documentation source files
+    # and creating GitHub issues are added at the bottom of each page.
+    "github_url": "https://github.com/canonical/test_observer",
+    # Docs branch in the repo; used in links for viewing the source files
+    #
+    # To customise the branch, update as needed.
+    "github_version": "main",
+    # Docs location in the repo; used in links for viewing the source files
+    #
+    # To customise the directory, uncomment and update as needed.
+    "github_folder": "/docs/",
+    # To enable or disable the Previous / Next buttons at the bottom of pages
+    # Valid options: none, prev, next, both
+    "sequential_nav": "none",
+    # To enable listing contributors on individual pages, set to True
+    "display_contributors": False,
+}
+
+# Project slug; see https://meta.discourse.org/t/what-is-category-slug/87897
+#
+# If your documentation is hosted on https://docs.ubuntu.com/,
+# uncomment and update as needed.
+
+# slug = ''
+
+
+# Template and asset locations
+
+html_static_path = [".sphinx/_static"]
+templates_path = [".sphinx/_templates"]
+
+
+#############
+# Redirects #
+#############
+
+# To set up redirects: https://documatt.gitlab.io/sphinx-reredirects/usage.html
+# For example: 'explanation/old-name.html': '../how-to/prettify.html',
+
+# To set up redirects in the Read the Docs project dashboard:
+# https://docs.readthedocs.io/en/stable/guides/redirects.html
+
+# If undefined, set to None, or empty,
+# the sphinx_reredirects extension will be disabled.
+
+redirects = {}
+
+
+###########################
+# Link checker exceptions #
+###########################
+
+# A regex list of URLs that are ignored by 'make linkcheck'
+
+linkcheck_ignore = [
+    "http://127.0.0.1:8000",
+    # TO is currently behind a VPN so ignore link checks
+    "https://test-observer.canonical.com/*",
+    "https://test-observer-staging.canonical.com/*",
+    "https://test-observer-api.canonical.com/*",
+    "https://test-observer-api-staging.canonical.com/*",
+]
+
+
+# A regex list of URLs where anchors are ignored by 'make linkcheck'
+
+linkcheck_anchors_ignore_for_url = [r"https://github\.com/.*"]
+
+# Give linkcheck multiple tries on failure
+# linkcheck_timeout = 30
+linkcheck_retries = 3
+
+########################
+# Configuration extras #
+########################
+
+# Custom MyST syntax extensions; see
+# https://myst-parser.readthedocs.io/en/latest/syntax/optional.html
+#
+# NOTE: By default, the following MyST extensions are enabled:
+# substitution, deflist, linkify
+
+# myst_enable_extensions = set()
+
+
+# Custom Sphinx extensions; see
+# https://www.sphinx-doc.org/en/master/usage/extensions/index.html
+
+# NOTE: The canonical_sphinx extension is required for the starter pack.
+# It automatically enables the following extensions:
+# - custom-rst-roles
+# - myst_parser
+# - notfound.extension
+# - related-links
+# - sphinx_copybutton
+# - sphinx_design
+# - sphinx_reredirects
+# - sphinx_tabs.tabs
+# - sphinxcontrib.jquery
+# - sphinxext.opengraph
+# - terminal-output
+# - youtube-links
+
+extensions = [
+    "canonical_sphinx",
+    "sphinxcontrib.cairosvgconverter",
+    "sphinx_last_updated_by_git",
+]
+
+# Excludes files or directories from processing
+
+exclude_patterns = [
+    "doc-cheat-sheet*",
+]
+
+# Adds custom CSS files, located under 'html_static_path'
+
+html_css_files = [
+    "css/pdf.css",
+]
+
+
+# Adds custom JavaScript files, located under 'html_static_path'
+
+# html_js_files = []
+
+
+# Specifies a reST snippet to be appended to each .rst file
+
+rst_epilog = """
+.. include:: /reuse/links.txt
+"""
+
+# Feedback button at the top; enabled by default
+#
+# To disable the button, uncomment this.
+
+# disable_feedback_button = True
+
+
+# Your manpage URL
+#
+# To enable manpage links, uncomment and update as needed.
+#
+# NOTE: If set, adding ':manpage:' to an .rst file
+# adds a link to the corresponding man section at the bottom of the page.
+
+# manpages_url = f'https://manpages.ubuntu.com/manpages/{codename}/en/' + \
+#     f'man{section}/{page}.{section}.html'
+
+
+# Specifies a reST snippet to be prepended to each .rst file
+# This defines a :center: role that centers table cell content.
+# This defines a :h2: role that styles content for use with PDF generation.
+
+rst_prolog = """
+.. role:: center
+   :class: align-center
+.. role:: h2
+   :class: hclass2
+"""
+
+# Workaround for https://github.com/canonical/canonical-sphinx/issues/34
+
+if "discourse_prefix" not in html_context and "discourse" in html_context:
+    html_context["discourse_prefix"] = html_context["discourse"] + "/t/"
diff --git a/docs/explanation/glossary.rst b/docs/explanation/glossary.rst
new file mode 100644
index 00000000..c63b1453
--- /dev/null
+++ b/docs/explanation/glossary.rst
@@ -0,0 +1,43 @@
+Glossary
+========
+
+Here is a list of terms used by Test Observer (TO) and what they mean.
+
+Artefact
+--------
+
+An artefact is the thing under test, for instance a particular snap or image. An artefact has a name, a version, a family and a stage. Artefacts also have other attributes that are specific to their families (e.g. track is specific to snaps and charms).
+
+Family
+------
+
+The type of an artefact. TO currently supports snaps, debs, charms and images.
+
+Stage
+-----
+
+The level of risk of this artefact. This property is dependent on the family. Specifically:
+
+* snaps and charms can be one of edge, beta, candidate and stable
+* debs can be proposed or updates
+* images can be pending or current
+
+Environment
+-----------
+
+The architecture and name of what the artefact was tested on. In most cases this is some physical machine, but it can be more complicated.
+
+Test Plan
+---------
+
+The name of a particular grouping of tests. This is useful if you want to logically partition the tests you have, or if multiple teams are running tests on the same artefact and environment.
+
+Test Execution
+--------------
+
+An execution of a test plan on an artefact under a particular environment. It can contain many test results.
+
+Test Result
+-----------
+
+Includes the name and status (PASSED, FAILED, or SKIPPED) of a test. Additionally, it can optionally have logs and other useful bits of information.
diff --git a/docs/explanation/index.rst b/docs/explanation/index.rst
new file mode 100644
index 00000000..ae43c048
--- /dev/null
+++ b/docs/explanation/index.rst
@@ -0,0 +1,9 @@
+Explanation
+===========
+
+This section covers conceptual questions about Test Observer.
+
+.. toctree::
+   :maxdepth: 1
+
+   glossary
\ No newline at end of file
diff --git a/docs/how-to/index.rst b/docs/how-to/index.rst
new file mode 100644
index 00000000..9b87c371
--- /dev/null
+++ b/docs/how-to/index.rst
@@ -0,0 +1,9 @@
+How-to guides
+==============
+
+These how-to guides cover key operations and processes in Test Observer.
+
+.. toctree::
+   :maxdepth: 1
+
+   submit-a-test
\ No newline at end of file
diff --git a/docs/how-to/submit-a-test.rst b/docs/how-to/submit-a-test.rst
new file mode 100644
index 00000000..be012621
--- /dev/null
+++ b/docs/how-to/submit-a-test.rst
@@ -0,0 +1,31 @@
+Submit a test to Test Observer
+==============================
+
+Below are the main steps involved in submitting a test to Test Observer (TO):
+
+#. Inform TO that testing has started
+#. Submit the results of the testing
+#. Inform TO that testing has ended
+
+Note that if you want to execute multiple test plans on the same environment, or test on multiple environments, then you will need to repeat the steps for each test execution.
+
+Inform TO that testing has started
+----------------------------------
+
+Send a ``PUT`` request to the `start_test endpoint <https://test-observer-api.canonical.com/docs>`_ with a body following the schema appropriate to the type of artefact you are testing (the aforementioned link includes the different schemas). The body of this request includes information about the artefact, the environment and the test plan. TO will store this information and return a test execution id that you will need in order to submit results. It is worth noting that a test execution is not a single test; it is a collection of tests grouped together under a single test plan to be executed on a single environment.
+
+Submit the results of the testing
+---------------------------------
+
+When you have the results of your testing, you can submit them to TO using a ``POST`` to the `test-results <https://test-observer-api.canonical.com/docs>`_ endpoint. This endpoint may be called multiple times in case you want to submit the results in batches.
+
+Inform TO that testing has ended
+--------------------------------
+
+Once testing has completed and you have submitted the results to TO, you should inform TO that the test execution has ended. You can do so by sending a ``PATCH`` request to the `test-executions <https://test-observer-api.canonical.com/docs>`_ endpoint with a body containing::
+
+    {
+      "status": "COMPLETED"
+    }
+
+TO will then parse the submitted results and set the status of the test execution to either ``PASSED`` (if all tests passed), ``FAILED`` (if some tests failed), or ``ENDED_PREMATURELY`` (if no tests were submitted).
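+
+Putting the three steps together, here is an illustrative shell session. The exact endpoint paths and request fields are defined by the API docs linked above and vary by artefact family, so treat the bodies below (and the example test execution id ``42``) as a sketch rather than a reference::
+
+    API=https://test-observer-api.canonical.com
+
+    # 1. Start the test execution; the response includes its id
+    curl -X PUT "$API/v1/test-executions/start-test" \
+        -H "Content-Type: application/json" \
+        -d '{"family": "snap", "name": "core", "version": "16-2.61",
+             "arch": "amd64", "execution_stage": "beta",
+             "environment": "rpi400", "test_plan": "my-test-plan"}'
+
+    # 2. Submit results, in one or more batches, under that id
+    curl -X POST "$API/v1/test-executions/42/test-results" \
+        -H "Content-Type: application/json" \
+        -d '[{"name": "wireless-scan", "status": "PASSED"}]'
+
+    # 3. Mark the test execution as ended
+    curl -X PATCH "$API/v1/test-executions/42" \
+        -H "Content-Type: application/json" \
+        -d '{"status": "COMPLETED"}'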
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 00000000..b9bcdb9c
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,13 @@
+Introduction
+============
+
+Test Observer (TO) is a dashboard for viewing the results of tests run on different environments for a particular artefact. A user interested in testing an artefact (a deb, snap, charm or image) under different environments (particular machines or cloud setups) can use TO as a means of storing, viewing and comparing results with previous runs or versions of an artefact. The last use case is particularly useful for catching regressions. Additionally, TO provides a mechanism to assign reviewers that can look at results and mark artefacts as approved or failed in order to gate updates. It is important to note that TO does not run the tests itself; rather, it provides an API through which users can report results.
+
+Certification currently deploys an instance of TO that they use for reviewing Stable Release Updates (SRUs). Other teams also use this instance for their tests. You can visit `the frontend `_ and view `the API docs `_, although this currently requires Canonical VPN access. There's also a staging deployment of the `frontend `_ and `API `_ that teams can use to test their integration.
+
+.. toctree::
+   :hidden:
+   :maxdepth: 2
+
+   how-to/index
+   explanation/index
diff --git a/docs/reuse/links.txt b/docs/reuse/links.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/frontend/benchmarks/common.dart b/frontend/benchmarks/common.dart
index 1d97c511..1cf25658 100644
--- a/frontend/benchmarks/common.dart
+++ b/frontend/benchmarks/common.dart
@@ -61,6 +61,7 @@ class ApiRepositoryMock extends Mock implements ApiRepository {
         name: 'artefact',
         version: '1',
         track: 'latest',
+        family: 'snap',
         store: 'ubuntu',
         series: '',
         repo: '',
diff --git a/frontend/lib/models/artefact.dart b/frontend/lib/models/artefact.dart
index 84302e60..615224d1 100644
--- a/frontend/lib/models/artefact.dart
+++ b/frontend/lib/models/artefact.dart
@@ -32,6 +32,7 @@ class Artefact with _$Artefact {
     required int id,
     required String name,
     required String version,
+    required String family,
     @Default('') String track,
     @Default('') String store,
     @Default('') String series,
diff --git a/frontend/test/dummy_data.dart b/frontend/test/dummy_data.dart
index e5797591..c20a5beb 100644
--- a/frontend/test/dummy_data.dart
+++ b/frontend/test/dummy_data.dart
@@ -33,6 +33,7 @@ const dummyArtefact = Artefact(
   id: 1,
   name: 'core',
   version: '16-2.61',
+  family: 'snap',
   track: 'latest',
   store: 'ubuntu',
   series: '',
diff --git a/terraform/README.md b/terraform/README.md
new file mode 100644
index 00000000..5b74ebc3
--- /dev/null
+++ b/terraform/README.md
@@ -0,0 +1,167 @@
+# Juju deployment
+
+Local Juju and charm deployment via microk8s and terraform.
+
+## Setup
+
+It is recommended to install the prerequisites on a VM rather than your host machine. To do so, first install multipass:
+
+```bash
+sudo snap install multipass
+```
+
+Then launch the "charm-dev" VM blueprint, which comes pre-configured with the required tools (this will take a while):
+
+```bash
+multipass launch --mount $HOME charm-dev
+```
+
+Note the home mount, which gives you access to the project files inside the VM.
+
+Once the VM initialization has completed, you will need to enable microk8s ingress there:
+
+```bash
+multipass exec charm-dev -- sudo microk8s enable ingress
+```
+
+Then install terraform:
+
+```bash
+multipass exec charm-dev -- sudo snap install terraform --classic
+```
+
+And initialize it:
+
+```bash
+multipass exec charm-dev -- terraform init
+```
+
+## Deploy
+
+You can deploy everything using terraform by running:
+
+```bash
+multipass exec charm-dev -- TF_VAR_environment=development TF_VAR_external_ingress_hostname=local terraform apply -auto-approve
+```
+
+Then wait for the deployment to settle and all the statuses to become active. You can watch the statuses via:
+
+```bash
+multipass exec charm-dev -- JUJU_MODEL=test-observer-development juju status --storage --relations --watch 5s
+```
+
+Look at the IPv4 addresses of your charm-dev VM through:
+
+```bash
+multipass info charm-dev
+```
+
+One of these connects to the ingress enabled inside the VM. To figure out which one, try the following command on each IP address until you get a response:
+
+```bash
+curl --connect-to ::<IP> http://test-observer-api.local
+```
+
+Once you find the IP address, add the following entry to your host machine's `/etc/hosts` file:
+
+```bash
+<IP> test-observer.local test-observer-api.local
+```
+
+After that, you should be able to reach the TO frontend in your host machine's browser through the URL test-observer.local. You should also be able to access the API through test-observer-api.local.
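+
+To sanity-check the setup from the host machine, you can query both hostnames (a quick verification sketch; it assumes the ingress serves plain HTTP on port 80):
+
+```bash
+# The frontend should return the dashboard page
+curl -i http://test-observer.local
+
+# The API should also respond (its docs list the available routes)
+curl -i http://test-observer-api.local
+```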
+
+## Teardown
+
+To take everything down you can start with terraform:
+
+```bash
+multipass exec charm-dev -- TF_VAR_environment=development TF_VAR_external_ingress_hostname=local terraform destroy --auto-approve
+```
+
+The above step can take a while and may even get stuck with some applications in error state. You can watch it through:
+
+```bash
+multipass exec charm-dev -- JUJU_MODEL=test-observer-development juju status --storage --relations --watch 5s
+```
+
+To forcefully remove applications stuck in error state:
+
+```bash
+multipass exec charm-dev -- JUJU_MODEL=test-observer-development juju remove-application <application> --destroy-storage --force
+```
+
+Once everything is down and the juju model has been deleted, you can stop the multipass VM:
+
+```bash
+multipass stop charm-dev
+```
+
+## Developing the charm
+
+To develop and test updates to the backend and frontend charms, you would typically want to first complete the above steps to deploy a working system. Once you have done that, proceed with the following steps.
+
+### Build and refresh the backend charm
+
+You can make edits to the backend charm and refresh it in the running system on the fly with:
+
+```bash
+cd backend/charm
+charmcraft pack
+juju refresh api --path ./test-observer-api_ubuntu-22.04-amd64.charm
+
+# to update the OCI image that runs the backend
+juju attach-resource api api-image=ghcr.io/canonical/test_observer/backend:[tag or sha]
+```
+
+### Build and refresh the frontend charm
+
+Same thing with the frontend:
+
+```bash
+cd frontend/charm
+charmcraft pack
+
+juju refresh frontend --path ./test-observer-frontend_ubuntu-22.04-amd64.charm
+
+# to update the OCI image that runs the frontend
+juju attach-resource frontend frontend-image=ghcr.io/canonical/test_observer/frontend:[tag or sha]
+```
+
+Note that the frontend app is made aware of the backend URL to connect to using the global `window.testObserverAPIBaseURI`, which is set at runtime with some nginx config level trickery based on:
+
+- the `test-observer-api` charm's `hostname` config value.
+- the frontend charm's `test-observer-api-scheme` config value.
+
+These in turn can be set using the terraform plan (`terraform/test-observer.tf` and associated variables).
+
+## Running tests
+
+To run the unit and integration tests for the frontend charm, do the following:
+
+```bash
+cd frontend/charm
+tox -e unit
+tox -e integration
+```
+
+## Releasing the charms
+
+Charms are released through GitHub Actions on push to main. If, however, you need to release charms from your branch before merging with main, you can add your branch as a trigger to those same GitHub Actions.
+
+## VS Code & charm libraries
+
+VS Code fails to find (for autocompletion and code navigation purposes) the charm libraries under `lib` in each of `backend/charm` and `frontend/charm`. There is a `.vscode-settings-default.json` under each of these directories, which you can copy to the `.gitignore`d path `.vscode/settings.json` to make them fly. To do this for both charms:
+
+```bash
+mkdir -p backend/charm/.vscode
+cp backend/charm/.vscode-settings-default.json backend/charm/.vscode/settings.json
+
+mkdir -p frontend/charm/.vscode
+cp frontend/charm/.vscode-settings-default.json frontend/charm/.vscode/settings.json
+```
+
+Now if you open `backend/charm` and `frontend/charm` as separate projects (which you'll want to do anyway so that each keeps its own virtual environment), VS Code should be happy.
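+
+For example, assuming you have the VS Code `code` command-line launcher installed, you can open each charm directory as its own workspace:
+
+```bash
+# Separate workspaces, so each charm keeps its own settings and virtual environment
+code backend/charm
+code frontend/charm
+```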
+
+## Handy documentation pointers about charming
+
+- [Integrations (how to provide and require relations)](https://juju.is/docs/sdk/integration)