From 657e3c0d71e89201787eff0956710030a3716510 Mon Sep 17 00:00:00 2001 From: Matias Piipari Date: Fri, 7 Feb 2025 10:20:09 +0200 Subject: [PATCH 1/3] Rename DTO -> Response (#255) * DTO -> Response renaming * Schema adjusted --- .gitignore | 3 +- backend/schemata/openapi.json | 226 +++++++++--------- .../controllers/artefacts/artefacts.py | 12 +- .../controllers/artefacts/builds.py | 4 +- .../artefacts/environment_reviews.py | 6 +- .../controllers/artefacts/models.py | 26 +- .../controllers/test_executions/models.py | 16 +- .../controllers/test_executions/patch.py | 4 +- .../test_executions/status_update.py | 4 +- 9 files changed, 151 insertions(+), 150 deletions(-) diff --git a/.gitignore b/.gitignore index e5b91fc5..1da1ee9d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,5 @@ # Visual Studio Code configurations .vscode -.idea \ No newline at end of file +.idea +build \ No newline at end of file diff --git a/backend/schemata/openapi.json b/backend/schemata/openapi.json index 562687ad..021fb98a 100644 --- a/backend/schemata/openapi.json +++ b/backend/schemata/openapi.json @@ -261,7 +261,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/TestExecutionDTO" + "$ref": "#/components/schemas/TestExecutionResponse" } } } @@ -458,7 +458,7 @@ "schema": { "type": "array", "items": { - "$ref": "#/components/schemas/TestEventDTO" + "$ref": "#/components/schemas/TestEventResponse" }, "title": "Response Get Status Update V1 Test Executions Id Status Update Get" } @@ -505,7 +505,7 @@ "schema": { "type": "array", "items": { - "$ref": "#/components/schemas/ArtefactBuildEnvironmentReviewDTO" + "$ref": "#/components/schemas/ArtefactBuildEnvironmentReviewResponse" }, "title": "Response Get Environment Reviews V1 Artefacts Artefact Id Environment Reviews Get" } @@ -569,7 +569,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ArtefactBuildEnvironmentReviewDTO" + "$ref": "#/components/schemas/ArtefactBuildEnvironmentReviewResponse" } } } @@ -615,7 +615,7 @@ "schema": { "type": "array", "items": { - "$ref": "#/components/schemas/ArtefactBuildDTO" + "$ref": "#/components/schemas/ArtefactBuildResponse" }, "title": "Response Get Artefact Builds V1 Artefacts Artefact Id Builds Get" } @@ -669,7 +669,7 @@ "schema": { "type": "array", "items": { - "$ref": "#/components/schemas/ArtefactDTO" + "$ref": "#/components/schemas/ArtefactResponse" }, "title": "Response Get Artefacts V1 Artefacts Get" } @@ -713,7 +713,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ArtefactDTO" + "$ref": "#/components/schemas/ArtefactResponse" } } } @@ -763,7 +763,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ArtefactDTO" + "$ref": "#/components/schemas/ArtefactResponse" } } } @@ -807,7 +807,7 @@ "schema": { "type": "array", "items": { - "$ref": "#/components/schemas/ArtefactVersionDTO" + "$ref": "#/components/schemas/ArtefactVersionResponse" }, "title": "Response Get Artefact Versions V1 Artefacts Artefact Id Versions Get" } @@ -1352,45 +1352,19 @@ }, "components": { "schemas": { - "ArtefactBuildDTO": { - "properties": { - "id": { - "type": "integer", - "title": "Id" - }, - "architecture": { - "type": "string", - "title": "Architecture" - }, - "revision": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "title": "Revision" - }, - "test_executions": { - "items": { - "$ref": "#/components/schemas/TestExecutionDTO" - }, - "type": "array", - "title": "Test Executions" - } - }, - 
"type": "object", - "required": [ - "id", - "architecture", - "revision", - "test_executions" + "ArtefactBuildEnvironmentReviewDecision": { + "type": "string", + "enum": [ + "REJECTED", + "APPROVED_INCONSISTENT_TEST", + "APPROVED_UNSTABLE_PHYSICAL_INFRA", + "APPROVED_CUSTOMER_PREREQUISITE_FAIL", + "APPROVED_FAULTY_HARDWARE", + "APPROVED_ALL_TESTS_PASS" ], - "title": "ArtefactBuildDTO" + "title": "ArtefactBuildEnvironmentReviewDecision" }, - "ArtefactBuildEnvironmentReviewDTO": { + "ArtefactBuildEnvironmentReviewResponse": { "properties": { "id": { "type": "integer", @@ -1408,10 +1382,10 @@ "title": "Review Comment" }, "environment": { - "$ref": "#/components/schemas/EnvironmentDTO" + "$ref": "#/components/schemas/EnvironmentResponse" }, "artefact_build": { - "$ref": "#/components/schemas/ArtefactBuildMinimalDTO" + "$ref": "#/components/schemas/ArtefactBuildMinimalResponse" } }, "type": "object", @@ -1422,21 +1396,39 @@ "environment", "artefact_build" ], - "title": "ArtefactBuildEnvironmentReviewDTO" + "title": "ArtefactBuildEnvironmentReviewResponse" }, - "ArtefactBuildEnvironmentReviewDecision": { - "type": "string", - "enum": [ - "REJECTED", - "APPROVED_INCONSISTENT_TEST", - "APPROVED_UNSTABLE_PHYSICAL_INFRA", - "APPROVED_CUSTOMER_PREREQUISITE_FAIL", - "APPROVED_FAULTY_HARDWARE", - "APPROVED_ALL_TESTS_PASS" + "ArtefactBuildMinimalResponse": { + "properties": { + "id": { + "type": "integer", + "title": "Id" + }, + "architecture": { + "type": "string", + "title": "Architecture" + }, + "revision": { + "anyOf": [ + { + "type": "integer" + }, + { + "type": "null" + } + ], + "title": "Revision" + } + }, + "type": "object", + "required": [ + "id", + "architecture", + "revision" ], - "title": "ArtefactBuildEnvironmentReviewDecision" + "title": "ArtefactBuildMinimalResponse" }, - "ArtefactBuildMinimalDTO": { + "ArtefactBuildResponse": { "properties": { "id": { "type": "integer", @@ -1456,17 +1448,37 @@ } ], "title": "Revision" + }, + "test_executions": { + "items": { + "$ref": "#/components/schemas/TestExecutionResponse" + }, + "type": "array", + "title": "Test Executions" } }, "type": "object", "required": [ "id", "architecture", - "revision" + "revision", + "test_executions" ], - "title": "ArtefactBuildMinimalDTO" + "title": "ArtefactBuildResponse" }, - "ArtefactDTO": { + "ArtefactPatch": { + "properties": { + "status": { + "$ref": "#/components/schemas/ArtefactStatus" + } + }, + "type": "object", + "required": [ + "status" + ], + "title": "ArtefactPatch" + }, + "ArtefactResponse": { "properties": { "id": { "type": "integer", @@ -1526,7 +1538,7 @@ "assignee": { "anyOf": [ { - "$ref": "#/components/schemas/UserDTO" + "$ref": "#/components/schemas/UserResponse" }, { "type": "null" @@ -1580,19 +1592,7 @@ "all_environment_reviews_count", "completed_environment_reviews_count" ], - "title": "ArtefactDTO" - }, - "ArtefactPatch": { - "properties": { - "status": { - "$ref": "#/components/schemas/ArtefactStatus" - } - }, - "type": "object", - "required": [ - "status" - ], - "title": "ArtefactPatch" + "title": "ArtefactResponse" }, "ArtefactStatus": { "type": "string", @@ -1603,7 +1603,7 @@ ], "title": "ArtefactStatus" }, - "ArtefactVersionDTO": { + "ArtefactVersionResponse": { "properties": { "version": { "type": "string", @@ -1619,7 +1619,7 @@ "version", "artefact_id" ], - "title": "ArtefactVersionDTO" + "title": "ArtefactVersionResponse" }, "C3TestResult": { "properties": { @@ -1733,29 +1733,6 @@ ], "title": "EndTestExecutionRequest" }, - "EnvironmentDTO": { - "properties": { - "id": { - "type": 
"integer", - "title": "Id" - }, - "name": { - "type": "string", - "title": "Name" - }, - "architecture": { - "type": "string", - "title": "Architecture" - } - }, - "type": "object", - "required": [ - "id", - "name", - "architecture" - ], - "title": "EnvironmentDTO" - }, "EnvironmentReportedIssueRequest": { "properties": { "environment_name": { @@ -1848,6 +1825,29 @@ ], "title": "EnvironmentReportedIssueResponse" }, + "EnvironmentResponse": { + "properties": { + "id": { + "type": "integer", + "title": "Id" + }, + "name": { + "type": "string", + "title": "Name" + }, + "architecture": { + "type": "string", + "title": "Architecture" + } + }, + "type": "object", + "required": [ + "id", + "name", + "architecture" + ], + "title": "EnvironmentResponse" + }, "EnvironmentReviewPatch": { "properties": { "review_decision": { @@ -1923,13 +1923,13 @@ "$ref": "#/components/schemas/FamilyName" }, "test_execution": { - "$ref": "#/components/schemas/TestExecutionDTO" + "$ref": "#/components/schemas/TestExecutionResponse" }, "artefact": { - "$ref": "#/components/schemas/ArtefactDTO" + "$ref": "#/components/schemas/ArtefactResponse" }, "artefact_build": { - "$ref": "#/components/schemas/ArtefactBuildMinimalDTO" + "$ref": "#/components/schemas/ArtefactBuildMinimalResponse" } }, "type": "object", @@ -2337,7 +2337,7 @@ "properties": { "events": { "items": { - "$ref": "#/components/schemas/TestEventDTO" + "$ref": "#/components/schemas/TestEventResponse" }, "type": "array", "title": "Events" @@ -2349,7 +2349,7 @@ ], "title": "StatusUpdateRequest" }, - "TestEventDTO": { + "TestEventResponse": { "properties": { "event_name": { "type": "string", @@ -2371,9 +2371,9 @@ "timestamp", "detail" ], - "title": "TestEventDTO" + "title": "TestEventResponse" }, - "TestExecutionDTO": { + "TestExecutionResponse": { "properties": { "id": { "type": "integer", @@ -2402,7 +2402,7 @@ "title": "C3 Link" }, "environment": { - "$ref": "#/components/schemas/EnvironmentDTO" + "$ref": "#/components/schemas/EnvironmentResponse" }, "status": { "$ref": "#/components/schemas/TestExecutionStatus" @@ -2427,7 +2427,7 @@ "test_plan", "is_rerun_requested" ], - "title": "TestExecutionDTO" + "title": "TestExecutionResponse" }, "TestExecutionStatus": { "type": "string", @@ -2669,7 +2669,7 @@ ], "title": "TestResultStatus" }, - "UserDTO": { + "UserResponse": { "properties": { "id": { "type": "integer", @@ -2695,7 +2695,7 @@ "launchpad_email", "name" ], - "title": "UserDTO" + "title": "UserResponse" }, "ValidationError": { "properties": { diff --git a/backend/test_observer/controllers/artefacts/artefacts.py b/backend/test_observer/controllers/artefacts/artefacts.py index 07f89ff5..3852d99a 100644 --- a/backend/test_observer/controllers/artefacts/artefacts.py +++ b/backend/test_observer/controllers/artefacts/artefacts.py @@ -34,9 +34,9 @@ is_there_a_rejected_environment, ) from .models import ( - ArtefactDTO, + ArtefactResponse, ArtefactPatch, - ArtefactVersionDTO, + ArtefactVersionResponse, ) router = APIRouter(tags=["artefacts"]) @@ -44,7 +44,7 @@ router.include_router(builds.router) -@router.get("", response_model=list[ArtefactDTO]) +@router.get("", response_model=list[ArtefactResponse]) def get_artefacts(family: FamilyName | None = None, db: Session = Depends(get_db)): """Get latest artefacts optionally by family""" artefacts = [] @@ -69,7 +69,7 @@ def get_artefacts(family: FamilyName | None = None, db: Session = Depends(get_db return artefacts -@router.get("/{artefact_id}", response_model=ArtefactDTO) +@router.get("/{artefact_id}", 
response_model=ArtefactResponse) def get_artefact( artefact: Artefact = Depends( ArtefactRetriever( @@ -82,7 +82,7 @@ def get_artefact( return artefact -@router.patch("/{artefact_id}", response_model=ArtefactDTO) +@router.patch("/{artefact_id}", response_model=ArtefactResponse) def patch_artefact( request: ArtefactPatch, db: Session = Depends(get_db), @@ -121,7 +121,7 @@ def _validate_artefact_status( ) -@router.get("/{artefact_id}/versions", response_model=list[ArtefactVersionDTO]) +@router.get("/{artefact_id}/versions", response_model=list[ArtefactVersionResponse]) def get_artefact_versions( artefact: Artefact = Depends(ArtefactRetriever()), db: Session = Depends(get_db) ): diff --git a/backend/test_observer/controllers/artefacts/builds.py b/backend/test_observer/controllers/artefacts/builds.py index 8cce4152..4b192471 100644 --- a/backend/test_observer/controllers/artefacts/builds.py +++ b/backend/test_observer/controllers/artefacts/builds.py @@ -26,13 +26,13 @@ ) from .models import ( - ArtefactBuildDTO, + ArtefactBuildResponse, ) router = APIRouter(tags=["artefact-builds"]) -@router.get("/{artefact_id}/builds", response_model=list[ArtefactBuildDTO]) +@router.get("/{artefact_id}/builds", response_model=list[ArtefactBuildResponse]) def get_artefact_builds( artefact: Artefact = Depends( ArtefactRetriever( diff --git a/backend/test_observer/controllers/artefacts/environment_reviews.py b/backend/test_observer/controllers/artefacts/environment_reviews.py index fd48dfa2..856b1e97 100644 --- a/backend/test_observer/controllers/artefacts/environment_reviews.py +++ b/backend/test_observer/controllers/artefacts/environment_reviews.py @@ -28,7 +28,7 @@ from test_observer.data_access.setup import get_db from .models import ( - ArtefactBuildEnvironmentReviewDTO, + ArtefactBuildEnvironmentReviewResponse, EnvironmentReviewPatch, ) @@ -37,7 +37,7 @@ @router.get( "/{artefact_id}/environment-reviews", - response_model=list[ArtefactBuildEnvironmentReviewDTO], + response_model=list[ArtefactBuildEnvironmentReviewResponse], ) def get_environment_reviews( artefact: Artefact = Depends( @@ -57,7 +57,7 @@ def get_environment_reviews( @router.patch( "/{artefact_id}/environment-reviews/{review_id}", - response_model=ArtefactBuildEnvironmentReviewDTO, + response_model=ArtefactBuildEnvironmentReviewResponse, ) def update_environment_review( artefact_id: int, diff --git a/backend/test_observer/controllers/artefacts/models.py b/backend/test_observer/controllers/artefacts/models.py index e3298c37..08835367 100644 --- a/backend/test_observer/controllers/artefacts/models.py +++ b/backend/test_observer/controllers/artefacts/models.py @@ -34,7 +34,7 @@ ) -class UserDTO(BaseModel): +class UserResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int @@ -43,7 +43,7 @@ class UserDTO(BaseModel): name: str -class ArtefactDTO(BaseModel): +class ArtefactResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int @@ -60,14 +60,14 @@ class ArtefactDTO(BaseModel): image_url: str stage: str status: ArtefactStatus - assignee: UserDTO | None + assignee: UserResponse | None due_date: date | None bug_link: str all_environment_reviews_count: int completed_environment_reviews_count: int -class EnvironmentDTO(BaseModel): +class EnvironmentResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int @@ -75,7 +75,7 @@ class EnvironmentDTO(BaseModel): architecture: str -class TestExecutionDTO(BaseModel): +class TestExecutionResponse(BaseModel): __test__ = False model_config = 
ConfigDict(from_attributes=True) @@ -83,7 +83,7 @@ class TestExecutionDTO(BaseModel): id: int ci_link: str | None c3_link: str | None - environment: EnvironmentDTO + environment: EnvironmentResponse status: TestExecutionStatus rerun_request: Any = Field(exclude=True) test_plan: str @@ -93,25 +93,25 @@ def is_rerun_requested(self) -> bool: return bool(self.rerun_request) -class ArtefactBuildDTO(BaseModel): +class ArtefactBuildResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int architecture: str revision: int | None - test_executions: list[TestExecutionDTO] + test_executions: list[TestExecutionResponse] class ArtefactPatch(BaseModel): status: ArtefactStatus -class ArtefactVersionDTO(BaseModel): +class ArtefactVersionResponse(BaseModel): version: str artefact_id: int = Field(validation_alias=AliasPath("id")) -class ArtefactBuildMinimalDTO(BaseModel): +class ArtefactBuildMinimalResponse(BaseModel): model_config = ConfigDict(from_attributes=True) id: int @@ -119,12 +119,12 @@ class ArtefactBuildMinimalDTO(BaseModel): revision: int | None -class ArtefactBuildEnvironmentReviewDTO(BaseModel): +class ArtefactBuildEnvironmentReviewResponse(BaseModel): id: int review_decision: list[ArtefactBuildEnvironmentReviewDecision] review_comment: str - environment: EnvironmentDTO - artefact_build: ArtefactBuildMinimalDTO + environment: EnvironmentResponse + artefact_build: ArtefactBuildMinimalResponse class EnvironmentReviewPatch(BaseModel): diff --git a/backend/test_observer/controllers/test_executions/models.py b/backend/test_observer/controllers/test_executions/models.py index 878033a7..f01ea71e 100644 --- a/backend/test_observer/controllers/test_executions/models.py +++ b/backend/test_observer/controllers/test_executions/models.py @@ -30,9 +30,9 @@ from test_observer.common.constants import PREVIOUS_TEST_RESULT_COUNT from test_observer.controllers.artefacts.models import ( - ArtefactBuildMinimalDTO, - ArtefactDTO, - TestExecutionDTO, + ArtefactBuildMinimalResponse, + ArtefactResponse, + TestExecutionResponse, ) from test_observer.data_access.models_enums import ( FamilyName, @@ -175,13 +175,13 @@ class PendingRerun(BaseModel): "test_execution", "artefact_build", "artefact", "family" ) ) - test_execution: TestExecutionDTO = Field( + test_execution: TestExecutionResponse = Field( validation_alias=AliasPath("test_execution") ) - artefact: ArtefactDTO = Field( + artefact: ArtefactResponse = Field( validation_alias=AliasPath("test_execution", "artefact_build", "artefact") ) - artefact_build: ArtefactBuildMinimalDTO = Field( + artefact_build: ArtefactBuildMinimalResponse = Field( validation_alias=AliasPath("test_execution", "artefact_build") ) @@ -190,11 +190,11 @@ class DeleteReruns(BaseModel): test_execution_ids: set[int] -class TestEventDTO(BaseModel): +class TestEventResponse(BaseModel): event_name: str timestamp: datetime detail: str class StatusUpdateRequest(BaseModel): - events: list[TestEventDTO] + events: list[TestEventResponse] diff --git a/backend/test_observer/controllers/test_executions/patch.py b/backend/test_observer/controllers/test_executions/patch.py index 6bcd5800..b31f2253 100644 --- a/backend/test_observer/controllers/test_executions/patch.py +++ b/backend/test_observer/controllers/test_executions/patch.py @@ -18,7 +18,7 @@ from fastapi import APIRouter, Depends, HTTPException from sqlalchemy.orm import Session -from test_observer.controllers.artefacts.models import TestExecutionDTO +from test_observer.controllers.artefacts.models import TestExecutionResponse from 
test_observer.data_access.models import TestExecution from test_observer.data_access.models_enums import TestExecutionStatus, TestResultStatus from test_observer.data_access.setup import get_db @@ -28,7 +28,7 @@ router = APIRouter() -@router.patch("/{id}", response_model=TestExecutionDTO) +@router.patch("/{id}", response_model=TestExecutionResponse) def patch_test_execution( id: int, request: TestExecutionsPatchRequest, diff --git a/backend/test_observer/controllers/test_executions/status_update.py b/backend/test_observer/controllers/test_executions/status_update.py index ff25f66d..c0b6048f 100644 --- a/backend/test_observer/controllers/test_executions/status_update.py +++ b/backend/test_observer/controllers/test_executions/status_update.py @@ -26,7 +26,7 @@ from test_observer.data_access.setup import get_db from .logic import delete_previous_test_events -from .models import StatusUpdateRequest, TestEventDTO +from .models import StatusUpdateRequest, TestEventResponse from .testflinger_event_parser import TestflingerEventParser router = APIRouter() @@ -67,7 +67,7 @@ def put_status_update( db.commit() -@router.get("/{id}/status_update", response_model=list[TestEventDTO]) +@router.get("/{id}/status_update", response_model=list[TestEventResponse]) def get_status_update(id: int, db: Session = Depends(get_db)): test_execution = db.get( TestExecution, From b111cabbf1ab0bbfbba561c5f4091ccbe6ef7bfe Mon Sep 17 00:00:00 2001 From: Matias Piipari Date: Fri, 7 Feb 2025 11:23:04 +0200 Subject: [PATCH 2/3] Add family to artefact response (#256) Add family to Artefact response --- backend/schemata/openapi.json | 5 +++++ backend/test_observer/controllers/artefacts/models.py | 1 + backend/tests/controllers/artefacts/test_artefacts.py | 1 + backend/tests/controllers/test_executions/test_reruns.py | 1 + frontend/benchmarks/common.dart | 1 + frontend/lib/models/artefact.dart | 1 + frontend/test/dummy_data.dart | 1 + 7 files changed, 11 insertions(+) diff --git a/backend/schemata/openapi.json b/backend/schemata/openapi.json index 021fb98a..6ad039ba 100644 --- a/backend/schemata/openapi.json +++ b/backend/schemata/openapi.json @@ -1532,6 +1532,10 @@ "type": "string", "title": "Stage" }, + "family": { + "type": "string", + "title": "Family" + }, "status": { "$ref": "#/components/schemas/ArtefactStatus" }, @@ -1585,6 +1589,7 @@ "sha256", "image_url", "stage", + "family", "status", "assignee", "due_date", diff --git a/backend/test_observer/controllers/artefacts/models.py b/backend/test_observer/controllers/artefacts/models.py index 08835367..8ca18f65 100644 --- a/backend/test_observer/controllers/artefacts/models.py +++ b/backend/test_observer/controllers/artefacts/models.py @@ -59,6 +59,7 @@ class ArtefactResponse(BaseModel): sha256: str image_url: str stage: str + family: str status: ArtefactStatus assignee: UserResponse | None due_date: date | None diff --git a/backend/tests/controllers/artefacts/test_artefacts.py b/backend/tests/controllers/artefacts/test_artefacts.py index 2c7b8e6f..9209c502 100644 --- a/backend/tests/controllers/artefacts/test_artefacts.py +++ b/backend/tests/controllers/artefacts/test_artefacts.py @@ -279,6 +279,7 @@ def _assert_get_artefact_response(response: dict[str, Any], artefact: Artefact) "sha256": artefact.sha256, "image_url": artefact.image_url, "status": artefact.status, + "family": artefact.family, "assignee": None, "due_date": ( artefact.due_date.strftime("%Y-%m-%d") if artefact.due_date else None diff --git a/backend/tests/controllers/test_executions/test_reruns.py 
b/backend/tests/controllers/test_executions/test_reruns.py index 4f2753b2..1df1f927 100644 --- a/backend/tests/controllers/test_executions/test_reruns.py +++ b/backend/tests/controllers/test_executions/test_reruns.py @@ -103,6 +103,7 @@ def test_execution_to_pending_rerun(test_execution: TestExecution) -> dict: "completed_environment_reviews_count": ( test_execution.artefact_build.artefact.completed_environment_reviews_count ), + "family": test_execution.artefact_build.artefact.family, }, "artefact_build": { "id": test_execution.artefact_build.id, diff --git a/frontend/benchmarks/common.dart b/frontend/benchmarks/common.dart index 1d97c511..1cf25658 100644 --- a/frontend/benchmarks/common.dart +++ b/frontend/benchmarks/common.dart @@ -61,6 +61,7 @@ class ApiRepositoryMock extends Mock implements ApiRepository { name: 'artefact', version: '1', track: 'latest', + family: 'snap', store: 'ubuntu', series: '', repo: '', diff --git a/frontend/lib/models/artefact.dart b/frontend/lib/models/artefact.dart index 84302e60..615224d1 100644 --- a/frontend/lib/models/artefact.dart +++ b/frontend/lib/models/artefact.dart @@ -32,6 +32,7 @@ class Artefact with _$Artefact { required int id, required String name, required String version, + required String family, @Default('') String track, @Default('') String store, @Default('') String series, diff --git a/frontend/test/dummy_data.dart b/frontend/test/dummy_data.dart index e5797591..c20a5beb 100644 --- a/frontend/test/dummy_data.dart +++ b/frontend/test/dummy_data.dart @@ -33,6 +33,7 @@ const dummyArtefact = Artefact( id: 1, name: 'core', version: '16-2.61', + family: 'snap', track: 'latest', store: 'ubuntu', series: '', From 869219e551f15a845fb5ae133d8119942889c729 Mon Sep 17 00:00:00 2001 From: Omar Abou Selo Date: Mon, 10 Feb 2025 12:53:42 +0300 Subject: [PATCH 3/3] Add sphinx documentation (#249) * Initial sphinx starter pack setup * Add .gitignore for doc builds * Resolve most TODO's in conf.py * Address TODO's in contributing-myst.md * Removed last mention to acme in file * Addressed TODO in contributing.rst * Update acme * Set working directory for markdownlint * Remove line length rule for markdown as it doesn't make sense * Update doc workflows to trigger on push * Add words to spelling wordlist * Add more words to spelling wordlist * Remove starter pack documentation * Add introduction * Add submit-a-test how-to * Remove unnecessary section * Add glossary * Mention TO api docs * Remove duplicate mention of API docs and VPN * Update link checks * Fix spelling issues * Move main readme into terraform directory * Fix spelling issues * Remove doc cheat sheet files * Remove unused links * Bring back links.txt file * Add comment about ignored link checks * Resolve duplicate frontend warning * Update docs/content/explanation/glossary.rst Co-authored-by: Jonathan Cave * Add mention of Test Results * Remove `content` directory * Add check to only build read the docs if there are changes to docs directory --------- Co-authored-by: Jonathan Cave --- .github/workflows/automatic-doc-checks.yml | 21 ++ .github/workflows/markdown-style-checks.yml | 22 ++ .../sphinx-python-dependency-build-checks.yml | 52 ++++ .readthedocs.yaml | 40 +++ .wokeignore | 4 + README.md | 182 +---------- docs/.custom_wordlist.txt | 33 ++ docs/.gitignore | 5 + docs/.sphinx/.markdownlint.json | 21 ++ docs/.sphinx/.wordlist.txt | 64 ++++ docs/.sphinx/_static/project_specific.css | 0 docs/.sphinx/get_vale_conf.py | 53 ++++ docs/.sphinx/metrics/build_metrics.sh | 15 + 
docs/.sphinx/metrics/source_metrics.sh | 66 ++++ docs/.sphinx/pa11y.json | 9 + docs/.sphinx/requirements.txt | 4 + docs/.sphinx/spellingcheck.yaml | 30 ++ docs/Makefile | 188 +++++++++++ docs/conf.py | 293 ++++++++++++++++++ docs/explanation/glossary.rst | 43 +++ docs/explanation/index.rst | 9 + docs/how-to/index.rst | 9 + docs/how-to/submit-a-test.rst | 31 ++ docs/index.rst | 13 + docs/reuse/links.txt | 0 terraform/README.md | 183 +++++++++++ 26 files changed, 1213 insertions(+), 177 deletions(-) create mode 100644 .github/workflows/automatic-doc-checks.yml create mode 100644 .github/workflows/markdown-style-checks.yml create mode 100644 .github/workflows/sphinx-python-dependency-build-checks.yml create mode 100644 .readthedocs.yaml create mode 100644 .wokeignore create mode 100644 docs/.custom_wordlist.txt create mode 100644 docs/.gitignore create mode 100644 docs/.sphinx/.markdownlint.json create mode 100644 docs/.sphinx/.wordlist.txt create mode 100644 docs/.sphinx/_static/project_specific.css create mode 100644 docs/.sphinx/get_vale_conf.py create mode 100755 docs/.sphinx/metrics/build_metrics.sh create mode 100755 docs/.sphinx/metrics/source_metrics.sh create mode 100644 docs/.sphinx/pa11y.json create mode 100644 docs/.sphinx/requirements.txt create mode 100644 docs/.sphinx/spellingcheck.yaml create mode 100644 docs/Makefile create mode 100644 docs/conf.py create mode 100644 docs/explanation/glossary.rst create mode 100644 docs/explanation/index.rst create mode 100644 docs/how-to/index.rst create mode 100644 docs/how-to/submit-a-test.rst create mode 100644 docs/index.rst create mode 100644 docs/reuse/links.txt create mode 100644 terraform/README.md diff --git a/.github/workflows/automatic-doc-checks.yml b/.github/workflows/automatic-doc-checks.yml new file mode 100644 index 00000000..df8db9a0 --- /dev/null +++ b/.github/workflows/automatic-doc-checks.yml @@ -0,0 +1,21 @@ +name: Automatic doc checks + +on: + push: + branches-ignore: + - 'main' + paths: + - 'docs/**' + - '.github/workflows/automatic-doc-checks.yml' + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + documentation-checks: + uses: canonical/documentation-workflows/.github/workflows/documentation-checks.yaml@main + with: + working-directory: "docs" + fetch-depth: 0 diff --git a/.github/workflows/markdown-style-checks.yml b/.github/workflows/markdown-style-checks.yml new file mode 100644 index 00000000..3ed8b268 --- /dev/null +++ b/.github/workflows/markdown-style-checks.yml @@ -0,0 +1,22 @@ +name: Markdown style checks + +on: + push: + branches-ignore: + - 'main' + workflow_dispatch: + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + markdown-lint: + runs-on: ubuntu-22.04 + steps: + - uses: actions/checkout@v4 + with: + fetch-depth: 0 + - uses: DavidAnson/markdownlint-cli2-action@v16 + with: + config: "docs/.sphinx/.markdownlint.json" diff --git a/.github/workflows/sphinx-python-dependency-build-checks.yml b/.github/workflows/sphinx-python-dependency-build-checks.yml new file mode 100644 index 00000000..31219107 --- /dev/null +++ b/.github/workflows/sphinx-python-dependency-build-checks.yml @@ -0,0 +1,52 @@ +# The purpose of this workflow file is to confirm that the Sphinx +# virtual environment can be built from source, consequently documenting +# the packages required in the build environment to do that. 
+#
+# This is needed because some projects embed the documentation into built
+# artifacts which involves rendering the documentation on the target
+# architecture.
+#
+# Depending on the architecture, pip may or may not have already built wheels
+# available, and as such we need to make sure building wheels from source can
+# succeed.
+name: Sphinx python dependency build checks
+on:
+  push:
+    branches-ignore:
+      - 'main'
+    paths:
+      - 'docs/**'
+      - '.github/workflows/sphinx-python-dependency-build-checks.yml'
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  build:
+    name: build
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+
+      - name: Install dependencies
+        run: |
+          set -ex
+          sudo apt -y install \
+            cargo \
+            libpython3-dev \
+            libxml2-dev \
+            libxslt1-dev \
+            make \
+            python3-venv \
+            rustc \
+            libtiff5-dev libjpeg8-dev libopenjp2-7-dev zlib1g-dev libfreetype6-dev liblcms2-dev libwebp-dev tcl8.6-dev tk8.6-dev python3-tk libharfbuzz-dev libfribidi-dev libxcb1-dev
+      - name: Build Sphinx venv
+        working-directory: "docs"
+        run: |
+          set -ex
+          make install \
+            PIPOPTS="--no-binary :all:" \
+            || ( cat .sphinx/venv/pip_install.log && exit 1 )
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
new file mode 100644
index 00000000..4b2ef360
--- /dev/null
+++ b/.readthedocs.yaml
@@ -0,0 +1,40 @@
+# .readthedocs.yaml
+# Read the Docs configuration file
+# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
+
+# Required
+version: 2
+
+# Set the version of Python and other tools you might need
+build:
+  os: ubuntu-22.04
+  tools:
+    python: "3.11"
+  jobs:
+    pre_install:
+      - git fetch --unshallow || true
+    post_checkout:
+      # Cancel building pull requests when there aren't changes in the docs directory.
+      # If there are no changes (git diff exits with 0) we force the command to return with 183.
+      # This is a special exit code on Read the Docs that will cancel the build immediately.
+      # https://docs.readthedocs.io/en/stable/build-customization.html#cancel-build-based-on-a-condition
+      - |
+        if [ "$READTHEDOCS_VERSION_TYPE" = "external" ] && git diff --quiet origin/main -- docs/;
+        then
+          exit 183;
+        fi
+
+# Build documentation in the docs/ directory with Sphinx
+sphinx:
+  builder: dirhtml
+  configuration: docs/conf.py
+  fail_on_warning: true
+
+# If using Sphinx, optionally build your docs in additional formats such as PDF
+formats:
+- pdf
+
+# Optionally declare the Python requirements required to build your docs
+python:
+  install:
+    - requirements: docs/.sphinx/requirements.txt
diff --git a/.wokeignore b/.wokeignore
new file mode 100644
index 00000000..c64a6037
--- /dev/null
+++ b/.wokeignore
@@ -0,0 +1,4 @@
+# the cheat sheets contain a link to a repository with a block word which we
+# cannot avoid for now, i.e.
+# https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html
+doc-cheat-sheet*
diff --git a/README.md b/README.md
index 1bb16f51..12776a17 100644
--- a/README.md
+++ b/README.md
@@ -1,183 +1,11 @@
 # Test Observer
-Observe the status and state of certification tests for various artefacts
+Test Observer (TO) is a dashboard for viewing the results of tests run on different environments for a particular artefact.
A user interested in testing an artefact (a deb, snap, charm or image) under different environments (particular machines or cloud setups) can use TO as a means of storing, viewing and comparing results with previous runs or versions of an artefact. The last use case is particularly useful for catching regressions. Additionally, TO provides a mechanism to assign reviewers that can look at results and mark artefacts as approved or failed to gate updates. It is important to note that TO does not run the tests itself, but provides an API through which users can report their results.

-## Prerequisites for deploying locally
+Certification currently deploys an instance of TO that they use for reviewing Stable Release Updates (SRUs). Other teams also use this instance for their tests. You can visit the [frontend](https://test-observer.canonical.com) and view the [API docs](https://test-observer-api.canonical.com), although this currently requires Canonical VPN access. There's also a staging deployment of the [frontend](https://test-observer-staging.canonical.com) and [API](https://test-observer-api-staging.canonical.com) that teams can use to test their integration.

-- `juju` 3.1 or later (`sudo snap install juju --channel=3.1/stable`)
-- `microk8s` 1.27 or later (`sudo snap install microk8s --channel=1.27-strict/stable`) + [permission setup steps after install](https://juju.is/docs/sdk/set-up-your-development-environment#heading--install-microk8s)
-- `terraform` 1.4.6 or later (`sudo snap install terraform --classic`)
-- `lxd` 5.19 or later (`sudo snap install lxd --channel=5.19/stable` or `sudo snap refresh lxd --channel=5.19/stable` if already installed) + `lxd init --auto` after install.
-- `charmcraft` 2.3.0 or later (`sudo snap install charmcraft --channel=2.x/stable --classic`)
-- optional: `jhack` for all kinds of handy Juju and charm SDK development and debugging operations (`sudo snap install jhack`)
+## Run Locally

-## Deploying a copy of the system with terraform / juju in microk8s
+For development, look at the [backend](/backend/README.md) and [frontend](/frontend/README.md).

-Workaround for juju bug https://bugs.launchpad.net/juju/+bug/1988355
-
-```
-mkdir -p ~/.local/share
-```
-
-Fist configure microk8s with the needed extensions:
-
-```
-sudo microk8s enable dns hostpath-storage metallb ingress
-# metallb setup involves choosing a free IP range for the load balancer.
-```
-
-Setup juju:
-
-```bash
-juju bootstrap microk8s
-juju model-config logging-config="<root>=DEBUG"
-```
-
-### Deploy the system locally with Terraform
-
-In the `terraform` directory of your working copy, complete the one-time initialisation:
-
-```bash
-cd terraform
-terraform init
-```
-
-After initialization (or after making changes to the terraform configuration) you can deploy the whole system with:
-
-```bash
-TF_VAR_environment=development TF_VAR_external_ingress_hostname="mah-domain.com" terraform apply -auto-approve
-```
-
-At the time of writing, this will accomplish deploying the following:
-
-- the backend API server
-- the frontend served using nginx
-- a postgresql database
-- nginx as ingress
-- backend connected to frontend (the backend's public facing base URI passed to the frontend app)
-- backend connected to database
-- backend connected to load balancer
-- frontend connected to load balancer
-
-Terraform works by applying changes between the current state of the system and what is in the plan (the test-observer.tf configuration file). When `terraform apply` is run the 1st time, there is no state -> it will create the Juju model and all resources inside it.
When it is run with a pre-existing model already in place, it will instead set / unset config values that have changed, add / remove relations, add / remove applications, etc. Basically, it makes working with Juju declarative - yay! - -The terraform juju provider is documented over here: https://registry.terraform.io/providers/juju/juju/latest/docs - -Terraform tracks its state with a .tfstate file which is created as a result of running `terraform apply` -- for production purposes this will be stored in an S3-like bucket remotely, and for local development purposes it sits in the `terraform` directory aftery you have done a `terraform apply`). - -After all is up, you can run `juju switch test-observer-development` to use the development juju model. Then `juju status --relations` should give you output to the direction of the following: - -```bash -$ juju status --relations -Model Controller Cloud/Region Version SLA Timestamp -test-observer-development juju-controller microk8s/localhost 3.1.2 unsupported 15:38:51+03:00 - -App Version Status Scale Charm Channel Rev Address Exposed Message -api active 1 test-observer-api latest/edge 15 10.152.183.182 no -db 14.7 active 1 postgresql-k8s 14/stable 73 10.152.183.172 no Primary -frontend active 1 test-observer-frontend latest/edge 8 10.152.183.79 no -ingress 25.3.0 active 1 nginx-ingress-integrator stable 59 10.152.183.103 no Ingress IP(s): 127.0.0.1, 127.0.0.1, Service IP(s): 10.152.183.72, 10.152.183.34 - -Unit Workload Agent Address Ports Message -api/0* active idle 10.1.131.142 -db/0* active idle 10.1.131.132 Primary -frontend/0* active idle 10.1.131.169 -ingress/0* active idle 10.1.131.167 Ingress IP(s): 127.0.0.1, 127.0.0.1, Service IP(s): 10.152.183.72, 10.152.183.34 - -Relation provider Requirer Interface Type Message -api:test-observer-rest-api frontend:test-observer-rest-api http regular -db:database api:database postgresql_client regular -db:database-peers db:database-peers postgresql_peers peer -db:restart db:restart rolling_op peer -ingress:nginx-route api:nginx-route nginx-route regular -ingress:nginx-route frontend:nginx-route nginx-route regular -``` - -## Add /etc/hosts entries - -To test the application, you need to create some aliases in `/etc/hosts` to the IP address that the ingress got from `metallb` (`juju status` above will find you the ingress IP). Let's assume you have a domain `mah-domain.com` that you want to expose service under, the backend and frontend will be present as subdomains `test-observer.mah-domain.com` and `test-observer-api.mah-domain.com`, respectively: - -```bash -$ cat /etc/hosts -192.168.0.202 test-observer.mah-domain.com test-observer-api.mah-domain.com -... -``` - -Note that without this step the frontend will fail to connect to api as it's trying to use `test-observer-api.mah-domain.com` - -## Developing the charm - -To develop and test updates to the backend and frontend charms, you would typically want to first complete the above steps to deploy a working system. Once you have done that, proceed with the following steps. 
- -### Build and refresh the backend charm - -You can make edits to the backend charm and refresh it in the running system on the fly with: - -```bash -cd backend/charm -charmcraft pack -juju refresh test-observer-api --path ./test-observer-api_ubuntu-22.04-amd64.charm - -# to update the OCI image that runs the backend -juju attach-resource test-observer-api api-image=ghcr.io/canonical/test_observer/backend:[tag or sha] -``` - -### Build and refresh the frontend charm - -Same thing with the frontend: - -```bash -cd frontend/charm -charmcraft pack - -juju refresh test-observer-frontend ./test-observer-frontend_ubuntu-22.04-amd64.charm - -# to update the OCI image that runs the backend -juju attach-resource test-observer-frontend frontend-image=ghcr.io/canonical/test_observer/frontend:[tag or sha] -``` - -Note that the frontend app is made aware of the backend URL to connect to using the global `window.testObserverAPIBaseURI`, which is set at runtime with some nginx config level trickery based on... - -- the `test-observer-api` charm's `hostname` config value. -- the frontend charm's `test-observer-api-scheme` config value. - -These in turn can be set using the terraform plan (`terraform/test-observer.tf` and associated variables). - -## Running tests - -To run the unit and integration tests for the frontend charms, do the following: - -```bash -cd frontend/charm -tox -e unit -tox -e integration -``` - -## Releasing the charms - -Charms are released through GitHub actions on push to main. If however you need to release charms on your branch before merging with main you could always just add your branch as a trigger to those same GitHub actions. - -## VS Code & charm libraries - -VS Code fails to find (for autocompletions and code navigation purposes) the charm libraries under `lib` in each of `backend/charm` and `frontend/charm`. There is a .vscode-settings-default.json found under each of these directories which you can copy to the `.gitignore`d path `.vscode/settings.json` to make them fly. Taking the backend charm as an example: - -```bash -mkdir -p backend/charm/.vscode -cp backend/charm/.vscode-settings-default.json backend/charm/.vscode/settings.json - -mkdir -p frontend/charm/.vscode -cp frontend/charm/.vscode-settings-default.json frontend/charm/.vscode/settings.json -``` - -Now if you use as your project the directory `backend/charm` and `frontend/charm` respectively (which you'll want to do also for them to keep their own virtual environments), VS Code should be happy. 
-
-## Handy documentation pointers about charming
-
-- [Integrations (how to provide and require relations)](https://juju.is/docs/sdk/integration)
-
-### Enable the K8s Dashboard
-
-You need an auth token in case you want to connect to the kubernetes dashboard:
-
-```bash
-microk8s kubectl describe secret -n kube-system microk8s-dashboard-token
-```
+To run it via Terraform and Juju charms, simulating the production and staging environments, look at [terraform](terraform/README.md)
diff --git a/docs/.custom_wordlist.txt b/docs/.custom_wordlist.txt
new file mode 100644
index 00000000..08a51ee4
--- /dev/null
+++ b/docs/.custom_wordlist.txt
@@ -0,0 +1,33 @@
+# Leave a blank line at the end of this file to support concatenation
+backend
+backend's
+Backend
+cjk
+cryptographically
+dvipng
+fonts
+freefont
+frontend
+frontend's
+Frontend
+github
+GPG
+GPLv
+gyre
+https
+lang
+latexmk
+md
+otf
+plantuml
+schemas
+SRU
+SRUs
+tex
+texlive
+TOC
+utils
+VPN
+WCAG
+xetex
+xindy
diff --git a/docs/.gitignore b/docs/.gitignore
new file mode 100644
index 00000000..be332da3
--- /dev/null
+++ b/docs/.gitignore
@@ -0,0 +1,5 @@
+_build
+venv
+.doctrees
+warnings.txt
+.wordlist.dic
\ No newline at end of file
diff --git a/docs/.sphinx/.markdownlint.json b/docs/.sphinx/.markdownlint.json
new file mode 100644
index 00000000..536f9ea9
--- /dev/null
+++ b/docs/.sphinx/.markdownlint.json
@@ -0,0 +1,21 @@
+{
+  "default": false,
+  "MD003": {
+    "style": "atx"
+  },
+  "MD014": true,
+  "MD018": true,
+  "MD022": true,
+  "MD023": true,
+  "MD026": {
+    "punctuation": ".,;。,;"
+  },
+  "MD031": {
+    "list_items": false
+  },
+  "MD032": true,
+  "MD035": true,
+  "MD042": true,
+  "MD045": true,
+  "MD052": true
+}
\ No newline at end of file
diff --git a/docs/.sphinx/.wordlist.txt b/docs/.sphinx/.wordlist.txt
new file mode 100644
index 00000000..be5021a1
--- /dev/null
+++ b/docs/.sphinx/.wordlist.txt
@@ -0,0 +1,64 @@
+ACME
+ACME's
+addons
+AGPLv
+API
+APIs
+balancer
+Charmhub
+CLI
+DCO
+Diátaxis
+Dqlite
+dropdown
+EBS
+EKS
+enablement
+favicon
+Furo
+Git
+GitHub
+Grafana
+IAM
+installable
+JSON
+Juju
+Kubeflow
+Kubernetes
+Launchpad
+linter
+LTS
+LXD
+Makefile
+Makefiles
+Matrix
+Mattermost
+MicroCeph
+MicroCloud
+MicroOVN
+MyST
+namespace
+namespaces
+NodePort
+Numbat
+observability
+OEM
+OLM
+Permalink
+pre
+Quickstart
+ReadMe
+reST
+reStructuredText
+roadmap
+RTD
+subdirectories
+subfolders
+subtree
+TODO
+Ubuntu
+UI
+UUID
+VM
+webhook
+YAML
diff --git a/docs/.sphinx/_static/project_specific.css b/docs/.sphinx/_static/project_specific.css
new file mode 100644
index 00000000..e69de29b
diff --git a/docs/.sphinx/get_vale_conf.py b/docs/.sphinx/get_vale_conf.py
new file mode 100644
index 00000000..9ee2d0b5
--- /dev/null
+++ b/docs/.sphinx/get_vale_conf.py
@@ -0,0 +1,53 @@
+#!
/usr/bin/env python + +import requests +import os + +DIR = os.getcwd() + + +def main(): + if os.path.exists(f"{DIR}/.sphinx/styles/Canonical"): + print("Vale directory exists") + else: + os.makedirs(f"{DIR}/.sphinx/styles/Canonical") + + url = ( + "https://api.github.com/repos/canonical/praecepta/" + + "contents/styles/Canonical" + ) + r = requests.get(url) + for item in r.json(): + download = requests.get(item["download_url"]) + file = open(".sphinx/styles/Canonical/" + item["name"], "w") + file.write(download.text) + file.close() + + if os.path.exists(f"{DIR}/.sphinx/styles/config/vocabularies/Canonical"): + print("Vocab directory exists") + else: + os.makedirs(f"{DIR}/.sphinx/styles/config/vocabularies/Canonical") + + url = ( + "https://api.github.com/repos/canonical/praecepta/" + + "contents/styles/config/vocabularies/Canonical" + ) + r = requests.get(url) + for item in r.json(): + download = requests.get(item["download_url"]) + file = open( + ".sphinx/styles/config/vocabularies/Canonical/" + item["name"], + "w" + ) + file.write(download.text) + file.close() + config = requests.get( + "https://raw.githubusercontent.com/canonical/praecepta/main/vale.ini" + ) + file = open(".sphinx/vale.ini", "w") + file.write(config.text) + file.close() + + +if __name__ == "__main__": + main() diff --git a/docs/.sphinx/metrics/build_metrics.sh b/docs/.sphinx/metrics/build_metrics.sh new file mode 100755 index 00000000..bd1ff1cb --- /dev/null +++ b/docs/.sphinx/metrics/build_metrics.sh @@ -0,0 +1,15 @@ +#!/bin/bash +# shellcheck disable=all + +links=0 +images=0 + +# count number of links +links=$(find . -type d -path './.sphinx' -prune -o -name '*.html' -exec cat {} + | grep -o " \n" \ + "------------------------------------------------------------- \n" + +.PHONY: full-help woke-install spellcheck-install pa11y-install install run html \ + epub serve clean clean-doc spelling spellcheck linkcheck woke \ + allmetrics pa11y pdf-prep-force pdf-prep pdf Makefile.sp vale bash + +full-help: $(VENVDIR) + @. $(VENV); $(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + @echo "\n\033[1;31mNOTE: This help texts shows unsupported targets!\033[0m" + @echo "Run 'make help' to see supported targets." + +# If requirements are updated, venv should be rebuilt and timestamped. +$(VENVDIR): + python3 -c "import venv" || \ + (echo "You must install python3-venv before you can build the documentation."; exit 1) + @echo "... setting up virtualenv" + python3 -m venv $(VENVDIR) + . $(VENV); pip install $(PIPOPTS) --require-virtualenv \ + --upgrade -r $(SPHINXDIR)/requirements.txt \ + --log $(VENVDIR)/pip_install.log + @test ! -f $(VENVDIR)/pip_list.txt || \ + mv $(VENVDIR)/pip_list.txt $(VENVDIR)/pip_list.txt.bak + @. $(VENV); pip list --local --format=freeze > $(VENVDIR)/pip_list.txt + @touch $(VENVDIR) + +woke-install: + @type woke >/dev/null 2>&1 || \ + { \ + echo "Installing system-wide \"woke\" snap..."; \ + confirm_sudo=$(CONFIRM_SUDO); \ + if [ "$$confirm_sudo" != "y" ] && [ "$$confirm_sudo" != "Y" ]; then \ + read -p "This requires sudo privileges. Proceed? 
[y/N]: " confirm_sudo; \ + fi; \ + if [ "$$confirm_sudo" = "y" ] || [ "$$confirm_sudo" = "Y" ]; then \ + sudo snap install woke; \ + else \ + echo "Installation cancelled."; \ + fi \ + } + +spellcheck-install: + @type aspell >/dev/null 2>&1 || \ + { \ + echo "Installing system-wide \"aspell\" packages..."; \ + confirm_sudo=$(CONFIRM_SUDO); \ + if [ "$$confirm_sudo" != "y" ] && [ "$$confirm_sudo" != "Y" ]; then \ + read -p "This requires sudo privileges. Proceed? [y/N]: " confirm_sudo; \ + fi; \ + if [ "$$confirm_sudo" = "y" ] || [ "$$confirm_sudo" = "Y" ]; then \ + sudo apt-get install aspell aspell-en; \ + else \ + echo "Installation cancelled."; \ + fi \ + } + +pa11y-install: + @type $(PA11Y) >/dev/null 2>&1 || { \ + echo "Installing \"pa11y\" from npm... \n"; \ + mkdir -p $(SPHINXDIR)/node_modules/ ; \ + npm install --prefix $(SPHINXDIR) pa11y; \ + } + +install: $(VENVDIR) + +run: install + . $(VENV); $(VENVDIR)/bin/sphinx-autobuild -b dirhtml "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) + +# Doesn't depend on $(BUILDDIR) to rebuild properly at every run. +html: install + . $(VENV); $(SPHINXBUILD) -W --keep-going -b dirhtml "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS) + +epub: install + . $(VENV); $(SPHINXBUILD) -b epub "$(SOURCEDIR)" "$(BUILDDIR)" -w $(SPHINXDIR)/warnings.txt $(SPHINXOPTS) + +serve: html + cd "$(BUILDDIR)"; python3 -m http.server --bind 127.0.0.1 8000 + +clean: clean-doc + @test ! -e "$(VENVDIR)" -o -d "$(VENVDIR)" -a "$(abspath $(VENVDIR))" != "$(VENVDIR)" + rm -rf $(VENVDIR) + rm -rf $(SPHINXDIR)/node_modules/ + rm -rf $(SPHINXDIR)/styles + rm -rf $(SPHINXDIR)/vale.ini + +clean-doc: + git clean -fx "$(BUILDDIR)" + rm -rf $(SPHINXDIR)/.doctrees + +spellcheck: spellcheck-install + . $(VENV) ; python3 -m pyspelling -c $(SPHINXDIR)/spellingcheck.yaml -j $(shell nproc) + +spelling: html spellcheck + +linkcheck: install + . $(VENV) ; $(SPHINXBUILD) -b linkcheck "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) || { grep --color -F "[broken]" "$(BUILDDIR)/output.txt"; exit 1; } + exit 0 + +woke: woke-install + woke $(ALLFILES) --exit-1-on-failure \ + -c https://raw.githubusercontent.com/canonical/Inclusive-naming/main/config.yml + +pa11y: pa11y-install html + find $(BUILDDIR) -name *.html -print0 | xargs -n 1 -0 $(PA11Y) + +vale: install + @. $(VENV); test -d $(SPHINXDIR)/venv/lib/python*/site-packages/vale || pip install vale + @. $(VENV); test -f $(SPHINXDIR)/vale.ini || python3 $(SPHINXDIR)/get_vale_conf.py + @. $(VENV); find $(SPHINXDIR)/venv/lib/python*/site-packages/vale/vale_bin -size 195c -exec vale --config "$(SPHINXDIR)/vale.ini" $(TARGET) > /dev/null \; + @cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt + @cat $(SPHINXDIR)/.wordlist.txt $(SOURCEDIR)/.custom_wordlist.txt >> $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt + @echo "" + @echo "Running Vale against $(TARGET). To change target set TARGET= with make command" + @echo "" + @. 
$(VENV); vale --config "$(SPHINXDIR)/vale.ini" --glob='*.{md,txt,rst}' $(TARGET) || true + @cat $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt > $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept.txt && rm $(SPHINXDIR)/styles/config/vocabularies/Canonical/accept_backup.txt + +pdf-prep: install + @for packageName in $(REQPDFPACKS); do (dpkg-query -W -f='$${Status}' $$packageName 2>/dev/null | \ + grep -c "ok installed" >/dev/null && echo "Package $$packageName is installed") && continue || \ + (echo "\nPDF generation requires the installation of the following packages: $(REQPDFPACKS)" && \ + echo "" && echo "Run 'sudo make pdf-prep-force' to install these packages" && echo "" && echo \ + "Please be aware these packages will be installed to your system") && exit 1 ; done + +pdf-prep-force: + apt-get update + apt-get upgrade -y + apt-get install --no-install-recommends -y $(REQPDFPACKS) \ + +pdf: pdf-prep + @. $(VENV); sphinx-build -M latexpdf "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) + @rm ./$(BUILDDIR)/latex/front-page-light.pdf || true + @rm ./$(BUILDDIR)/latex/normal-page-footer.pdf || true + @find ./$(BUILDDIR)/latex -name "*.pdf" -exec mv -t ./$(BUILDDIR) {} + + @rm -r $(BUILDDIR)/latex + @echo "\nOutput can be found in ./$(BUILDDIR)\n" + +allmetrics: html + @echo "Recording documentation metrics..." + @echo "Checking for existence of vale..." + . $(VENV) + @. $(VENV); test -d $(SPHINXDIR)/venv/lib/python*/site-packages/vale || pip install vale + @. $(VENV); test -f $(SPHINXDIR)/vale.ini || python3 $(SPHINXDIR)/get_vale_conf.py + @. $(VENV); find $(SPHINXDIR)/venv/lib/python*/site-packages/vale/vale_bin -size 195c -exec vale --config "$(SPHINXDIR)/vale.ini" $(TARGET) > /dev/null \; + @eval '$(METRICSDIR)/source_metrics.sh $(PWD)' + @eval '$(METRICSDIR)/build_metrics.sh $(PWD) $(METRICSDIR)' + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: + . $(VENV); $(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/conf.py b/docs/conf.py new file mode 100644 index 00000000..aaac6609 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,293 @@ +import datetime +import ast + +# Configuration for the Sphinx documentation builder. +# All configuration specific to your project should be done in this file. +# +# A complete list of built-in Sphinx configuration values: +# https://www.sphinx-doc.org/en/master/usage/configuration.html +# +# Our starter pack uses the custom Canonical Sphinx extension +# to keep all documentation based on it consistent and on brand: +# https://github.com/canonical/canonical-sphinx + + +####################### +# Project information # +####################### + +# Project name + +project = "Test Observer" +author = "Canonical Ltd." + + +# Sidebar documentation title; best kept reasonably short +# +# To include a version number, add it here (hardcoded or automated). +# +# To disable the title, set to an empty string. + +html_title = project + " documentation" + + +# Copyright string; shown at the bottom of the page +# +# Now, the starter pack uses CC-BY-SA as the license +# and the current year as the copyright year. +# +# If your docs need another license, specify it instead of 'CC-BY-SA'. +# +# If your documentation is a part of the code repository of your project, +# it inherits the code license instead; specify it instead of 'CC-BY-SA'. +# +# For static works, it is common to provide the first publication year. 
+# Another option is to provide both the first year of publication +# and the current year, especially for docs that frequently change, +# e.g. 2022–2023 (note the en-dash). +# +# A way to check a repo's creation date is to get a classic GitHub token +# with 'repo' permissions; see https://github.com/settings/tokens +# Next, use 'curl' and 'jq' to extract the date from the API's output: +# +# curl -H 'Authorization: token ' \ +# -H 'Accept: application/vnd.github.v3.raw' \ +# https://api.github.com/repos/canonical/ | jq '.created_at' + +copyright = "%s CC-BY-SA, %s" % (datetime.date.today().year, author) + + +# Documentation website URL +# +# TODO: Update with the official URL of your docs or leave empty if unsure. +# +# The Open Graph Protocol (OGP) enhances page display in a social graph +# and is used by social media platforms; see https://ogp.me/ + +ogp_site_url = "" + + +# Preview name of the documentation website +# +# To use a different name for the project in previews, update as needed. + +ogp_site_name = project + + +# Preview image URL +# +# To customise the preview image, update as needed. + +ogp_image = "https://assets.ubuntu.com/v1/253da317-image-document-ubuntudocs.svg" + + +# Product favicon; shown in bookmarks, browser tabs, etc. + +# To customise the favicon, uncomment and update as needed. + +# html_favicon = '.sphinx/_static/favicon.png' + + +# Dictionary of values to pass into the Sphinx context for all pages: +# https://www.sphinx-doc.org/en/master/usage/configuration.html#confval-html_context + +html_context = { + # Product page URL; can be different from product docs URL + # + # Change to your product website URL, + # dropping the 'https://' prefix, e.g. 'ubuntu.com/lxd'. + # + # If there's no such website, + # remove the {{ product_page }} link from the page header template + # (usually .sphinx/_templates/header.html; also, see README.rst). + "product_page": "test-observer.canonical.com", + # Product tag image; the orange part of your logo, shown in the page header + # + # To add a tag image, uncomment and update as needed. + # 'product_tag': '_static/tag.png', + # Your Discourse instance URL + # + # Change to your Discourse instance URL or leave empty. + # + # If set, adding ':discourse: 123' to an .rst file + # will add a link to Discourse topic 123 at the bottom of the page. + "discourse": "", + # Your Mattermost channel URL + # + # Change to your Mattermost channel URL or leave empty. + "mattermost": "https://chat.canonical.com/canonical/channels/test-observer", + # Your Matrix channel URL + # + # Change to your Matrix channel URL or leave empty. + "matrix": "", + # Your documentation GitHub repository URL + # + # Change to your documentation GitHub repository URL or leave empty. + # + # If set, links for viewing the documentation source files + # and creating GitHub issues are added at the bottom of each page. + "github_url": "https://github.com/canonical/test_observer", + # Docs branch in the repo; used in links for viewing the source files + # + # To customise the branch, uncomment and update as needed. + 'github_version': 'main', + # Docs location in the repo; used in links for viewing the source files + # + # To customise the directory, uncomment and update as needed. 
+ "github_folder": "/docs/", + # To enable or disable the Previous / Next buttons at the bottom of pages + # Valid options: none, prev, next, both + "sequential_nav": "none", + # To enable listing contributors on individual pages, set to True + "display_contributors": False, +} + +# Project slug; see https://meta.discourse.org/t/what-is-category-slug/87897 +# +# If your documentation is hosted on https://docs.ubuntu.com/, +# uncomment and update as needed. + +# slug = '' + + +# Template and asset locations + +html_static_path = [".sphinx/_static"] +templates_path = [".sphinx/_templates"] + + +############# +# Redirects # +############# + +# To set up redirects: https://documatt.gitlab.io/sphinx-reredirects/usage.html +# For example: 'explanation/old-name.html': '../how-to/prettify.html', + +# To set up redirects in the Read the Docs project dashboard: +# https://docs.readthedocs.io/en/stable/guides/redirects.html + +# If undefined, set to None, or empty, +# the sphinx_reredirects extension will be disabled. + +redirects = {} + + +########################### +# Link checker exceptions # +########################### + +# A regex list of URLs that are ignored by 'make linkcheck' +# + +linkcheck_ignore = [ + "http://127.0.0.1:8000", + # TO is currently behind a VPN so ignore link checks + "https://test-observer.canonical.com/*", + "https://test-observer-staging.canonical.com/*", + "https://test-observer-api.canonical.com/*", + "https://test-observer-api-staging.canonical.com/*", + ] + + +# A regex list of URLs where anchors are ignored by 'make linkcheck' + +linkcheck_anchors_ignore_for_url = [r"https://github\.com/.*"] + +# give linkcheck multiple tries on failure +# linkcheck_timeout = 30 +linkcheck_retries = 3 + +######################## +# Configuration extras # +######################## + +# Custom MyST syntax extensions; see +# https://myst-parser.readthedocs.io/en/latest/syntax/optional.html +# +# NOTE: By default, the following MyST extensions are enabled: +# substitution, deflist, linkify + +# myst_enable_extensions = set() + + +# Custom Sphinx extensions; see +# https://www.sphinx-doc.org/en/master/usage/extensions/index.html + +# NOTE: The canonical_sphinx extension is required for the starter pack. +# It automatically enables the following extensions: +# - custom-rst-roles +# - myst_parser +# - notfound.extension +# - related-links +# - sphinx_copybutton +# - sphinx_design +# - sphinx_reredirects +# - sphinx_tabs.tabs +# - sphinxcontrib.jquery +# - sphinxext.opengraph +# - terminal-output +# - youtube-links + +extensions = [ + "canonical_sphinx", + "sphinxcontrib.cairosvgconverter", + "sphinx_last_updated_by_git", +] + +# Excludes files or directories from processing + +exclude_patterns = [ + "doc-cheat-sheet*", +] + +# Adds custom CSS files, located under 'html_static_path' + +html_css_files = [ + "css/pdf.css", +] + + +# Adds custom JavaScript files, located under 'html_static_path' + +# html_js_files = [] + + +# Specifies a reST snippet to be appended to each .rst file + +rst_epilog = """ +.. include:: /reuse/links.txt +""" + +# Feedback button at the top; enabled by default +# +# To disable the button, uncomment this. + +# disable_feedback_button = True + + +# Your manpage URL +# +# To enable manpage links, uncomment and update as needed. +# +# NOTE: If set, adding ':manpage:' to an .rst file +# adds a link to the corresponding man section at the bottom of the page. 
+
+# manpages_url = f'https://manpages.ubuntu.com/manpages/{codename}/en/' + \
+# f'man{section}/{page}.{section}.html'
+
+
+# Specifies a reST snippet to be prepended to each .rst file.
+# This defines a :center: role that centers table cell content,
+# and a :h2: role that styles content for use with PDF generation.
+
+rst_prolog = """
+.. role:: center
+ :class: align-center
+.. role:: h2
+ :class: hclass2
+"""
+
+# Workaround for https://github.com/canonical/canonical-sphinx/issues/34
+
+if "discourse_prefix" not in html_context and "discourse" in html_context:
+ html_context["discourse_prefix"] = html_context["discourse"] + "/t/"
diff --git a/docs/explanation/glossary.rst b/docs/explanation/glossary.rst
new file mode 100644
index 00000000..c63b1453
--- /dev/null
+++ b/docs/explanation/glossary.rst
@@ -0,0 +1,43 @@
+Glossary
+========
+
+Here is a list of terms used by Test Observer (TO) and what they mean.
+
+Artefact
+--------
+
+An artefact is the thing under test, for instance a particular snap or image. An artefact has a name, a version, a family and a stage. Artefacts also have other attributes that are specific to their families (e.g. track is specific to snaps and charms).
+
+Family
+------
+
+The type of an artefact. TO currently supports snaps, debs, charms and images.
+
+Stage
+-----
+
+The level of risk of the artefact. This property is dependent on the family. Specifically:
+
+* snaps and charms can be one of edge, beta, candidate and stable
+* debs can be proposed or updates
+* images can be pending or current
+
+Environment
+-----------
+
+The architecture and name of what the artefact was tested on. In most cases this is a physical machine, but it can be more complicated.
+
+Test Plan
+---------
+
+The name of a particular grouping of tests. This is useful if you want to logically partition the tests you have, or if multiple teams are running tests on the same artefact and environment.
+
+Test Execution
+--------------
+
+An execution of a test plan on an artefact under a particular environment. It can contain many test results.
+
+Test Result
+-----------
+
+Includes the name and status (PASSED, FAILED, or SKIPPED) of a test. Additionally, it can optionally include logs and other useful bits of information.
diff --git a/docs/explanation/index.rst b/docs/explanation/index.rst
new file mode 100644
index 00000000..ae43c048
--- /dev/null
+++ b/docs/explanation/index.rst
@@ -0,0 +1,9 @@
+Explanation
+===========
+
+This section covers conceptual questions about Test Observer.
+
+.. toctree::
+ :maxdepth: 1
+
+ glossary
\ No newline at end of file
diff --git a/docs/how-to/index.rst b/docs/how-to/index.rst
new file mode 100644
index 00000000..9b87c371
--- /dev/null
+++ b/docs/how-to/index.rst
@@ -0,0 +1,9 @@
+How-to guides
+==============
+
+These how-to guides cover key operations and processes in Test Observer.
+
+.. toctree::
+ :maxdepth: 1
+
+ submit-a-test
\ No newline at end of file
diff --git a/docs/how-to/submit-a-test.rst b/docs/how-to/submit-a-test.rst
new file mode 100644
index 00000000..be012621
--- /dev/null
+++ b/docs/how-to/submit-a-test.rst
@@ -0,0 +1,31 @@
+Submit a test to Test Observer
+==============================
+
+Below are the main steps involved in submitting a test to Test Observer (TO):
+
+#. Inform TO that testing has started
+#. Submit the results of the testing
+#. Inform TO that testing has ended
+
+Note that if you want to execute multiple test plans on the same environment, or test on multiple environments, you will need to repeat these steps for each test execution.
+
+Inform TO that testing has started
+----------------------------------
+
+Send a ``PUT`` request to the ``start_test`` endpoint with a body following the schema appropriate to the type of artefact you are testing (the API docs list the different schemas). The body of this request includes information about the artefact, the environment and the test plan. TO will store this information and return a test execution id, which you will need in order to submit the results. It is worth noting that a test execution is not a single test: it is a collection of tests grouped under a single test plan, to be executed on a single environment.
+
+Submit the results of the testing
+---------------------------------
+
+When you have the results of your testing, you can submit them to TO using a ``POST`` request to the ``test-results`` endpoint. This endpoint may be called multiple times if you want to submit the results in batches.
+
+Inform TO that testing has ended
+--------------------------------
+
+Once testing has been completed and you have submitted the results to TO, you should inform TO that the test execution has ended. You can do so by sending a ``PATCH`` request to the ``test-executions`` endpoint with a body containing::
+
+    {
+        "status": "COMPLETED"
+    }
+
+TO will then parse the submitted results and set the status of the test execution to either ``PASSED`` (if all tests passed), ``FAILED`` (if some tests failed), or ``ENDED_PREMATURELY`` (if no tests were submitted).
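+
+For illustration, here is a sketch of the whole flow using ``curl``, with ``TO_API`` set to the API base URL and ``ID`` set to the test execution id returned by the first call. The exact paths and request bodies shown are assumptions for illustration only; consult the API docs for the authoritative schemas::
+
+    # Start a test execution; start-test.json must follow the schema
+    # for your artefact family (the response contains the execution id)
+    curl -X PUT "$TO_API/v1/test-executions/start-test" \
+        -H 'Content-Type: application/json' -d @start-test.json
+
+    # Submit results, in one or more batches
+    curl -X POST "$TO_API/v1/test-executions/$ID/test-results" \
+        -H 'Content-Type: application/json' \
+        -d '[{"name": "test_boot", "status": "PASSED"}]'
+
+    # Mark the test execution as completed
+    curl -X PATCH "$TO_API/v1/test-executions/$ID" \
+        -H 'Content-Type: application/json' \
+        -d '{"status": "COMPLETED"}'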
diff --git a/docs/index.rst b/docs/index.rst
new file mode 100644
index 00000000..b9bcdb9c
--- /dev/null
+++ b/docs/index.rst
@@ -0,0 +1,13 @@
+Introduction
+============
+
+Test Observer (TO) is a dashboard for viewing the results of tests run on different environments for a particular artefact. A user interested in testing an artefact (a deb, snap, charm or image) under different environments (particular machines or cloud setups) can use TO as a means of storing, viewing and comparing results with previous runs or versions of an artefact. The last use case is particularly useful for catching regressions. Additionally, TO provides a mechanism for assigning reviewers, who can look at results and mark artefacts as approved or failed in order to gate updates. It is important to note that TO does not run the tests itself; rather, it provides an API through which users can report results.
+
+Certification currently deploys an instance of TO that it uses for reviewing Stable Release Updates (SRUs). Other teams also use this instance for their tests. You can visit `the frontend <https://test-observer.canonical.com/>`_ and view `the API docs <https://test-observer-api.canonical.com/>`_, although this currently requires Canonical VPN access. There's also a staging deployment of the `frontend <https://test-observer-staging.canonical.com/>`_ and `API <https://test-observer-api-staging.canonical.com/>`_ that teams can use to test their integration.
+
+.. toctree::
+ :hidden:
+ :maxdepth: 2
+
+ how-to/index
+ explanation/index
diff --git a/docs/reuse/links.txt b/docs/reuse/links.txt
new file mode 100644
index 00000000..e69de29b
diff --git a/terraform/README.md b/terraform/README.md
new file mode 100644
index 00000000..8f875b6b
--- /dev/null
+++ b/terraform/README.md
@@ -0,0 +1,183 @@
+# Test Observer
+
+Observe the status and state of certification tests for various artefacts
+
+## Prerequisites for deploying locally
+
+- `juju` 3.1 or later (`sudo snap install juju --channel=3.1/stable`)
+- `microk8s` 1.27 or later (`sudo snap install microk8s --channel=1.27-strict/stable`)
+ [permission setup steps after install](https://juju.is/docs/sdk/set-up-your-development-environment#heading--install-microk8s)
+- `lxd` 5.19 or later (`sudo snap install lxd --channel=5.19/stable`, or `sudo snap refresh lxd --channel=5.19/stable` if already installed), followed by `lxd init --auto`
+- `terraform` 1.4.6 or later (`sudo snap install terraform --classic`)
+- `charmcraft` 2.3.0 or later (`sudo snap install charmcraft --channel=2.x/stable --classic`)
+- optional: `jhack` for all kinds of handy Juju and charm SDK development and debugging operations (`sudo snap install jhack`)
+
+## Deploying a copy of the system with terraform / juju in microk8s
+
+As a workaround for juju bug https://bugs.launchpad.net/juju/+bug/1988355, first make sure this directory exists:
+
+```
+mkdir -p ~/.local/share
+```
+
+First, configure microk8s with the needed extensions (the metallb setup involves choosing a free IP range for the load balancer):
+
+```
+sudo microk8s enable dns hostpath-storage metallb ingress
+```
+
+Set up juju:
+
+```bash
+juju bootstrap microk8s
+juju model-config logging-config="<root>=DEBUG"
+```
+
+### Deploy the system locally with Terraform
+
+In the `terraform` directory of your working copy, complete the one-time initialisation:
+
+```bash
+cd terraform
+terraform init
+```
+
+After initialisation (or after making changes to the terraform configuration), you can deploy the whole system with:
+
+```bash
+TF_VAR_environment=development TF_VAR_external_ingress_hostname="mah-domain.com" terraform apply -auto-approve
+```
+
+At the time of writing, this deploys the following:
+
+- the backend API server
+- the frontend served using nginx
+- a postgresql database
+- nginx as ingress
+- backend connected to frontend (the backend's public-facing base URI passed to the frontend app)
+- backend connected to database
+- backend connected to load balancer
+- frontend connected to load balancer
+
+Terraform works by applying changes between the current state of the system and what is in the plan (the test-observer.tf configuration file). When `terraform apply` is run the first time, there is no state, so it will create the Juju model and all resources inside it. When it is run with a pre-existing model already in place, it will instead set / unset config values that have changed, add / remove relations, add / remove applications, etc. Basically, it makes working with Juju declarative - yay!
+
+The terraform juju provider is documented over here: https://registry.terraform.io/providers/juju/juju/latest/docs
+
+Terraform tracks its state with a .tfstate file, which is created as a result of running `terraform apply` -- for production purposes this will be stored in an S3-like bucket remotely, and for local development purposes it sits in the `terraform` directory after you have run `terraform apply`.
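+
+Since the state file is Terraform's record of what it has deployed, it can be useful to inspect it after an apply. The resource address below is an illustrative assumption; use an address actually reported by `terraform state list`:
+
+```bash
+# List every resource recorded in the state
+terraform state list
+
+# Show the recorded attributes of a single resource (illustrative address)
+terraform state show juju_model.test_observer
+```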
+
+After all is up, you can run `juju switch test-observer-development` to use the development juju model. Then `juju status --relations` should give you output along the lines of the following:
+
+```bash
+$ juju status --relations
+Model Controller Cloud/Region Version SLA Timestamp
+test-observer-development juju-controller microk8s/localhost 3.1.2 unsupported 15:38:51+03:00
+
+App Version Status Scale Charm Channel Rev Address Exposed Message
+api active 1 test-observer-api latest/edge 15 10.152.183.182 no
+db 14.7 active 1 postgresql-k8s 14/stable 73 10.152.183.172 no Primary
+frontend active 1 test-observer-frontend latest/edge 8 10.152.183.79 no
+ingress 25.3.0 active 1 nginx-ingress-integrator stable 59 10.152.183.103 no Ingress IP(s): 127.0.0.1, 127.0.0.1, Service IP(s): 10.152.183.72, 10.152.183.34
+
+Unit Workload Agent Address Ports Message
+api/0* active idle 10.1.131.142
+db/0* active idle 10.1.131.132 Primary
+frontend/0* active idle 10.1.131.169
+ingress/0* active idle 10.1.131.167 Ingress IP(s): 127.0.0.1, 127.0.0.1, Service IP(s): 10.152.183.72, 10.152.183.34
+
+Relation provider Requirer Interface Type Message
+api:test-observer-rest-api frontend:test-observer-rest-api http regular
+db:database api:database postgresql_client regular
+db:database-peers db:database-peers postgresql_peers peer
+db:restart db:restart rolling_op peer
+ingress:nginx-route api:nginx-route nginx-route regular
+ingress:nginx-route frontend:nginx-route nginx-route regular
+```
+
+## Add /etc/hosts entries
+
+To test the application, you need to create some aliases in `/etc/hosts` for the IP address that the ingress got from `metallb` (the `juju status` output above shows the ingress IP). Assuming you have a domain `mah-domain.com` that you want to expose the service under, the frontend and backend will be served as the subdomains `test-observer.mah-domain.com` and `test-observer-api.mah-domain.com`, respectively:
+
+```bash
+$ cat /etc/hosts
+192.168.0.202 test-observer.mah-domain.com test-observer-api.mah-domain.com
+...
+```
+
+Note that without this step, the frontend will fail to connect to the API, as it tries to use `test-observer-api.mah-domain.com`.
+
+## Developing the charm
+
+To develop and test updates to the backend and frontend charms, you would typically want to first complete the above steps to deploy a working system. Once you have done that, proceed with the following steps.
+
+### Build and refresh the backend charm
+
+You can make edits to the backend charm and refresh it in the running system on the fly with:
+
+```bash
+cd backend/charm
+charmcraft pack
+juju refresh test-observer-api --path ./test-observer-api_ubuntu-22.04-amd64.charm
+
+# to update the OCI image that runs the backend
+juju attach-resource test-observer-api api-image=ghcr.io/canonical/test_observer/backend:[tag or sha]
+```
+
+### Build and refresh the frontend charm
+
+Same thing with the frontend:
+
+```bash
+cd frontend/charm
+charmcraft pack
+
+juju refresh test-observer-frontend --path ./test-observer-frontend_ubuntu-22.04-amd64.charm
+
+# to update the OCI image that runs the frontend
+juju attach-resource test-observer-frontend frontend-image=ghcr.io/canonical/test_observer/frontend:[tag or sha]
+```
+
+Note that the frontend app is made aware of the backend URL to connect to using the global `window.testObserverAPIBaseURI`, which is set at runtime with some nginx config level trickery based on:
+
+- the `test-observer-api` charm's `hostname` config value.
+- the frontend charm's `test-observer-api-scheme` config value.
+
+These in turn can be set using the terraform plan (`terraform/test-observer.tf` and associated variables), or checked manually as sketched below.
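+
+For a quick manual check, the same options can also be set directly on the running model with `juju config`; the values below are illustrative assumptions, not the deployed configuration:
+
+```bash
+# Hostname the frontend uses to reach the API (illustrative value)
+juju config test-observer-api hostname=test-observer-api.mah-domain.com
+
+# Scheme the frontend prepends to that hostname (illustrative value)
+juju config test-observer-frontend test-observer-api-scheme=https://
+```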
+
+## Running tests
+
+To run the unit and integration tests for the frontend charm, do the following:
+
+```bash
+cd frontend/charm
+tox -e unit
+tox -e integration
+```
+
+## Releasing the charms
+
+Charms are released through GitHub actions on push to main. If, however, you need to release charms from your branch before merging with main, you can add your branch as a trigger to those same GitHub actions.
+
+## VS Code & charm libraries
+
+VS Code fails to find (for autocompletion and code navigation purposes) the charm libraries under `lib` in each of `backend/charm` and `frontend/charm`. There is a `.vscode-settings-default.json` under each of these directories, which you can copy to the `.gitignore`d path `.vscode/settings.json` to make them fly:
+
+```bash
+mkdir -p backend/charm/.vscode
+cp backend/charm/.vscode-settings-default.json backend/charm/.vscode/settings.json
+
+mkdir -p frontend/charm/.vscode
+cp frontend/charm/.vscode-settings-default.json frontend/charm/.vscode/settings.json
+```
+
+Now, if you open `backend/charm` and `frontend/charm` as separate VS Code projects (which you will want to do anyway so that each keeps its own virtual environment), VS Code should be happy.
+
+## Handy documentation pointers about charming
+
+- [Integrations (how to provide and require relations)](https://juju.is/docs/sdk/integration)
+
+## Enable the K8s Dashboard
+
+You need an auth token if you want to connect to the Kubernetes dashboard:
+
+```bash
+microk8s kubectl describe secret -n kube-system microk8s-dashboard-token
+```
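+
+Assuming the dashboard addon is enabled (enable it with `microk8s enable dashboard` if not), you can then forward the dashboard service to localhost and log in with that token; the local port below is just an example:
+
+```bash
+# Enable the dashboard addon if it is not already enabled
+microk8s enable dashboard
+
+# Expose the dashboard on https://127.0.0.1:10443
+microk8s kubectl port-forward -n kube-system service/kubernetes-dashboard 10443:443
+```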