Skip to content

Commit

Permalink
Merge branch 'main' into 783-fix-download-authentication-feeds
Browse files Browse the repository at this point in the history
  • Loading branch information
qcdyx authored Jan 27, 2025
2 parents f0dd0f4 + e59d956 commit d946b77
Show file tree
Hide file tree
Showing 160 changed files with 20,519 additions and 1,182 deletions.
8 changes: 0 additions & 8 deletions .github/workflows/api-deployer.yml
Original file line number Diff line number Diff line change
Expand Up @@ -201,10 +201,6 @@ jobs:
name: database_gen
path: api/src/database_gen/

- name: Copy to db models to functions directory
run: |
cp -R api/src/database_gen/ functions-python/database_gen
# api schema was generated and uploaded in api-build-test job above.
- uses: actions/download-artifact@v4
with:
Expand Down Expand Up @@ -249,10 +245,6 @@ jobs:
name: database_gen
path: api/src/database_gen/

- name: Copy to db models to functions directory
run: |
cp -R api/src/database_gen/ functions-python/database_gen
# api schema was generated and uploaded in api-build-test job above.
- uses: actions/download-artifact@v4
with:
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/datasets-batch-deployer.yml
Original file line number Diff line number Diff line change
Expand Up @@ -119,7 +119,7 @@ jobs:
uses: actions/upload-artifact@v4
with:
name: database_gen
path: functions-python/database_gen/
path: api/src/database_gen/

- name: Build python functions
run: |
Expand Down
7 changes: 7 additions & 0 deletions .github/workflows/web-app-deployer.yml
Original file line number Diff line number Diff line change
Expand Up @@ -266,6 +266,13 @@ jobs:
working-directory: functions
run: npx firebase deploy --only functions

- name: Set robots.txt
working-directory: web-app
run: |
if [ "${{ inputs.FIREBASE_PROJECT }}" != "prod" ]; then
mv public/robots.staging.txt public/robots.txt
fi
- name: Build
working-directory: web-app
run: yarn build:${FIREBASE_PROJECT}
Expand Down
6 changes: 2 additions & 4 deletions api/src/feeds/impl/models/basic_feed_impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,14 +23,12 @@ class Config:
def from_orm(cls, feed: Feed | None, _=None) -> BasicFeed | None:
if not feed:
return None
latest_official_status = None
if len(feed.officialstatushistories) > 0:
latest_official_status = max(feed.officialstatushistories, key=lambda x: x.timestamp).is_official
return cls(
id=feed.stable_id,
data_type=feed.data_type,
status=feed.status,
official=latest_official_status,
official=feed.official,
official_updated_at=feed.official_updated_at,
created_at=feed.created_at,
external_ids=sorted(
[ExternalIdImpl.from_orm(item) for item in feed.externalids], key=lambda x: x.external_id
Expand Down
8 changes: 5 additions & 3 deletions api/src/feeds/impl/models/gtfs_dataset_impl.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
from functools import reduce
from typing import List

from packaging.version import Version

from database_gen.sqlacodegen_models import Gtfsdataset, Validationreport
from feeds.impl.models.bounding_box_impl import BoundingBoxImpl
from feeds.impl.models.validation_report_impl import ValidationReportImpl
from feeds_gen.models.gtfs_dataset import GtfsDataset
from utils.model_utils import compare_java_versions


class GtfsDatasetImpl(GtfsDataset):
Expand All @@ -30,7 +29,8 @@ def from_orm_latest_validation_report(
"""
if validation_reports:
latest_report = reduce(
lambda a, b: a if Version(a.validator_version) > Version(b.validator_version) else b, validation_reports
lambda a, b: a if compare_java_versions(a.validator_version, b.validator_version) == 1 else b,
validation_reports,
)
return ValidationReportImpl.from_orm(latest_report)
return None
Expand All @@ -49,4 +49,6 @@ def from_orm(cls, gtfs_dataset: Gtfsdataset | None) -> GtfsDataset | None:
hash=gtfs_dataset.hash,
bounding_box=BoundingBoxImpl.from_orm(gtfs_dataset.bounding_box),
validation_report=cls.from_orm_latest_validation_report(gtfs_dataset.validation_reports),
service_date_range_start=gtfs_dataset.service_date_range_start,
service_date_range_end=gtfs_dataset.service_date_range_end,
)
7 changes: 4 additions & 3 deletions api/src/feeds/impl/models/latest_dataset_impl.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
from functools import reduce

from packaging.version import Version

from database_gen.sqlacodegen_models import Gtfsdataset
from feeds.impl.models.bounding_box_impl import BoundingBoxImpl
from feeds.impl.models.validation_report_impl import ValidationReportImpl
from feeds_gen.models.latest_dataset import LatestDataset
from feeds_gen.models.latest_dataset_validation_report import LatestDatasetValidationReport
from utils.model_utils import compare_java_versions


class LatestDatasetImpl(LatestDataset):
Expand All @@ -28,7 +27,7 @@ def from_orm(cls, dataset: Gtfsdataset | None) -> LatestDataset | None:
validation_report: LatestDatasetValidationReport | None = None
if dataset.validation_reports:
latest_report = reduce(
lambda a, b: a if Version(a.validator_version) > Version(b.validator_version) else b,
lambda a, b: a if compare_java_versions(a.validator_version, b.validator_version) == 1 else b,
dataset.validation_reports,
)
(
Expand All @@ -52,6 +51,8 @@ def from_orm(cls, dataset: Gtfsdataset | None) -> LatestDataset | None:
hosted_url=dataset.hosted_url,
bounding_box=BoundingBoxImpl.from_orm(dataset.bounding_box),
downloaded_at=dataset.downloaded_at,
service_date_range_start=dataset.service_date_range_start,
service_date_range_end=dataset.service_date_range_end,
hash=dataset.hash,
validation_report=validation_report,
)
2 changes: 2 additions & 0 deletions api/src/feeds/impl/models/search_feed_item_result_impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,8 @@ def from_orm_gtfs(cls, feed_search_row):
hosted_url=feed_search_row.latest_dataset_hosted_url,
downloaded_at=feed_search_row.latest_dataset_downloaded_at,
hash=feed_search_row.latest_dataset_hash,
service_date_range_start=feed_search_row.latest_dataset_service_date_range_start,
service_date_range_end=feed_search_row.latest_dataset_service_date_range_end,
)
if feed_search_row.latest_dataset_id
else None,
Expand Down
26 changes: 26 additions & 0 deletions api/src/utils/model_utils.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,26 @@
from packaging.version import Version


def compare_java_versions(v1: str | None, v2: str | None):
"""
Compare two version strings v1 and v2.
Returns 1 if v1 > v2, -1 if v1 < v2,
otherwise 0.
The version strings are expected to be in the format of
major.minor.patch[-SNAPSHOT]
"""
if v1 is None and v2 is None:
return 0
if v1 is None:
return -1
if v2 is None:
return 1
# clean version strings replacing the SNAPSHOT suffix with .dev0
v1 = v1.replace("-SNAPSHOT", ".dev0")
v2 = v2.replace("-SNAPSHOT", ".dev0")
if Version(v1) > Version(v2):
return 1
elif Version(v1) < Version(v2):
return -1
else:
return 0
16 changes: 12 additions & 4 deletions api/tests/test_data/extra_test_data.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,9 @@
"hosted_url": "https://example.com/dataset-1",
"hash": "hash",
"downloaded_at": "2024-01-31T00:00:00+00:00",
"feed_stable_id": "mdb-1"
"feed_stable_id": "mdb-1",
"service_date_range_start": "2024-01-01",
"service_date_range_end": "2025-01-01"
},
{
"id": "dataset-2",
Expand All @@ -16,7 +18,9 @@
"hosted_url": "https://example.com/dataset-2",
"hash": "hash",
"downloaded_at": "2024-02-01T00:00:00+00:00",
"feed_stable_id": "mdb-1"
"feed_stable_id": "mdb-1",
"service_date_range_start": "2024-01-01",
"service_date_range_end": "2025-01-01"
},
{
"id": "dataset-3",
Expand All @@ -25,7 +29,9 @@
"hosted_url": "https://example.com/dataset-3",
"hash": "hash",
"downloaded_at": "2024-02-02T00:00:00+00:00",
"feed_stable_id": "mdb-10"
"feed_stable_id": "mdb-10",
"service_date_range_start": "2024-01-01",
"service_date_range_end": "2025-01-01"
},
{
"id": "dataset-4",
Expand All @@ -34,7 +40,9 @@
"hosted_url": "https://example.com/dataset-4",
"hash": "hash",
"downloaded_at": "2024-02-03T00:00:00+00:00",
"feed_stable_id": "mdb-10"
"feed_stable_id": "mdb-10",
"service_date_range_start": "2024-01-01",
"service_date_range_end": "2025-01-01"
}
],
"validation_reports": [
Expand Down
20 changes: 15 additions & 5 deletions api/tests/test_data/test_datasets.json
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,9 @@
"downloaded_at": "2024-02-01T00:00:00Z",
"hash": "hash-1",
"latest": true,
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))"
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
"service_date_range_start": "2024-01-01",
"service_date_range_end": "2025-01-01"
},
{
"id": "dataset-2",
Expand All @@ -17,7 +19,9 @@
"downloaded_at": "2024-01-01T00:00:00Z",
"hash": "hash-2",
"latest": false,
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))"
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
"service_date_range_start": "2024-01-01",
"service_date_range_end": "2025-01-01"
},
{
"id": "dataset-3",
Expand All @@ -26,7 +30,9 @@
"downloaded_at": "2024-01-01T00:00:00Z",
"hash": "hash-3",
"latest": true,
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))"
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
"service_date_range_start": "2024-01-01",
"service_date_range_end": "2025-01-01"
},
{
"id": "dataset-4",
Expand All @@ -35,7 +41,9 @@
"downloaded_at": "2024-02-01T00:00:00Z",
"hash": "hash-4",
"latest": false,
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))"
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
"service_date_range_start": "2024-01-01",
"service_date_range_end": "2025-01-01"
},
{
"id": "dataset-5",
Expand All @@ -44,7 +52,9 @@
"downloaded_at": "2024-01-01T00:00:00Z",
"hash": "hash-5",
"latest": true,
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))"
"bounding_box": "POLYGON((-122.75 36.8, -122.75 37.8, -121.75 37.8, -121.75 36.8, -122.75 36.8))",
"service_date_range_start": "2024-01-01",
"service_date_range_end": "2025-01-01"
}
],
"validation_reports": [
Expand Down
4 changes: 3 additions & 1 deletion api/tests/unittest/models/test_basic_feed_impl.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import copy
import unittest
from datetime import datetime
from datetime import datetime, date

from database_gen.sqlacodegen_models import (
Feed,
Expand Down Expand Up @@ -64,6 +64,8 @@
downloaded_at="downloaded_at",
hash="hash",
bounding_box="bounding_box",
service_date_range_start=date(2024, 1, 1),
service_date_range_end=date(2025, 1, 1),
validation_reports=[
Validationreport(
id="id",
Expand Down
6 changes: 5 additions & 1 deletion api/tests/unittest/models/test_gtfs_dataset_impl.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import unittest
from datetime import datetime
from datetime import datetime, date

from geoalchemy2 import WKTElement

Expand Down Expand Up @@ -42,6 +42,8 @@ def test_from_orm(self):
Validationreport(validator_version="0.2.0"),
Validationreport(validator_version="1.1.1"),
],
service_date_range_start=date(2024, 1, 1),
service_date_range_end=date(2025, 1, 1),
)
result = GtfsDatasetImpl.from_orm(orm)
assert result.id == "stable_id"
Expand All @@ -56,5 +58,7 @@ def test_from_orm(self):
assert result.bounding_box.minimum_longitude == 3.0
assert result.bounding_box.maximum_longitude == 4.0
assert result.validation_report.validator_version == "1.1.1"
assert result.service_date_range_start == date(2024, 1, 1)
assert result.service_date_range_end == date(2025, 1, 1)

assert GtfsDatasetImpl.from_orm(None) is None
6 changes: 5 additions & 1 deletion api/tests/unittest/models/test_gtfs_feed_impl.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import copy
import unittest
from datetime import datetime
from datetime import datetime, date

from geoalchemy2 import WKTElement

Expand Down Expand Up @@ -87,6 +87,8 @@ def create_test_notice(notice_code: str, total_notices: int, severity: str):
note="note",
downloaded_at=datetime(year=2022, month=12, day=31, hour=13, minute=45, second=56),
hash="hash",
service_date_range_start=date(2024, 1, 1),
service_date_range_end=date(2025, 1, 1),
bounding_box=WKTElement(POLYGON, srid=4326),
latest=True,
validation_reports=[
Expand Down Expand Up @@ -169,6 +171,8 @@ def create_test_notice(notice_code: str, total_notices: int, severity: str):
unique_warning_count=4,
unique_info_count=2,
),
service_date_range_start="2024-01-01",
service_date_range_end="2025-01-01",
),
)

Expand Down
6 changes: 5 additions & 1 deletion api/tests/unittest/models/test_latest_dataset_impl.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import unittest
from datetime import datetime
from datetime import datetime, date

from geoalchemy2 import WKTElement

Expand All @@ -23,6 +23,8 @@ def test_from_orm(self):
downloaded_at=now,
hash="hash",
bounding_box=WKTElement(POLYGON, srid=4326),
service_date_range_start=date(2024, 1, 1),
service_date_range_end=date(2025, 1, 1),
validation_reports=[
Validationreport(validator_version="1.0.0"),
Validationreport(
Expand Down Expand Up @@ -50,6 +52,8 @@ def test_from_orm(self):
minimum_longitude=3.0,
maximum_longitude=4.0,
),
service_date_range_start=date(2024, 1, 1),
service_date_range_end=date(2025, 1, 1),
validation_report={
"validator_version": "1.2.0",
"total_error": 3,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,8 @@ def __init__(self, **kwargs):
latest_dataset_downloaded_at=downloaded_at,
latest_dataset_bounding_box=None,
latest_dataset_hash="latest_dataset_hash",
latest_dataset_service_date_range_start="2030-10-10",
latest_dataset_service_date_range_end="2031-10-10",
external_ids=[],
redirect_ids=[],
feed_reference_ids=[],
Expand Down Expand Up @@ -80,6 +82,8 @@ def test_from_orm_gtfs(self):
hosted_url=item.latest_dataset_hosted_url,
downloaded_at=item.latest_dataset_downloaded_at,
hash=item.latest_dataset_hash,
service_date_range_start=item.latest_dataset_service_date_range_start,
service_date_range_end=item.latest_dataset_service_date_range_end,
),
)
assert result == expected
Expand Down
27 changes: 27 additions & 0 deletions api/tests/utils/test_compare_java_versions.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
import unittest
from utils.model_utils import compare_java_versions


class TestCompareJavaVersions(unittest.TestCase):
    """Unit tests for utils.model_utils.compare_java_versions."""

    def test_compare_versions_equal(self):
        # Identical versions tie, with or without the SNAPSHOT suffix.
        for left, right in [("1.0.0", "1.0.0"), ("1.0.0-SNAPSHOT", "1.0.0-SNAPSHOT")]:
            with self.subTest(left=left, right=right):
                self.assertEqual(compare_java_versions(left, right), 0)

    def test_compare_versions_v1_greater(self):
        # First argument is newer; a release beats its own snapshot.
        cases = [("1.0.1", "1.0.0"), ("1.0.0", "0.9.9"), ("1.0.0", "1.0.0-SNAPSHOT")]
        for left, right in cases:
            with self.subTest(left=left, right=right):
                self.assertEqual(compare_java_versions(left, right), 1)

    def test_compare_versions_v2_greater(self):
        # Mirror of the case above: the second argument wins.
        cases = [("1.0.0", "1.0.1"), ("0.9.9", "1.0.0"), ("1.0.0-SNAPSHOT", "1.0.0")]
        for left, right in cases:
            with self.subTest(left=left, right=right):
                self.assertEqual(compare_java_versions(left, right), -1)

    def test_compare_versions_with_none(self):
        # None sorts below any real version; two Nones are equal.
        self.assertEqual(compare_java_versions(None, None), 0)
        self.assertEqual(compare_java_versions(None, "1.0.0"), -1)
        self.assertEqual(compare_java_versions("1.0.0", None), 1)


if __name__ == "__main__":
    unittest.main()
Loading

0 comments on commit d946b77

Please sign in to comment.