Update datetime parsing in tests
jonathangreen committed Mar 10, 2025
1 parent 20a6c64 commit 5d8777b
Showing 4 changed files with 10 additions and 28 deletions.
18 changes: 1 addition & 17 deletions src/palace/manager/service/analytics/eventdata.py
@@ -5,14 +5,7 @@
 from uuid import UUID
 
 import flask
-from pydantic import (
-    AwareDatetime,
-    BaseModel,
-    ConfigDict,
-    computed_field,
-    field_serializer,
-)
-from pydantic_core.core_schema import FieldSerializationInfo
+from pydantic import AwareDatetime, BaseModel, ConfigDict, computed_field
 from typing_extensions import Self
 
 from palace.manager.sqlalchemy.model.library import Library
@@ -98,15 +91,6 @@ def delta(self) -> int | None:
         frozen=True,
     )
 
-    # We serialize the datetime fields as strings in the JSON output, to match what
-    # the output looked like before we switched to Pydantic.
-    # TODO: It would be nice to be able to drop this and just use the default
-    # which is to serialize datetimes as ISO8601 strings. Need to see if the
-    # analytics consumers can handle that.
-    @field_serializer("start", "end", "availability_time", when_used="json")
-    def serialize_dt(self, value: datetime, _info: FieldSerializationInfo) -> str:
-        return str(value)
-
     @classmethod
     def create(
         cls,
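With the custom field_serializer removed, the datetime fields fall back to Pydantic's default JSON encoding, which is ISO 8601 rather than str(). A minimal sketch of the difference, using a stand-in Event model rather than the real AnalyticsEventData:

# Sketch only: Event is a stand-in model, not the real AnalyticsEventData.
from datetime import datetime, timezone

from pydantic import AwareDatetime, BaseModel


class Event(BaseModel):
    start: AwareDatetime


dt = datetime(2025, 3, 10, 12, 30, tzinfo=timezone.utc)

# Old behavior, via the removed serializer: plain str() formatting.
print(str(dt))  # 2025-03-10 12:30:00+00:00

# New behavior, Pydantic's default JSON encoding: ISO 8601.
print(Event(start=dt).model_dump_json())  # {"start":"2025-03-10T12:30:00Z"}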
4 changes: 1 addition & 3 deletions src/palace/manager/service/analytics/local.py
@@ -36,6 +36,4 @@ def collect(
             ),
         )
         if was_new:
-            self.log.info(
-                "EVENT %s %s=>%s", event.type, event.old_value, event.new_value
-            )
+            self.log.info(f"EVENT {event.type} {event.old_value}=>{event.new_value}")
5 changes: 1 addition & 4 deletions src/palace/manager/service/analytics/s3.py
@@ -26,10 +26,7 @@ def collect(
         event: AnalyticsEventData,
         session: Session | None = None,
     ) -> None:
-        # We exclude the collection_id from the json because it wasn't included
-        # in our pre-pydantic implementation, it was only used in the file key.
-        # TODO: See if adding collection_id will cause any issue with the ingest process.
-        content = event.model_dump_json(exclude={"collection_id"})
+        content = event.model_dump_json()
 
         storage = self._get_storage()
         analytics_file_key = self._get_file_key(event)
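With the exclude set dropped, the JSON written to S3 now includes collection_id. A rough before/after sketch, using a hypothetical two-field model in place of AnalyticsEventData and arbitrary illustrative values:

# Sketch only: ExampleEvent is a hypothetical stand-in, not the real model.
from pydantic import BaseModel


class ExampleEvent(BaseModel):
    type: str
    collection_id: int | None = None


event = ExampleEvent(type="checkout", collection_id=7)  # illustrative values

# Old behavior: collection_id was stripped from the stored JSON.
print(event.model_dump_json(exclude={"collection_id"}))  # {"type":"checkout"}

# New behavior: the full payload, including collection_id.
print(event.model_dump_json())  # {"type":"checkout","collection_id":7}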
11 changes: 7 additions & 4 deletions tests/manager/service/analytics/test_s3.py
@@ -6,6 +6,7 @@
 from unittest.mock import MagicMock, create_autospec
 
 import pytest
+from pydantic import TypeAdapter
 
 from palace.manager.core.classifier import Classifier
 from palace.manager.core.config import CannotLoadConfiguration
@@ -30,8 +31,9 @@ def __init__(self, db: DatabaseTransactionFixture) -> None:
             self.analytics_storage,
         )
 
-    @staticmethod
-    def timestamp_to_string(timestamp: datetime.datetime) -> str:
+        self.timestamp_adapter = TypeAdapter(datetime.datetime)
+
+    def timestamp_to_string(self, timestamp: datetime.datetime) -> str:
         """Return a string representation of a datetime object.
 
         :param timestamp: datetime object storing a timestamp
@@ -40,7 +42,8 @@ def timestamp_to_string(timestamp: datetime.datetime) -> str:
         :return: String representation of the timestamp
         :rtype: str
         """
-        return str(timestamp)
+
+        return self.timestamp_adapter.dump_python(timestamp, mode="json")
 
 
 @pytest.fixture(scope="function")
@@ -162,7 +165,7 @@ def test_analytics_data_with_associated_license_pool_is_correctly_stored_in_s3(
         assert event["published"] == edition.published
         assert event["medium"] == edition.medium
         assert event["collection"] == collection.name
-        assert event.get("collection_id") is None
+        assert event["collection_id"] == collection.id
         assert event["identifier_type"] == identifier.type
         assert event["identifier"] == identifier.identifier
         assert event["data_source"] == data_source.name
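The rewritten test helper leans on Pydantic's TypeAdapter so the expected string matches the model's default JSON encoding: dump_python(..., mode="json") yields an ISO 8601 string where str() produced the space-separated form. A small sketch with an illustrative timestamp:

# Sketch only: the timestamp value is illustrative.
import datetime

from pydantic import TypeAdapter

adapter = TypeAdapter(datetime.datetime)
ts = datetime.datetime(2025, 3, 10, 12, 30, tzinfo=datetime.timezone.utc)

print(adapter.dump_python(ts, mode="json"))  # 2025-03-10T12:30:00Z
print(str(ts))  # 2025-03-10 12:30:00+00:00 (what the old helper returned)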
