
Commit 723fa11

Removed TimezoneAwareDatetime in favor of Pydantic's AwareDatetime and removed unnecessary direct dependencies

Signed-off-by: ammar <[email protected]>
ammar92 committed Oct 25, 2023
1 parent 25dc132 commit 723fa11
Showing 15 changed files with 28 additions and 57 deletions.
10 changes: 3 additions & 7 deletions bytes/bytes/models.py
@@ -5,9 +5,8 @@
from typing import Any, Dict, List, NewType, Optional
from uuid import UUID

-from pydantic import AfterValidator, BaseModel, Field
+from pydantic import BaseModel, Field, AwareDatetime
from pydantic.v1.datetime_parse import parse_datetime
-from typing_extensions import Annotated

RetrievalLink = NewType("RetrievalLink", str)
SecureHash = NewType("SecureHash", str)
@@ -36,17 +35,14 @@ def _validate_timezone_aware_datetime(value: datetime) -> datetime:
return parsed


-TimezoneAwareDatetime = Annotated[datetime, AfterValidator(_validate_timezone_aware_datetime)]


class MimeType(BaseModel):
value: str


class Job(BaseModel):
id: UUID
-started_at: TimezoneAwareDatetime
-ended_at: TimezoneAwareDatetime
+started_at: AwareDatetime
+ended_at: AwareDatetime


class Boefje(BaseModel):
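For context, a minimal sketch (not part of this commit) of how Pydantic's built-in AwareDatetime enforces the same constraint the removed TimezoneAwareDatetime alias did; the trimmed Job model below is illustrative and keeps only the started_at field:

    from datetime import datetime, timezone

    from pydantic import AwareDatetime, BaseModel, ValidationError


    class Job(BaseModel):
        started_at: AwareDatetime  # must carry tzinfo, like the old TimezoneAwareDatetime


    Job(started_at=datetime(2023, 10, 25, 12, 0, tzinfo=timezone.utc))  # accepted: timezone-aware

    try:
        Job(started_at=datetime(2023, 10, 25, 12, 0))  # naive datetime, no tzinfo
    except ValidationError as exc:
        print(exc)  # reports that the input should have timezone info
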
5 changes: 3 additions & 2 deletions bytes/bytes/timestamping/hashing.py
@@ -2,18 +2,19 @@
import logging
from typing import Any

+from pydantic import AwareDatetime

from bytes.models import (
HashingAlgorithm,
RawData,
SecureHash,
-TimezoneAwareDatetime,
)

logger = logging.getLogger(__name__)


def hash_data(
-data: RawData, datetime: TimezoneAwareDatetime, hash_algo: HashingAlgorithm = HashingAlgorithm.SHA512
+data: RawData, datetime: AwareDatetime, hash_algo: HashingAlgorithm = HashingAlgorithm.SHA512
) -> SecureHash:
"""Hash the raw data"""
timestamp_bytes = str(datetime.timestamp()).encode("utf-8")
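A rough, self-contained sketch of the flow hash_data implements. It is simplified for illustration: the real function operates on RawData rather than plain bytes, supports the other HashingAlgorithm members, and how the data and timestamp are combined here is an assumption. It does show why the timestamp argument must be timezone-aware:

    import hashlib
    from datetime import datetime, timezone


    def hash_data_sketch(data: bytes, dt: datetime) -> str:
        # Serialize the timestamp as in the diff above; a naive datetime would be
        # interpreted in local time and yield an ambiguous POSIX timestamp, which
        # is why the signature now requires pydantic's AwareDatetime.
        timestamp_bytes = str(dt.timestamp()).encode("utf-8")
        return hashlib.sha512(data + timestamp_bytes).hexdigest()


    print(hash_data_sketch(b"raw output", datetime(2023, 10, 25, tzinfo=timezone.utc)))
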
11 changes: 6 additions & 5 deletions bytes/tests/loading.py
@@ -3,6 +3,8 @@
from typing import Any, Dict, Optional
from uuid import UUID

+from pydantic import AwareDatetime

from bytes.config import BASE_DIR
from bytes.models import (
Boefje,
@@ -12,7 +14,6 @@
NormalizerMeta,
RawData,
RawDataMeta,
-TimezoneAwareDatetime,
)


@@ -39,8 +40,8 @@ def get_boefje_meta(
input_ooi=input_ooi,
arguments={"domain": "test.org"},
organization="test",
-started_at=TimezoneAwareDatetime(1000, 10, 10, 10, 10, 10, tzinfo=timezone.utc),
-ended_at=TimezoneAwareDatetime(1000, 10, 10, 10, 10, 11, tzinfo=timezone.utc),
+started_at=AwareDatetime(1000, 10, 10, 10, 10, 10, tzinfo=timezone.utc),
+ended_at=AwareDatetime(1000, 10, 10, 10, 10, 11, tzinfo=timezone.utc),
)


@@ -49,8 +50,8 @@ def get_normalizer_meta(raw_file_id: UUID = UUID("2c9f47db-dfca-4928-b29f-368e64
id=UUID("203eedee-a590-43e1-8f80-6d18ffe529f5"),
raw_data=get_raw_data_meta(raw_file_id),
normalizer=Normalizer(id="kat_test.main"),
-started_at=TimezoneAwareDatetime(1001, 10, 10, 10, 10, 10, tzinfo=timezone.utc),
-ended_at=TimezoneAwareDatetime(1001, 10, 10, 10, 10, 12, tzinfo=timezone.utc),
+started_at=AwareDatetime(year=1001, month=10, day=10, hour=10, minute=10, second=10, tzinfo=timezone.utc),
+ended_at=AwareDatetime(year=1001, month=10, day=10, hour=10, minute=10, second=12, tzinfo=timezone.utc),
)


8 changes: 5 additions & 3 deletions bytes/tests/unit/test_hash.py
@@ -1,14 +1,16 @@
from datetime import timezone
from unittest import TestCase

-from bytes.models import HashingAlgorithm, TimezoneAwareDatetime
+from pydantic import AwareDatetime
+
+from bytes.models import HashingAlgorithm
from bytes.timestamping.hashing import hash_data
from tests.loading import get_raw_data


class HashTests(TestCase):
def test_hash_same_data(self) -> None:
-dt = TimezoneAwareDatetime(year=2022, month=1, day=1, hour=0, minute=0, second=0, tzinfo=timezone.utc)
+dt = AwareDatetime(year=2022, month=1, day=1, hour=0, minute=0, second=0, tzinfo=timezone.utc)

secure_hash = hash_data(data=get_raw_data(), datetime=dt)

@@ -27,7 +29,7 @@ def test_hash_same_data(self) -> None:
)

def test_hash_sha224(self) -> None:
-dt = TimezoneAwareDatetime(year=2022, month=1, day=1, hour=0, minute=0, second=0, tzinfo=timezone.utc)
+dt = AwareDatetime(year=2022, month=1, day=1, hour=0, minute=0, second=0, tzinfo=timezone.utc)

secure_hash = hash_data(data=get_raw_data(), datetime=dt, hash_algo=HashingAlgorithm.SHA224)

2 changes: 1 addition & 1 deletion keiko/keiko/keiko.py
@@ -183,7 +183,7 @@ def generate_report(
)

json_output_file_path = output_file.with_suffix(".keiko.json")
-json_output_file_path.write_text(report_data.model_dump_json())
+json_output_file_path.write_text(report_data.model_dump_json(indent=4))

# run pdflatex
cmd = [
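For reference, what the new indent=4 argument changes: the JSON written next to the report is pretty-printed instead of emitted as a single compact line. ReportData below is a stand-in model, not keiko's real one:

    from typing import List

    from pydantic import BaseModel


    class ReportData(BaseModel):  # stand-in for the real report model
        title: str
        findings: List[str]


    data = ReportData(title="example", findings=["a", "b"])
    print(data.model_dump_json())          # {"title":"example","findings":["a","b"]}
    print(data.model_dump_json(indent=4))  # same content, spread over indented lines
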
2 changes: 1 addition & 1 deletion keiko/keiko/settings.py
@@ -15,7 +15,7 @@ class Settings(BaseSettings):
templates_folder: DirectoryPath = Field("templates", description="Folder containing the templates")
glossaries_folder: DirectoryPath = Field("glossaries", description="Folder containing the glossaries")
assets_folder: DirectoryPath = Field("assets", description="Folder containing the assets")
reports_folder: DirectoryPath = Field("reports", description="Output folder containing the reports")
reports_folder: DirectoryPath = Field("/reports", description="Output folder containing the reports")

span_export_grpc_endpoint: Optional[AnyHttpUrl] = Field(
None, description="OpenTelemetry endpoint", validation_alias="SPAN_EXPORT_GRPC_ENDPOINT"
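A small sketch of how this setting resolves, assuming pydantic-settings' default environment-variable mapping (keiko's actual Settings may configure this differently). DirectoryPath validates that the directory exists, and the new default is the absolute path /reports rather than a path relative to the working directory:

    import os

    from pydantic import DirectoryPath, Field
    from pydantic_settings import BaseSettings


    class Settings(BaseSettings):
        reports_folder: DirectoryPath = Field("/reports", description="Output folder containing the reports")


    # DirectoryPath requires the directory to exist, so point it somewhere real
    # for this demo; in deployment /reports would be mounted or created.
    os.environ["REPORTS_FOLDER"] = "/tmp"
    print(Settings().reports_folder)  # /tmp
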
9 changes: 4 additions & 5 deletions octopoes/octopoes/api/models.py
@@ -2,10 +2,9 @@
from datetime import datetime
from typing import Any, List, Optional

-from pydantic import BaseModel, Field
+from pydantic import BaseModel, Field, AwareDatetime

from octopoes.models import Reference
-from octopoes.models.datetime import TimezoneAwareDatetime
from octopoes.models.types import OOIType


@@ -24,7 +23,7 @@ class _BaseObservation(BaseModel):
method: str
source: Reference
result: List[OOIType]
-valid_time: TimezoneAwareDatetime
+valid_time: AwareDatetime
task_id: uuid.UUID


@@ -56,13 +55,13 @@ class ValidatedObservation(_BaseObservation):
"""Used by Octopoes API to validate and parse correctly"""

result: List[OOIType]
-valid_time: TimezoneAwareDatetime
+valid_time: AwareDatetime


class ValidatedDeclaration(BaseModel):
"""Used by Octopoes API to validate and parse correctly"""

ooi: OOIType
-valid_time: TimezoneAwareDatetime
+valid_time: AwareDatetime
method: Optional[str] = "manual"
task_id: Optional[uuid.UUID] = Field(default_factory=lambda: uuid.uuid4())
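Since these are the models the Octopoes API parses request payloads into, the practical effect of AwareDatetime on valid_time is that ISO 8601 timestamps must carry a UTC offset. A trimmed stand-in for illustration (the real _BaseObservation also has method, source, result and task_id fields):

    from pydantic import AwareDatetime, BaseModel, ValidationError


    class Observation(BaseModel):  # trimmed stand-in for _BaseObservation
        valid_time: AwareDatetime


    print(Observation(valid_time="2023-10-25T12:00:00+02:00").valid_time)  # parsed, offset preserved

    try:
        Observation(valid_time="2023-10-25T12:00:00")  # no offset
    except ValidationError:
        print("naive timestamps are rejected, as with the old TimezoneAwareDatetime")
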
6 changes: 3 additions & 3 deletions octopoes/octopoes/api/router.py
@@ -5,6 +5,7 @@
from typing import Generator, List, Optional, Set, Type

from fastapi import APIRouter, Body, Depends, HTTPException, Path, Query, status
+from pydantic import AwareDatetime
from requests import RequestException

from octopoes.api.models import ServiceHealth, ValidatedDeclaration, ValidatedObservation
@@ -26,7 +27,6 @@
ScanProfileBase,
ScanProfileType,
)
-from octopoes.models.datetime import TimezoneAwareDatetime
from octopoes.models.exception import ObjectNotFoundException
from octopoes.models.explanation import InheritanceSection
from octopoes.models.ooi.findings import Finding, RiskLevelSeverity
@@ -49,13 +49,13 @@ def extract_client(client: str = Path(...)) -> str:
return client


-def extract_valid_time(valid_time: Optional[TimezoneAwareDatetime] = Query(None)) -> datetime:
+def extract_valid_time(valid_time: Optional[AwareDatetime] = Query(None)) -> datetime:
if valid_time is None:
return datetime.now(timezone.utc)
return valid_time


-def extract_required_valid_time(valid_time: TimezoneAwareDatetime) -> datetime:
+def extract_required_valid_time(valid_time: AwareDatetime) -> datetime:
return valid_time


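A self-contained sketch of how the updated dependency behaves in a FastAPI route; the /objects endpoint is invented for illustration, while extract_valid_time mirrors the function in the diff: omit valid_time and the current time in UTC is used, pass one and it must be timezone-aware.

    from datetime import datetime, timezone
    from typing import Optional

    from fastapi import Depends, FastAPI, Query
    from pydantic import AwareDatetime

    app = FastAPI()


    def extract_valid_time(valid_time: Optional[AwareDatetime] = Query(None)) -> datetime:
        if valid_time is None:
            return datetime.now(timezone.utc)
        return valid_time


    @app.get("/objects")
    def list_objects(valid_time: datetime = Depends(extract_valid_time)) -> dict:
        # e.g. GET /objects?valid_time=2023-10-25T12:00:00%2B00:00
        return {"valid_time": valid_time.isoformat()}
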
5 changes: 0 additions & 5 deletions octopoes/octopoes/models/datetime.py
@@ -1,15 +1,10 @@
from datetime import datetime

-from pydantic import AfterValidator
from pydantic.v1.datetime_parse import parse_datetime
-from typing_extensions import Annotated


def _validate_timezone_aware_datetime(value: datetime) -> datetime:
parsed = parse_datetime(value)
if parsed.tzinfo is None or parsed.tzinfo.utcoffset(parsed) is None:
raise ValueError(f"{parsed} is not timezone aware")
return parsed


-TimezoneAwareDatetime = Annotated[datetime, AfterValidator(_validate_timezone_aware_datetime)]
2 changes: 1 addition & 1 deletion poetry.lock


1 change: 0 additions & 1 deletion pyproject.toml
@@ -107,7 +106,6 @@ sphinxcontrib-mermaid = "^0.9.2"
myst-parser = "^2.0.0"
settings-doc = "^3.0.0"
colorama = "0.4.6" # Required on all platforms, not just win32
pydantic = "^2.4.2"

[tool.poetry.group.dev.dependencies]
pre-commit = "3.2.2"
17 changes: 1 addition & 16 deletions rocky/poetry.lock


1 change: 0 additions & 1 deletion rocky/pyproject.toml
@@ -58,7 +58,6 @@ whitenoise = {extras = ["brotli"], version = "^6.5.0"}
opentelemetry-instrumentation = "^0.41b0"
opentelemetry-instrumentation-fastapi = "^0.41b0"
granian = "^0.7.0"
pydantic-settings = "^2.0.3"


[tool.poetry.group.dev.dependencies]
3 changes: 0 additions & 3 deletions rocky/requirements-dev.txt
@@ -1163,9 +1163,6 @@ pydantic-core==2.10.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531 \
--hash=sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6 \
--hash=sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521
-pydantic-settings==2.0.3 ; python_version >= "3.8" and python_version < "4.0" \
-    --hash=sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945 \
-    --hash=sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625
pydantic==2.4.2 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7 \
--hash=sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1
3 changes: 0 additions & 3 deletions rocky/requirements.txt
@@ -818,9 +818,6 @@ pydantic-core==2.10.1 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:f7c2b8eb9fc872e68b46eeaf835e86bccc3a58ba57d0eedc109cbb14177be531 \
--hash=sha256:fa7db7558607afeccb33c0e4bf1c9a9a835e26599e76af6fe2fcea45904083a6 \
--hash=sha256:fcb83175cc4936a5425dde3356f079ae03c0802bbdf8ff82c035f8a54b333521
-pydantic-settings==2.0.3 ; python_version >= "3.8" and python_version < "4.0" \
-    --hash=sha256:962dc3672495aad6ae96a4390fac7e593591e144625e5112d359f8f67fb75945 \
-    --hash=sha256:ddd907b066622bd67603b75e2ff791875540dc485b7307c4fffc015719da8625
pydantic==2.4.2 ; python_version >= "3.8" and python_version < "4.0" \
--hash=sha256:94f336138093a5d7f426aac732dcfe7ab4eb4da243c88f891d65deb4a2556ee7 \
--hash=sha256:bc3ddf669d234f4220e6e1c4d96b061abe0998185a8d7855c0126782b7abc8c1
