Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

chore: improve typing and tracing of BA reports #402

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 5 additions & 1 deletion shared/bundle_analysis/db_migrations.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,6 @@
from collections.abc import Callable

import sentry_sdk
from sqlalchemy import text
from sqlalchemy.orm import Session

Expand Down Expand Up @@ -26,7 +29,7 @@ def __init__(self, db_session: Session, from_version: int, to_version: int):

# Mapping of the schema_version number to the migration function that needs to run
# {x: fcn} means to bring version x-1 to x, fcn must be run
self.migrations = {
self.migrations: dict[int, Callable[[Session], None]] = {
2: add_gzip_size,
3: add_is_cached,
4: modify_gzip_size_nullable,
Expand All @@ -38,6 +41,7 @@ def update_schema_version(self, version):
"""
self.db_session.execute(text(stmt))

@sentry_sdk.trace
def migrate(self):
    """
    Run every pending schema migration, in order, to bring the database
    from ``self.from_version`` up to ``self.to_version``.

    Traced via ``sentry_sdk.trace`` so migration time shows up in
    performance monitoring.
    """
    # Each entry {x: fcn} in self.migrations upgrades schema version x-1 to x,
    # so iterate from the version right after the current one through to_version.
    for version in range(self.from_version + 1, self.to_version + 1):
        self.migrations[version](self.db_session)
Expand Down
11 changes: 7 additions & 4 deletions shared/bundle_analysis/parser.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,11 +4,12 @@
from sqlalchemy.orm import Session as DbSession

from shared.bundle_analysis.parsers import ParserInterface, ParserV1, ParserV2
from shared.bundle_analysis.parsers.base import ParserTrait

log = logging.getLogger(__name__)


PARSER_VERSION_MAPPING = {
PARSER_VERSION_MAPPING: dict[str, type[ParserTrait]] = {
"1": ParserV1,
"2": ParserV2,
}
Expand All @@ -23,7 +24,7 @@ def __init__(self, path: str, db_session: DbSession):
self.path = path
self.db_session = db_session

def get_proper_parser(self) -> object:
def get_proper_parser(self) -> ParserTrait:
error = None
try:
with open(self.path, "rb") as f:
Expand All @@ -33,11 +34,13 @@ def get_proper_parser(self) -> object:
selected_parser = PARSER_VERSION_MAPPING.get(value)
if selected_parser is None:
error = f"parser not implemented for version {value}"
if not issubclass(selected_parser, ParserInterface):
elif not issubclass(selected_parser, ParserInterface):
error = "invalid parser implementation"
return selected_parser(self.db_session)
else:
return selected_parser(self.db_session)
error = "version does not exist in bundle file"
except IOError:
error = "unable to open file"
if error:
raise Exception(f"Couldn't parse bundle: {error}")
raise Exception("Couldn't parse bundle: unknown error")
13 changes: 13 additions & 0 deletions shared/bundle_analysis/parsers/base.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,20 @@
import abc
from typing import Tuple

from sqlalchemy.orm import Session


class ParserInterface(metaclass=abc.ABCMeta):
    """
    Structural (duck-typed) interface for parsers.

    Any class that exposes a callable ``parse`` attribute is treated as a
    virtual subclass, so ``issubclass(SomeParser, ParserInterface)`` works
    without explicit inheritance or registration.
    """

    @classmethod
    def __subclasshook__(cls, subclass):
        # callable(None) is False, so a missing attribute fails the check too.
        return callable(getattr(subclass, "parse", None))


class ParserTrait(abc.ABC):
    """
    Abstract base class for bundle stats parsers (e.g. ParserV1, ParserV2).

    Inheriting from ``abc.ABC`` makes ``@abc.abstractmethod`` actually
    enforceable: the original plain class could be instantiated and did not
    force subclasses to implement ``parse``, because ``abstractmethod`` only
    takes effect under ``ABCMeta``.
    """

    @abc.abstractmethod
    def __init__(self, db_session: "Session"):
        # db_session: SQLAlchemy session the parser writes parsed entities to.
        # (String forward reference keeps this class importable without
        # evaluating the sqlalchemy name at class-creation time.)
        pass

    @abc.abstractmethod
    def parse(self, path: str) -> Tuple[int, str]:
        # Parse the stats file at ``path`` and return an (int, str) pair —
        # presumably an id plus a name; confirm against the concrete parsers.
        pass
3 changes: 2 additions & 1 deletion shared/bundle_analysis/parsers/v1.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
assets_chunks,
chunks_modules,
)
from shared.bundle_analysis.parsers.base import ParserTrait
from shared.bundle_analysis.utils import get_extension

log = logging.getLogger(__name__)
Expand Down Expand Up @@ -57,7 +58,7 @@
"""


class ParserV1:
class ParserV1(ParserTrait):
"""
This does a streaming JSON parse of the stats JSON file referenced by `path`.
It's more complicated than just doing a `json.loads`, but should keep our memory
Expand Down
3 changes: 2 additions & 1 deletion shared/bundle_analysis/parsers/v2.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@
assets_chunks,
chunks_modules,
)
from shared.bundle_analysis.parsers.base import ParserTrait
from shared.bundle_analysis.utils import get_extension

log = logging.getLogger(__name__)
Expand Down Expand Up @@ -58,7 +59,7 @@
"""


class ParserV2:
class ParserV2(ParserTrait):
"""
This does a streaming JSON parse of the stats JSON file referenced by `path`.
It's more complicated than just doing a `json.loads`, but should keep our memory
Expand Down
19 changes: 12 additions & 7 deletions shared/bundle_analysis/report.py
Original file line number Diff line number Diff line change
Expand Up @@ -136,7 +136,7 @@ def asset_reports(
asset_types: Optional[List[AssetType]] = None,
chunk_entry: Optional[bool] = None,
chunk_initial: Optional[bool] = None,
ordering_column: Optional[str] = "size",
ordering_column: str = "size",
ordering_desc: Optional[bool] = True,
) -> Iterator[AssetReport]:
with get_db_session(self.db_path) as session:
Expand Down Expand Up @@ -185,7 +185,7 @@ def total_gzip_size(
"""
Returns the sum of all assets' gzip_size if present plus
the sum of all assets' size if they do not have gzip_size value.
This simulates the amount of data transfer in a realisitic setting,
This simulates the amount of data transfer in a realistic setting,
for those assets that are not compressible we will use its uncompressed size.
"""
with get_db_session(self.db_path) as session:
Expand Down Expand Up @@ -225,14 +225,18 @@ class BundleAnalysisReport:
Report wrapper around multiple bundles for a single commit report.
"""

def __init__(self, db_path: Optional[str] = None):
self.db_path = db_path
if self.db_path is None:
db_path: str

def __init__(self, db_path: str | None = None):
if db_path is None:
_, self.db_path = tempfile.mkstemp(prefix="bundle_analysis_")
else:
self.db_path = db_path
with get_db_session(self.db_path) as db_session:
self._setup(db_session)

def _setup(self, db_session: DbSession):
@sentry_sdk.trace
def _setup(self, db_session: DbSession) -> None:
"""
Creates the schema for a new bundle report database.
"""
Expand Down Expand Up @@ -295,7 +299,7 @@ def ingest(self, path: str, compare_sha: Optional[str] = None) -> Tuple[int, str
session.commit()
rows_deleted = result.rowcount
if rows_deleted > 0:
log.warn(
log.warning(
f"Integrity error detected, deleted {rows_deleted} corrupted rows from {params[0]}"
)

Expand Down Expand Up @@ -452,6 +456,7 @@ def is_cached(self) -> bool:
cached_bundles = session.query(Bundle).filter_by(is_cached=True)
return cached_bundles.count() > 0

@sentry_sdk.trace
def delete_bundle_by_name(self, bundle_name: str) -> None:
with get_db_session(self.db_path) as session:
bundle_to_be_deleted = (
Expand Down
Loading