
Add national risk #15

Merged
merged 3 commits into from Jan 17, 2024
Changes from 2 commits
24 changes: 11 additions & 13 deletions requirements.txt
@@ -6,47 +6,45 @@
 #
 cfgv==3.4.0
     # via pre-commit
-coverage[toml]==7.3.2
-    # via
-    #   coverage
-    #   pytest-cov
-distlib==0.3.7
+coverage[toml]==7.4.0
+    # via pytest-cov
+distlib==0.3.8
     # via virtualenv
 filelock==3.13.1
     # via virtualenv
-greenlet==3.0.1
+greenlet==3.0.3
     # via sqlalchemy
 hdx-python-database==1.2.9
     # via hapi-schema (pyproject.toml)
-identify==2.5.32
+identify==2.5.33
     # via pre-commit
 iniconfig==2.0.0
     # via pytest
 nodeenv==1.8.0
     # via pre-commit
 packaging==23.2
     # via pytest
-platformdirs==4.0.0
+platformdirs==4.1.0
     # via virtualenv
 pluggy==1.3.0
     # via pytest
-pre-commit==3.5.0
+pre-commit==3.6.0
     # via hapi-schema (pyproject.toml)
-pytest==7.4.3
+pytest==7.4.4
     # via
     #   hapi-schema (pyproject.toml)
     #   pytest-cov
 pytest-cov==4.1.0
     # via hapi-schema (pyproject.toml)
 pyyaml==6.0.1
     # via pre-commit
-sqlalchemy==2.0.23
+sqlalchemy==2.0.25
     # via
     #   hapi-schema (pyproject.toml)
     #   hdx-python-database
-typing-extensions==4.8.0
+typing-extensions==4.9.0
     # via sqlalchemy
-virtualenv==20.24.7
+virtualenv==20.25.0
     # via pre-commit
 
 # The following packages are considered to be unsafe in a requirements file:
90 changes: 90 additions & 0 deletions src/hapi_schema/db_national_risk.py
@@ -0,0 +1,90 @@
"""NationalRisk table and view."""
from datetime import datetime

from sqlalchemy import (
DateTime,
Float,
ForeignKey,
Integer,
Text,
select,
text,
)
from sqlalchemy.orm import Mapped, mapped_column, relationship

from hapi_schema.db_dataset import DBDataset
from hapi_schema.db_location import DBLocation
from hapi_schema.db_resource import DBResource
from hapi_schema.utils.base import Base
from hapi_schema.utils.view_params import ViewParams


class DBNationalRisk(Base):
__tablename__ = "national_risk"

id: Mapped[int] = mapped_column(Integer, primary_key=True)
resource_ref: Mapped[int] = mapped_column(
ForeignKey("resource.id", onupdate="CASCADE", ondelete="CASCADE"),
nullable=False,
)
location_ref: Mapped[int] = mapped_column(
ForeignKey("location.id", onupdate="CASCADE"), nullable=False
)
risk_class: Mapped[int] = mapped_column(Integer, nullable=False)
global_rank: Mapped[int] = mapped_column(Integer, nullable=False)
overall_risk: Mapped[float] = mapped_column(Float, nullable=False)
hazard_exposure_risk: Mapped[float] = mapped_column(Float, nullable=False)
vulnerability_risk: Mapped[float] = mapped_column(Float, nullable=False)
coping_capacity_risk: Mapped[float] = mapped_column(Float, nullable=False)
meta_missing_indicators_pct: Mapped[float] = mapped_column(
Float, nullable=False
b-j-mills marked this conversation as resolved.
Show resolved Hide resolved
)
meta_avg_recentness_years: Mapped[float] = mapped_column(
Float, nullable=False
b-j-mills marked this conversation as resolved.
Show resolved Hide resolved
)
reference_period_start: Mapped[datetime] = mapped_column(
DateTime, nullable=False, index=True
)
reference_period_end: Mapped[datetime] = mapped_column(
DateTime, nullable=True, server_default=text("NULL")
b-j-mills marked this conversation as resolved.
Show resolved Hide resolved
)
source_data: Mapped[str] = mapped_column(Text, nullable=True)

resource = relationship("DBResource")
location = relationship("DBLocation")


view_params_national_risk = ViewParams(
name="national_risk_view",
metadata=Base.metadata,
selectable=select(
b-j-mills marked this conversation as resolved.
Show resolved Hide resolved
*DBNationalRisk.__table__.columns,
DBDataset.hdx_id.label("dataset_hdx_id"),
DBDataset.hdx_stub.label("dataset_hdx_stub"),
DBDataset.title.label("dataset_title"),
DBDataset.hdx_provider_stub.label("dataset_hdx_provider_stub"),
DBDataset.hdx_provider_name.label("dataset_hdx_provider_name"),
DBResource.hdx_id.label("resource_hdx_id"),
DBResource.name.label("resource_name"),
DBResource.update_date.label("resource_update_date"),
DBLocation.code.label("location_code"),
DBLocation.name.label("location_name"),
).select_from(
# Join risk to loc
DBNationalRisk.__table__.join(
DBLocation.__table__,
DBNationalRisk.location_ref == DBLocation.id,
isouter=True,
)
# Join risk to resource to dataset
.join(
DBResource.__table__,
DBNationalRisk.resource_ref == DBResource.id,
isouter=True,
).join(
DBDataset.__table__,
DBResource.dataset_ref == DBDataset.id,
isouter=True,
)
),
)
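
As a quick illustration of how view_params_national_risk is consumed (mirroring the new test at the end of this PR), here is a minimal sketch, assuming an in-memory SQLite engine and assuming that build_view() registers the view's CREATE/DROP DDL on Base.metadata so that create_all() also creates the view:

    # Illustrative sketch only, not part of this PR.
    from sqlalchemy import create_engine, select
    from sqlalchemy.orm import Session

    from hdx.database.views import build_view
    from hapi_schema.db_national_risk import view_params_national_risk
    from hapi_schema.utils.base import Base

    # Build the view first so its DDL is attached to the metadata (assumption),
    # then create tables and the view on a throwaway in-memory database.
    view_national_risk = build_view(view_params_national_risk.__dict__)
    engine = create_engine("sqlite://")
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        # With no rows inserted this returns an empty result, but it confirms the
        # joined columns (dataset_hdx_id, resource_hdx_id, location_code, ...) exist.
        rows = session.execute(select(view_national_risk)).all()
        print(rows)
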
3 changes: 3 additions & 0 deletions tests/conftest.py
@@ -12,6 +12,7 @@
 from hapi_schema.db_ipc_phase import DBIpcPhase
 from hapi_schema.db_ipc_type import DBIpcType
 from hapi_schema.db_location import DBLocation
+from hapi_schema.db_national_risk import DBNationalRisk
 from hapi_schema.db_operational_presence import (
     DBOperationalPresence,
 )
@@ -33,6 +34,7 @@
 from sample_data.data_ipc_phase import data_ipc_phase
 from sample_data.data_ipc_type import data_ipc_type
 from sample_data.data_location import data_location
+from sample_data.data_national_risk import data_national_risk
 from sample_data.data_operational_presence import data_operational_presence
 from sample_data.data_org import data_org
 from sample_data.data_org_type import data_org_type
@@ -66,6 +68,7 @@ def engine():
         session.execute(insert(DBIpcType), data_ipc_type)
         session.execute(insert(DBGender), data_gender)
         session.execute(insert(DBAgeRange), data_age_range)
+        session.execute(insert(DBNationalRisk), data_national_risk)
         session.execute(insert(DBPopulation), data_population)
         session.execute(insert(DBOperationalPresence), data_operational_presence)
         session.execute(insert(DBFoodSecurity), data_food_security)
20 changes: 20 additions & 0 deletions tests/sample_data/data_national_risk.py
@@ -0,0 +1,20 @@
from datetime import datetime

data_national_risk = [
    dict(
        id=1,
        resource_ref=1,
        location_ref=1,
        risk_class=5,
        global_rank=4,
        overall_risk=8.1,
        hazard_exposure_risk=8.7,
        vulnerability_risk=8.5,
        coping_capacity_risk=7.1,
        meta_missing_indicators_pct=8,
        meta_avg_recentness_years=0.26,
        reference_period_start=datetime(2024, 1, 1),
        reference_period_end=datetime(2024, 12, 31),
        source_data="DATA,DATA,DATA",
    ),
]
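
For illustration, this sample row can be loaded the same way the conftest fixture does it. A minimal standalone sketch, assuming an in-memory SQLite engine; the resource and location parent rows that resource_ref=1 and location_ref=1 point at are omitted here, which works only because SQLite does not enforce foreign keys by default:

    # Sketch only: mirrors the conftest pattern for loading sample data.
    from sqlalchemy import create_engine, insert
    from sqlalchemy.orm import Session

    from hapi_schema.db_national_risk import DBNationalRisk
    from hapi_schema.utils.base import Base
    from sample_data.data_national_risk import data_national_risk

    engine = create_engine("sqlite://")  # throwaway in-memory database
    Base.metadata.create_all(engine)

    with Session(engine) as session:
        session.execute(insert(DBNationalRisk), data_national_risk)
        session.commit()
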
19 changes: 19 additions & 0 deletions tests/test_national_risk_view.py
@@ -0,0 +1,19 @@
from hdx.database.views import build_view

from hapi_schema.db_national_risk import view_params_national_risk


def test_national_risk_view(run_view_test):
    """Check that national risk references other tables."""
    view_national_risk = build_view(view_params_national_risk.__dict__)
    run_view_test(
        view=view_national_risk,
        whereclause=(
            view_national_risk.c.id == 1,
            view_national_risk.c.dataset_hdx_id
            == "c3f001fa-b45b-464c-9460-1ca79fd39b40",
            view_national_risk.c.resource_hdx_id
            == "90deb235-1bf5-4bae-b231-3393222c2d01",
            view_national_risk.c.location_code == "FOO",
        ),
    )