Commit e34fe00: Fix some uncovered issues

VladimirFilonov committed Nov 13, 2024
1 parent b996e7a
Showing 8 changed files with 29 additions and 61 deletions.
2 changes: 1 addition & 1 deletion keep-ui/app/alerts/alert-associate-incident-modal.tsx
@@ -44,7 +44,7 @@ const AlertAssociateIncidentModal = ({
Authorization: `Bearer ${session?.accessToken}`,
"Content-Type": "application/json",
},
- body: JSON.stringify(alerts.map(({ event_id }) => event_id)),
+ body: JSON.stringify(alerts.map(({ fingerprint }) => fingerprint)),
});
if (response.ok) {
handleSuccess();
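
For context, a minimal sketch of the same association request issued outside the UI, in Python. The endpoint path, token handling, and helper name are illustrative assumptions; only the shape of the JSON body (a list of alert fingerprints rather than event IDs) comes from this commit.

import requests

def associate_alerts_with_incident(api_url, token, incident_id, alerts):
    # The backend now expects alert fingerprints, not event IDs.
    fingerprints = [alert["fingerprint"] for alert in alerts]
    return requests.post(
        f"{api_url}/incidents/{incident_id}/alerts",
        headers={
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
        json=fingerprints,
    )
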
2 changes: 1 addition & 1 deletion keep-ui/app/incidents/[id]/alerts/incident-alert-menu.tsx
@@ -23,7 +23,7 @@ export default function IncidentAlertMenu({ incidentId, alert }: Props) {
Authorization: `Bearer ${session?.accessToken}`,
"Content-Type": "application/json",
},
- body: JSON.stringify([alert.event_id]),
+ body: JSON.stringify([alert.fingerprint]),
}).then((response) => {
if (response.ok) {
toast.success("Alert removed from incident successfully", {
1 change: 1 addition & 0 deletions keep-ui/entities/incidents/model/models.ts
@@ -31,6 +31,7 @@ export interface IncidentDto {
merged_into_incident_id: string;
merged_by: string;
merged_at: Date;
+ fingerprint: string;
}

export interface IncidentCandidateDto {
26 changes: 13 additions & 13 deletions keep/api/bl/incidents_bl.py
@@ -94,51 +94,51 @@ def create_incident(
return new_incident_dto

async def add_alerts_to_incident(
- self, incident_id: UUID, alert_ids: List[UUID]
+ self, incident_id: UUID, alert_fingerprints: List[str]
) -> None:
self.logger.info(
"Adding alerts to incident",
extra={"incident_id": incident_id, "alert_ids": alert_ids},
extra={"incident_id": incident_id, "alert_fingerprints": alert_fingerprints},
)
incident = get_incident_by_id(tenant_id=self.tenant_id, incident_id=incident_id)
if not incident:
raise HTTPException(status_code=404, detail="Incident not found")

- add_alerts_to_incident_by_incident_id(self.tenant_id, incident_id, alert_ids)
+ add_alerts_to_incident_by_incident_id(self.tenant_id, incident_id, alert_fingerprints)
self.logger.info(
"Alerts added to incident",
extra={"incident_id": incident_id, "alert_ids": alert_ids},
extra={"incident_id": incident_id, "alert_fingerprints": alert_fingerprints},
)
- self.__update_elastic(incident_id, alert_ids)
+ self.__update_elastic(incident_id, alert_fingerprints)
self.logger.info(
"Alerts pushed to elastic",
extra={"incident_id": incident_id, "alert_ids": alert_ids},
extra={"incident_id": incident_id, "alert_fingerprints": alert_fingerprints},
)
self.__update_client_on_incident_change(incident_id)
self.logger.info(
"Client updated on incident change",
extra={"incident_id": incident_id, "alert_ids": alert_ids},
extra={"incident_id": incident_id, "alert_fingerprints": alert_fingerprints},
)
incident_dto = IncidentDto.from_db_incident(incident)
self.__run_workflows(incident_dto, "updated")
self.logger.info(
"Workflows run on incident",
extra={"incident_id": incident_id, "alert_ids": alert_ids},
extra={"incident_id": incident_id, "alert_fingerprints": alert_fingerprints},
)
await self.__generate_summary(incident_id, incident)
self.logger.info(
"Summary generated",
extra={"incident_id": incident_id, "alert_ids": alert_ids},
extra={"incident_id": incident_id, "alert_fingerprints": alert_fingerprints},
)

- def __update_elastic(self, incident_id: UUID, alert_ids: List[UUID]):
+ def __update_elastic(self, incident_id: UUID, alert_fingerprints: List[str]):
try:
elastic_client = ElasticClient(self.tenant_id)
if elastic_client.enabled:
db_alerts, _ = get_incident_alerts_by_incident_id(
tenant_id=self.tenant_id,
incident_id=incident_id,
- limit=len(alert_ids),
+ limit=len(alert_fingerprints),
)
enriched_alerts_dto = convert_db_alerts_to_dto_alerts(
db_alerts, with_incidents=True
@@ -205,7 +205,7 @@ async def __generate_summary(self, incident_id: UUID, incident: Incident):
)

def delete_alerts_from_incident(
- self, incident_id: UUID, alert_ids: List[UUID]
+ self, incident_id: UUID, alert_fingerprints: List[str]
) -> None:
self.logger.info(
"Fetching incident",
@@ -218,7 +218,7 @@
if not incident:
raise HTTPException(status_code=404, detail="Incident not found")

- remove_alerts_to_incident_by_incident_id(self.tenant_id, incident_id, alert_ids)
+ remove_alerts_to_incident_by_incident_id(self.tenant_id, incident_id, alert_fingerprints)

def delete_incident(self, incident_id: UUID) -> None:
self.logger.info(
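
A small usage sketch of the renamed business-layer method, mirroring the call site in keep/api/routes/incidents.py further down; the wrapper function and the fingerprint values are placeholders, not part of the commit.

from uuid import UUID

from keep.api.bl.incidents_bl import IncidentBl

async def attach_alerts(tenant_id: str, session, pusher_client, incident_id: UUID):
    # Hypothetical call site: alerts are now referenced by fingerprint,
    # not by alert UUID.
    incident_bl = IncidentBl(tenant_id, session, pusher_client)
    await incident_bl.add_alerts_to_incident(
        incident_id=incident_id,
        alert_fingerprints=["a1b2c3d4e5", "f6a7b8c9d0"],
    )
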
1 change: 0 additions & 1 deletion keep/api/core/db.py
@@ -3280,7 +3280,6 @@ def update_incident_from_dto_by_id(
Incident.tenant_id == tenant_id,
Incident.id == incident_id,
)
- .options(joinedload(Incident.alerts))
).first()

if not incident:
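
The eager load of Incident.alerts is dropped from the incident lookup used by update_incident_from_dto_by_id. A rough sketch of the slimmer query that remains, assuming the SQLModel session style used elsewhere in keep/api/core/db.py; the helper name and import path are assumptions.

from sqlmodel import select

from keep.api.models.db.alert import Incident  # import path assumed

def fetch_incident_for_update(session, tenant_id: str, incident_id):
    # Alerts are no longer joined in eagerly here; the update path only
    # needs the incident row itself.
    return session.exec(
        select(Incident).where(
            Incident.tenant_id == tenant_id,
            Incident.id == incident_id,
        )
    ).first()
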
@@ -18,53 +18,19 @@

# revision identifiers, used by Alembic.
revision = "bdae8684d0b4"
down_revision = "ef0b5b0df41c"
down_revision = "620b6c048091"
branch_labels = None
depends_on = None

migration_metadata = sa.MetaData()
- #
- # alert_to_incident_table = sa.Table(
- #     'alerttoincident',
- #     migration_metadata,
- #     sa.Column("tenant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
- #     sa.Column('alert_id', UUID(as_uuid=False), sa.ForeignKey('alert.id', ondelete='CASCADE'), primary_key=True),
- #     sa.Column('incident_id', UUID(as_uuid=False), sa.ForeignKey('incident.id', ondelete='CASCADE'), primary_key=True),
- #     sa.Column("timestamp", sa.DateTime(), nullable=False, server_default=sa.func.current_timestamp()),
- #     sa.Column("is_created_by_ai", sa.Boolean(), nullable=False, server_default=expression.false()),
- #     sa.Column("deleted_at", sa.DateTime(), nullable=False, server_default="1000-01-01 00:00:00"),
- #
- # )
- #
- # # The following code will shoow SA warning about dialect, so we suppress it.
- # with warnings.catch_warnings():
- #     warnings.simplefilter("ignore", category=sa_exc.SAWarning)
- #     incident_table = sa.Table(
- #         'incident',
- #         migration_metadata,
- #         sa.Column('id', UUID(as_uuid=False), primary_key=True),
- #         sa.Column('alerts_count', sa.Integer, default=0),
- #         sa.Column('affected_services', sa.JSON, default_factory=list),
- #         sa.Column('sources', sa.JSON, default_factory=list)
- #     )
- #
- #     alert_table = sa.Table(
- #         'alert',
- #         migration_metadata,
- #         sa.Column('id', UUID(as_uuid=False), primary_key=True),
- #         sa.Column('fingerprint', sa.String),
- #         sa.Column('provider_type', sa.String),
- #         sa.Column('event', sa.JSON)
- #     )

- #

def populate_db():
session = Session(op.get_bind())

if session.bind.dialect.name == "postgresql":
migrate_lastalert_query = """
- insert into lastalert (fingerprint, alert_id, timestamp)
- select alert.fingerprint, alert.id as alert_id, alert.timestamp
+ insert into lastalert (tenant_id, fingerprint, alert_id, timestamp)
+ select alert.tenant_id, alert.fingerprint, alert.id as alert_id, alert.timestamp
from alert
join (
select
@@ -97,8 +63,8 @@ def populate_db():

else:
migrate_lastalert_query = """
- replace into lastalert (fingerprint, alert_id, timestamp)
- select alert.fingerprint, alert.id as alert_id, alert.timestamp
+ replace into lastalert (tenant_id, fingerprint, alert_id, timestamp)
+ select alert.tenant_id, alert.fingerprint, alert.id as alert_id, alert.timestamp
from alert
join (
select
@@ -132,6 +98,7 @@ def populate_db():
def upgrade() -> None:
op.create_table(
"lastalert",
sa.Column("tenant_id", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("fingerprint", sqlmodel.sql.sqltypes.AutoString(), nullable=False),
sa.Column("alert_id", sqlmodel.sql.sqltypes.GUID(), nullable=False),
sa.Column("timestamp", sa.DateTime(), nullable=False),
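
With tenant_id now stored and backfilled on lastalert, lookups against the table are expected to be scoped per tenant. A sketch of such a read using a plain text query like the ones in this migration; the helper itself is illustrative and not part of the commit.

from sqlalchemy import text

def get_last_alert_id(session, tenant_id: str, fingerprint: str):
    # Filter on both tenant_id and fingerprint so one tenant's fingerprints
    # can never resolve to another tenant's alerts.
    row = session.execute(
        text(
            "select alert_id from lastalert "
            "where tenant_id = :tenant_id and fingerprint = :fingerprint"
        ),
        {"tenant_id": tenant_id, "fingerprint": fingerprint},
    ).first()
    return row[0] if row else None
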
8 changes: 4 additions & 4 deletions keep/api/routes/incidents.py
@@ -460,7 +460,7 @@ def get_incident_workflows(
)
async def add_alerts_to_incident(
incident_id: UUID,
- alert_ids: List[UUID],
+ alert_fingerprints: List[str],
is_created_by_ai: bool = False,
authenticated_entity: AuthenticatedEntity = Depends(
IdentityManagerFactory.get_auth_verifier(["write:incident"])
@@ -470,7 +470,7 @@ async def add_alerts_to_incident(
):
tenant_id = authenticated_entity.tenant_id
incident_bl = IncidentBl(tenant_id, session, pusher_client)
- await incident_bl.add_alerts_to_incident(incident_id, alert_ids)
+ await incident_bl.add_alerts_to_incident(incident_id, alert_fingerprints)
return Response(status_code=202)


@@ -482,7 +482,7 @@ async def add_alerts_to_incident(
)
def delete_alerts_from_incident(
incident_id: UUID,
- alert_ids: List[UUID],
+ fingerprints: List[str],
authenticated_entity: AuthenticatedEntity = Depends(
IdentityManagerFactory.get_auth_verifier(["write:incident"])
),
@@ -492,7 +492,7 @@ def delete_alerts_from_incident(
tenant_id = authenticated_entity.tenant_id
incident_bl = IncidentBl(tenant_id, session, pusher_client)
incident_bl.delete_alerts_from_incident(
- incident_id=incident_id, alert_ids=alert_ids
+ incident_id=incident_id, alert_fingerprints=fingerprints
)
return Response(status_code=202)

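
For symmetry with the earlier POST sketch, a hedged example of calling the delete route with the new payload; the endpoint path and auth handling are assumptions that mirror the route signature above.

import requests

def remove_alert_from_incident(api_url, token, incident_id, fingerprint):
    # The request body is now a JSON list of fingerprints instead of alert UUIDs.
    return requests.delete(
        f"{api_url}/incidents/{incident_id}/alerts",
        headers={
            "Authorization": f"Bearer {token}",
            "Content-Type": "application/json",
        },
        json=[fingerprint],
    )
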
3 changes: 2 additions & 1 deletion keep/api/routes/workflows.py
@@ -190,7 +190,8 @@ def run_workflow(
event_body = body.get("body", {}) or body

# if its event that was triggered by the UI with the Modal
if "test-workflow" in event_body.get("fingerprint", "") or not body:
fingerprint = event_body.get("fingerprint", "")
if fingerprint and "test-workflow" in fingerprint or not body:
# some random
event_body["id"] = event_body.get("fingerprint", "manual-run")
event_body["name"] = event_body.get("fingerprint", "manual-run")
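
The intermediate variable guards against a null fingerprint: event_body.get("fingerprint", "") returns None when the key is present with a null value, and evaluating "test-workflow" in None raises a TypeError. Python's and binds tighter than or, so the new condition is equivalent to (fingerprint and "test-workflow" in fingerprint) or not body. A standalone sketch of the fixed check; the sample payloads are made up.

def is_ui_test_run(body: dict) -> bool:
    event_body = body.get("body", {}) or body
    fingerprint = event_body.get("fingerprint", "")
    # (fingerprint and "test-workflow" in fingerprint) or not body
    return bool(fingerprint and "test-workflow" in fingerprint or not body)

print(is_ui_test_run({"body": {"fingerprint": None}}))                 # False, no TypeError
print(is_ui_test_run({"body": {"fingerprint": "test-workflow-123"}}))  # True
print(is_ui_test_run({}))                                              # True (empty body)
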
