Skip to content

Commit

Permalink
TimelineLog (#1201)
Browse files Browse the repository at this point in the history
  • Loading branch information
tomchop authored Dec 4, 2024
1 parent b94fab4 commit 5da2a36
Show file tree
Hide file tree
Showing 12 changed files with 690 additions and 57 deletions.
5 changes: 4 additions & 1 deletion core/database_arango.py
Original file line number Diff line number Diff line change
Expand Up @@ -679,6 +679,8 @@ def clear_tags(self):
while job.status() != "done":
time.sleep(ASYNC_JOB_WAIT_TIME)

self._tags = {}

def link_to(
self, target, relationship_type: str, description: str
) -> "Relationship":
Expand Down Expand Up @@ -805,6 +807,7 @@ def get_tags(self) -> List[Tuple["TagRelationship", "Tag"]]:
if tag_paths.empty():
return []
relationships = []
self._tags = {}
for path in tag_paths:
tag_data = Tag.load(path["vertices"][1])
edge_data = path["edges"][0]
Expand Down Expand Up @@ -1259,7 +1262,7 @@ def delete(self, all_versions=True):
job = col.delete(self.id)
while job.status() != "done":
time.sleep(ASYNC_JOB_WAIT_TIME)
if self._collection_name == "auditlog":
if self._collection_name in ("auditlog", "timeline"):
return
try:
event_type = message.EventType.delete
Expand Down
76 changes: 74 additions & 2 deletions core/schemas/audit.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,20 @@
import datetime
from typing import ClassVar, Literal
from functools import wraps
from typing import TYPE_CHECKING, Any, ClassVar, Literal

from pydantic import ConfigDict, computed_field
from pydantic import BaseModel, ConfigDict, computed_field

from core import database_arango
from core.schemas.model import YetiModel

if TYPE_CHECKING:
from core.schemas.dfiq import DFIQTypes
from core.schemas.entity import EntityTypes
from core.schemas.indicator import IndicatorTypes
from core.schemas.observable import ObservableTypes

AllObjectTypes = EntityTypes | ObservableTypes | IndicatorTypes | DFIQTypes


class AuditLog(YetiModel, database_arango.ArangoYetiConnector):
model_config = ConfigDict(str_strip_whitespace=True)
Expand All @@ -31,3 +40,66 @@ def root_type(self):
@classmethod
def load(cls, object: dict) -> "AuditLog":
    """Deserialize a raw ArangoDB document dict into an AuditLog instance.

    Args:
        object: the raw document as returned by the database layer.

    Returns:
        A validated AuditLog model built from the document's fields.
    """
    return cls(**object)


class TimelineLog(BaseModel, database_arango.ArangoYetiConnector):
    """A single timeline entry recording an action performed on an object.

    Stored in the "timeline" Arango collection; entries are written by
    log_timeline / log_timeline_tags and queried by target_id.
    """

    model_config = ConfigDict(str_strip_whitespace=True)
    _exclude_overwrite: list[str] = []

    # Arango collection backing this model.
    _collection_name: ClassVar[str] = "timeline"
    _type_filter = None
    _root_type: Literal["timeline"] = "timeline"

    # When the action happened.
    timestamp: datetime.datetime
    # Username (or system identifier) that performed the action.
    actor: str
    # Extended ID of the object the action targeted.
    target_id: str
    # Action name, e.g. "create", "update", "delete", "tag".
    action: str
    # Action-specific payload (e.g. the changed fields).
    details: dict

    @classmethod
    def load(cls, object: dict) -> "TimelineLog":
        """Deserialize a raw ArangoDB document dict into a TimelineLog."""
        return cls(**object)


def log_timeline(
    username: str,
    new: "AllObjectTypes",
    old: "AllObjectTypes | None" = None,
    action: str | None = None,
    details: dict | None = None,
):
    """Record a timeline entry for a change made to an object.

    Args:
        username: actor to attribute the change to.
        new: the object in its new (post-change) state.
        old: the previous state of the object, if this was an update.
        details: explicit payload to store; when omitted, it is computed
            as the diff between ``old`` and ``new`` (or the full dump of
            ``new`` on creation).
    """
    # Default the action unconditionally: TimelineLog.action is a required
    # str, so leaving it None (e.g. when details is passed without an
    # action) would fail validation.
    if not action:
        action = "update" if old else "create"
    if details is None:
        new_dump = new.model_dump()
        if old:
            old_dump = old.model_dump()
            # Only retain fields whose value changed. Keys absent from
            # old_dump (or new_dump) are kept/skipped safely instead of
            # raising KeyError when the two dumps have different key sets.
            details = {
                key: value
                for key, value in new_dump.items()
                if key not in old_dump or old_dump[key] != value
            }
        else:
            details = new_dump
    TimelineLog(
        timestamp=datetime.datetime.now(),
        actor=username,
        target_id=new.extended_id,
        action=action,
        details=details,
    ).save()


def log_timeline_tags(actor: str, obj: "AllObjectTypes", old_tags: list[str]):
    """Record a timeline entry describing a tag change on an object.

    Args:
        actor: username (or system identifier) performing the tagging.
        obj: the freshly tagged object; its current tags are compared
            against ``old_tags`` to compute the delta.
        old_tags: tag names the object had before the change.
    """
    current = set(obj.tags)
    previous = set(old_tags)
    # Store sorted lists rather than sets: sets are unordered and not
    # JSON-serializable, which makes the stored details non-deterministic
    # and fragile to persist.
    details = {
        "removed": sorted(previous - current),
        "added": sorted(current - previous),
    }
    TimelineLog(
        timestamp=datetime.datetime.now(),
        actor=actor,
        target_id=obj.extended_id,
        action="tag",
        details=details,
    ).save()
9 changes: 7 additions & 2 deletions core/schemas/dfiq.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@
from core import database_arango
from core.config.config import yeti_config
from core.helpers import now
from core.schemas import indicator
from core.schemas import audit, indicator
from core.schemas.model import YetiModel

LATEST_SUPPORTED_DFIQ_VERSION = "1.1.0"
Expand All @@ -35,11 +35,14 @@ def custom_null_representer(dumper, data):
yaml.add_representer(type(None), custom_null_representer)


def read_from_data_directory(globpath: str, overwrite: bool = False) -> int:
def read_from_data_directory(
globpath: str, username: str, overwrite: bool = False
) -> int:
"""Read DFIQ files from a directory and add them to the database.
Args:
globpath: Glob path to search for DFIQ files (supports recursion).
username: Username to attribute the changes to.
overwrite: Whether to overwrite existing DFIQs with the same ID.
"""
dfiq_kb = {}
Expand Down Expand Up @@ -76,6 +79,7 @@ def read_from_data_directory(globpath: str, overwrite: bool = False) -> int:
if not dfiq_object.uuid:
dfiq_object.uuid = str(uuid.uuid4())
dfiq_object = dfiq_object.save()
audit.log_timeline(username, dfiq_object, old=db_dfiq)
total_added += 1
except (ValueError, KeyError) as e:
logging.warning("Error processing %s: %s", file, e)
Expand Down Expand Up @@ -119,6 +123,7 @@ def extract_indicators(question: "DFIQQuestion") -> None:
location=step.type,
diamond=indicator.DiamondModel.victim,
).save()
audit.log_timeline("dfiq-indicator-extract", query)
question.link_to(query, "query", "Uses query")

else:
Expand Down
13 changes: 13 additions & 0 deletions core/web/apiv2/audit.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import datetime

from fastapi import APIRouter, HTTPException
from pydantic import BaseModel, ConfigDict

from core.schemas.audit import TimelineLog

router = APIRouter()


@router.get("/timeline/{id:path}")
def trail(id: str):
    """Return every timeline log entry recorded for the given object ID."""
    timeline_entries = TimelineLog.filter({"target_id": id})
    return timeline_entries
26 changes: 19 additions & 7 deletions core/web/apiv2/dfiq.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,11 +3,11 @@
from io import BytesIO
from zipfile import ZipFile

from fastapi import APIRouter, HTTPException, UploadFile, status
from fastapi import APIRouter, HTTPException, Request, UploadFile, status
from fastapi.responses import FileResponse
from pydantic import BaseModel, ConfigDict, ValidationError

from core.schemas import dfiq
from core.schemas import audit, dfiq


# Request schemas
Expand Down Expand Up @@ -94,17 +94,19 @@ def config() -> DFIQConfigResponse:


@router.post("/from_archive")
def from_archive(httpreq: Request, archive: UploadFile) -> dict[str, int]:
    """Uncompresses a ZIP archive and processes the DFIQ content inside it.

    Args:
        httpreq: incoming request; its state carries the authenticated
            username the import is attributed to.
        archive: uploaded ZIP file containing DFIQ YAML files.

    Returns:
        A dict with the total number of DFIQ objects added.
    """
    contents = archive.file.read()
    # Use the context manager so the extracted files are always cleaned up,
    # even if extraction or ingestion raises (the original instance was
    # never explicitly cleaned up).
    with tempfile.TemporaryDirectory() as tempdir:
        # NOTE(review): extractall on user-supplied archives relies on
        # ZipFile's built-in path sanitization; verify member paths if
        # stricter zip-slip protection is required.
        ZipFile(BytesIO(contents)).extractall(path=tempdir)
        total_added = dfiq.read_from_data_directory(
            f"{tempdir}/*/*.yaml", username=httpreq.state.username
        )
    return {"total_added": total_added}


@router.post("/from_yaml")
def new_from_yaml(request: NewDFIQRequest) -> dfiq.DFIQTypes:
def new_from_yaml(httpreq: Request, request: NewDFIQRequest) -> dfiq.DFIQTypes:
"""Creates a new DFIQ object in the database."""
try:
new = dfiq.TYPE_MAPPING[request.dfiq_type].from_yaml(request.dfiq_yaml)
Expand Down Expand Up @@ -141,6 +143,7 @@ def new_from_yaml(request: NewDFIQRequest) -> dfiq.DFIQTypes:
)

new = new.save()
audit.log_timeline(httpreq.state.username, new)

try:
new.update_parents()
Expand Down Expand Up @@ -263,7 +266,7 @@ def validate_dfiq_yaml(request: DFIQValidateRequest) -> DFIQValidateResponse:


@router.patch("/{dfiq_id}")
def patch(request: PatchDFIQRequest, dfiq_id) -> dfiq.DFIQTypes:
def patch(httpreq: Request, request: PatchDFIQRequest, dfiq_id) -> dfiq.DFIQTypes:
"""Modifies an DFIQ object in the database."""
db_dfiq: dfiq.DFIQTypes = dfiq.DFIQBase.get(dfiq_id) # type: ignore
if not db_dfiq:
Expand All @@ -279,8 +282,10 @@ def patch(request: PatchDFIQRequest, dfiq_id) -> dfiq.DFIQTypes:
status_code=400,
detail=f"DFIQ type mismatch: {db_dfiq.type} != {update_data.type}",
)
db_dfiq.get_tags()
updated_dfiq = db_dfiq.model_copy(update=update_data.model_dump())
new = updated_dfiq.save()
audit.log_timeline(httpreq.state.username, new, old=db_dfiq)
new.update_parents()

if request.update_indicators and new.type == dfiq.DFIQType.question:
Expand All @@ -299,7 +304,7 @@ def details(dfiq_id) -> dfiq.DFIQTypes:


@router.delete("/{dfiq_id}")
def delete(dfiq_id: str) -> None:
def delete(httpreq: Request, dfiq_id: str) -> None:
"""Deletes a DFIQ object."""
db_dfiq = dfiq.DFIQBase.get(dfiq_id)
if not db_dfiq:
Expand All @@ -319,8 +324,15 @@ def delete(dfiq_id: str) -> None:
child.parent_ids.remove(db_dfiq.dfiq_id)
if db_dfiq.uuid in child.parent_ids:
child.parent_ids.remove(db_dfiq.uuid)
audit.log_timeline(
httpreq.state.username,
child,
action="delete-parent",
details={"parent": db_dfiq.id},
)
child.save()

audit.log_timeline(httpreq.state.username, db_dfiq, action="delete")
db_dfiq.delete()


Expand Down
20 changes: 13 additions & 7 deletions core/web/apiv2/entities.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from fastapi import APIRouter, HTTPException
from fastapi import APIRouter, HTTPException, Request
from pydantic import BaseModel, ConfigDict, Field, conlist

from core.schemas import graph
from core.schemas import audit, graph
from core.schemas.entity import Entity, EntityType, EntityTypes
from core.schemas.tag import MAX_TAGS_REQUEST

Expand Down Expand Up @@ -58,16 +58,17 @@ class EntityTagResponse(BaseModel):


@router.post("/")
def new(httpreq: Request, request: NewEntityRequest) -> EntityTypes:
    """Persists a new entity, logs its creation, and applies any tags."""
    saved_entity = request.entity.save()
    audit.log_timeline(httpreq.state.username, saved_entity)
    if request.tags:
        saved_entity.tag(request.tags)
    return saved_entity


@router.patch("/{entity_id}")
def patch(request: PatchEntityRequest, entity_id) -> EntityTypes:
def patch(httpreq: Request, request: PatchEntityRequest, entity_id) -> EntityTypes:
"""Modifies entity in the database."""
db_entity: EntityTypes = Entity.get(entity_id)
if not db_entity:
Expand All @@ -77,9 +78,11 @@ def patch(request: PatchEntityRequest, entity_id) -> EntityTypes:
status_code=400,
detail=f"Entity {entity_id} type mismatch. Provided '{request.entity.type}'. Expected '{db_entity.type}'",
)
db_entity.get_tags()
update_data = request.entity.model_dump(exclude_unset=True)
updated_entity = db_entity.model_copy(update=update_data)
new = updated_entity.save()
audit.log_timeline(httpreq.state.username, new, old=db_entity)
return new


Expand All @@ -94,11 +97,12 @@ def details(entity_id) -> EntityTypes:


@router.delete("/{entity_id}")
def delete(httpreq: Request, entity_id: str) -> None:
    """Deletes an Entity, recording the deletion in the timeline.

    Raises:
        HTTPException: 404 if no entity with the given ID exists.
    """
    db_entity = Entity.get(entity_id)
    if not db_entity:
        # Bug fix: the original detail string lacked the f-prefix, so
        # "{entity_id}" was emitted literally instead of being interpolated.
        raise HTTPException(
            status_code=404, detail=f"Entity ID {entity_id} not found"
        )
    audit.log_timeline(httpreq.state.username, db_entity, action="delete")
    db_entity.delete()


Expand All @@ -124,7 +128,7 @@ def search(request: EntitySearchRequest) -> EntitySearchResponse:


@router.post("/tag")
def tag(request: EntityTagRequest) -> EntityTagResponse:
def tag(httpreq: Request, request: EntityTagRequest) -> EntityTagResponse:
"""Tags entities."""
entities = []
for entity_id in request.ids:
Expand All @@ -138,7 +142,9 @@ def tag(request: EntityTagRequest) -> EntityTagResponse:

entity_tags = {}
for db_entity in entities:
db_entity.tag(request.tags, strict=request.strict)
old_tags = [tag[1].name for tag in db_entity.get_tags()]
db_entity = db_entity.tag(request.tags, strict=request.strict)
audit.log_timeline_tags(httpreq.state.username, db_entity, old_tags)
entity_tags[db_entity.extended_id] = db_entity.tags

return EntityTagResponse(tagged=len(entities), tags=entity_tags)
Loading

0 comments on commit 5da2a36

Please sign in to comment.