Skip to content

Python API Reference

briefcase

@capture()

Decorator that captures each call to the wrapped function as a DecisionSnapshot.

from briefcase import capture
@capture(metadata={"team": "ml"})
def my_function(input: str) -> str:
...

DecisionSnapshot

Data model for a single captured call, exposing the recorded input, output, model parameters, execution context, creation time, and metadata.

from briefcase import DecisionSnapshot
snapshot.id # str — unique identifier
snapshot.input # Input — captured input
snapshot.output # Output — captured output
snapshot.model_parameters # ModelParameters
snapshot.execution_context # ExecutionContext
snapshot.created_at # datetime
snapshot.metadata # dict

configure()

Sets library-wide options such as the storage backend, database path, and OpenTelemetry integration.

from briefcase import configure
configure(
storage_backend="sqlite",
database_path="./decisions.db",
enable_otel=True,
)

briefcase.replay

ReplayEngine

Loads a stored snapshot by ID and replays it. (The `strict_mode` flag presumably tightens mismatch handling during replay — verify against the replay module docs.)

from briefcase.replay import ReplayEngine
engine = ReplayEngine(strict_mode=True)
snapshot = engine.load("snapshot-id")
result = engine.replay(snapshot)

briefcase.drift

DriftDetector

Compares two snapshots by ID and returns drift metrics, using a configurable similarity threshold.

from briefcase.drift import DriftDetector
detector = DriftDetector(similarity_threshold=0.95)
metrics = detector.compare("snapshot-a", "snapshot-b")

briefcase.sanitize

Sanitizer

Produces a redacted copy of a snapshot, substituting sensitive values with a configurable replacement string.

from briefcase.sanitize import Sanitizer
sanitizer = Sanitizer(replacement="[REDACTED]")
clean_snapshot = sanitizer.redact(snapshot)

briefcase.storage

Backend Interface

All backends implement:

backend.save(snapshot)
backend.load(snapshot_id) -> DecisionSnapshot
backend.query(filters) -> list[DecisionSnapshot]
backend.delete(snapshot_id)

briefcase.validate

PromptValidationEngine

Validates prompts against a set of registered rules and returns the validation result.

from briefcase.validate import PromptValidationEngine
engine = PromptValidationEngine()
engine.add_rule("max_tokens", max_value=4096)
result = engine.validate(prompt="...")

Rule

Base class for custom validation rules.

from briefcase.validate import Rule
class MyRule(Rule):
def check(self, prompt: str) -> bool:
return "forbidden" not in prompt
engine.add_rule(MyRule())

briefcase.guardrails

GuardrailEnv

Loads policy files and evaluates access decisions for a principal/action/resource triple.

from briefcase.guardrails import GuardrailEnv
env = GuardrailEnv()
env.load_policy("policies/production.cedar")
decision = env.evaluate(principal="user:alice", action="invoke", resource="model:gpt-4")