mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-05-10 03:22:05 +00:00
feat(teams-pipeline): add plugin runtime and operator cli
Third slice of the Microsoft Teams meeting pipeline stack, salvaged onto current main. Adds the standalone teams_pipeline plugin that consumes Graph change notifications from the webhook listener, resolves meeting artifacts (transcript first, recording + STT fallback later), persists job state in a durable store, and exposes an operator CLI for inspection, replay, subscription management, and validation. Design choices follow maintainer review feedback on PR #19815: - Standalone plugin rather than bolted-on core surface (plugins/teams_pipeline/, kind: standalone in plugin.yaml). - Zero new model tools. The agent drives the pipeline by invoking the operator CLI via the terminal tool, guided by the skill that ships with a follow-up PR. - Reuses the existing msgraph_webhook gateway platform for Graph ingress. Pipeline runtime is wired in via bind_gateway_runtime and gated on plugins.enabled so gateways that don't run the plugin boot cleanly. Additions: - plugins/teams_pipeline/: runtime (gateway wiring + config builder), pipeline core, durable SQLite store, subscription maintenance helpers, Graph artifact resolution, operator CLI (list, show, run/replay, fetch dry-run, subscriptions list, subscribe, renew-subscription, delete-subscription, maintain-subscriptions, token-health, validate). - hermes_cli/main.py: second-pass plugin CLI discovery so any standalone plugin registered via ctx.register_cli_command() outside the memory-plugin convention path gets its subcommand wired into argparse without touching core. - gateway/run.py: _teams_pipeline_plugin_enabled() config gate, _wire_teams_pipeline_runtime() binding after adapter setup, and the two runner attributes used by the runtime. Credit to @dlkakbs for the entire plugin implementation.
This commit is contained in:
parent
ea86714cc0
commit
07bbd93337
14 changed files with 3332 additions and 1 deletions
185
tests/gateway/test_teams_pipeline_runtime_wiring.py
Normal file
185
tests/gateway/test_teams_pipeline_runtime_wiring.py
Normal file
|
|
@@ -0,0 +1,185 @@
|
|||
"""Tests for Teams pipeline runtime wiring into the gateway."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import sys
|
||||
from types import ModuleType
|
||||
from types import SimpleNamespace
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from gateway.config import Platform, PlatformConfig
|
||||
from gateway.run import GatewayRunner
|
||||
from plugins.teams_pipeline.runtime import (
|
||||
bind_gateway_runtime,
|
||||
build_pipeline_runtime,
|
||||
build_pipeline_runtime_config,
|
||||
)
|
||||
|
||||
|
||||
def test_gateway_runner_wires_teams_pipeline_runtime(monkeypatch):
    """The runner binds the Teams pipeline runtime when the msgraph adapter is present."""
    runner = GatewayRunner.__new__(GatewayRunner)
    runner.adapters = {Platform.MSGRAPH_WEBHOOK: object()}
    runner._teams_pipeline_runtime_error = None

    # Record every runner handed to the (patched) bind entry point.
    bound_runners: list[object] = []

    def _record_bind(gateway_runner):
        bound_runners.append(gateway_runner)
        return True

    monkeypatch.setattr("plugins.teams_pipeline.runtime.bind_gateway_runtime", _record_bind)

    GatewayRunner._wire_teams_pipeline_runtime(runner)

    # Exactly one bind attempt, with this runner instance.
    assert bound_runners == [runner]
|
||||
|
||||
|
||||
def test_gateway_runner_skips_wiring_without_msgraph_adapter(monkeypatch):
    """No bind attempt is made when the msgraph webhook adapter is absent."""
    runner = GatewayRunner.__new__(GatewayRunner)
    runner.adapters = {Platform.TELEGRAM: MagicMock()}
    runner._teams_pipeline_runtime_error = None

    bind_calls: list[object] = []

    def _record_bind(gateway_runner):
        bind_calls.append(gateway_runner)
        return True

    monkeypatch.setattr("plugins.teams_pipeline.runtime.bind_gateway_runtime", _record_bind)

    GatewayRunner._wire_teams_pipeline_runtime(runner)

    # Only a Telegram adapter is registered, so wiring must be skipped entirely.
    assert bind_calls == []
|
||||
|
||||
|
||||
def test_gateway_runner_skips_wiring_when_teams_pipeline_plugin_disabled(monkeypatch):
    """The plugins.enabled config gate prevents binding even with the adapter present."""
    runner = GatewayRunner.__new__(GatewayRunner)
    runner.adapters = {Platform.MSGRAPH_WEBHOOK: object()}
    runner._teams_pipeline_runtime_error = None

    bind_calls: list[object] = []

    def _record_bind(gateway_runner):
        bind_calls.append(gateway_runner)
        return True

    monkeypatch.setattr("plugins.teams_pipeline.runtime.bind_gateway_runtime", _record_bind)
    # Gateway config reports no enabled plugins, so the gate must reject wiring.
    monkeypatch.setattr(
        "gateway.run._load_gateway_config",
        lambda: {"plugins": {"enabled": []}},
    )

    GatewayRunner._wire_teams_pipeline_runtime(runner)

    assert bind_calls == []
|
||||
|
||||
|
||||
def test_runtime_config_disables_teams_delivery_without_target():
    """With no team/channel target configured, the delivery section is omitted."""
    bare_teams_config = SimpleNamespace(
        platforms={Platform("teams"): PlatformConfig(enabled=True, extra={})}
    )

    runtime_config = build_pipeline_runtime_config(bare_teams_config)

    assert "teams_delivery" not in runtime_config
|
||||
|
||||
|
||||
def test_build_pipeline_runtime_only_wires_sender_when_delivery_configured(monkeypatch):
    """An unconfigured teams platform yields a runtime without a Teams sender."""
    gateway = SimpleNamespace(
        config=SimpleNamespace(
            platforms={Platform("teams"): PlatformConfig(enabled=True, extra={})}
        )
    )

    # Stub out the heavyweight collaborators the builder pulls in.
    stubbed_targets = {
        "plugins.teams_pipeline.runtime.build_graph_client": lambda: object(),
        "plugins.teams_pipeline.runtime.resolve_teams_pipeline_store_path": (
            lambda: "/tmp/teams-pipeline-store.json"
        ),
        "plugins.teams_pipeline.runtime.TeamsPipelineStore": lambda path: {"path": path},
    }
    for target, replacement in stubbed_targets.items():
        monkeypatch.setattr(target, replacement)

    runtime = build_pipeline_runtime(gateway)

    assert runtime.teams_sender is None
|
||||
|
||||
|
||||
def test_build_pipeline_runtime_skips_sender_when_adapter_layer_is_unavailable(monkeypatch):
    """Even with graph delivery fully configured, a broken/empty teams adapter
    module must leave the runtime without a sender instead of crashing."""
    # Delivery is fully configured here (mode/team/channel), unlike the
    # companion test above, so only the adapter-module failure can skip wiring.
    gateway = SimpleNamespace(
        config=SimpleNamespace(
            platforms={
                Platform("teams"): PlatformConfig(
                    enabled=True,
                    extra={
                        "delivery_mode": "graph",
                        "team_id": "team-1",
                        "channel_id": "channel-1",
                    },
                ),
            }
        )
    )

    # Stub the heavyweight collaborators the builder constructs.
    monkeypatch.setattr(
        "plugins.teams_pipeline.runtime.build_graph_client",
        lambda: object(),
    )
    monkeypatch.setattr(
        "plugins.teams_pipeline.runtime.resolve_teams_pipeline_store_path",
        lambda: "/tmp/teams-pipeline-store.json",
    )
    monkeypatch.setattr(
        "plugins.teams_pipeline.runtime.TeamsPipelineStore",
        lambda path: {"path": path},
    )
    # Replace the teams adapter module with an empty ModuleType: any attribute
    # lookup on it fails, simulating an unavailable adapter layer.
    monkeypatch.setitem(
        sys.modules,
        "plugins.platforms.teams.adapter",
        ModuleType("plugins.platforms.teams.adapter"),
    )

    runtime = build_pipeline_runtime(gateway)

    assert runtime.teams_sender is None
|
||||
|
||||
|
||||
def test_bind_gateway_runtime_leaves_scheduler_unchanged_on_failure(monkeypatch):
    """A runtime build failure must not attach a scheduler to the adapter and
    must surface the error message on the gateway runner."""
    class FakeAdapter:
        def __init__(self):
            # Stays None unless bind_gateway_runtime attaches a scheduler.
            self.scheduler = None

        def set_notification_scheduler(self, scheduler):
            self.scheduler = scheduler

    gateway = SimpleNamespace(
        adapters={Platform.MSGRAPH_WEBHOOK: FakeAdapter()},
        config=SimpleNamespace(
            platforms={
                Platform("teams"): PlatformConfig(enabled=True, extra={}),
            }
        ),
        _teams_pipeline_runtime=None,
        _teams_pipeline_runtime_error=None,
    )

    # Lambda-that-raises idiom: .throw() on a fresh generator raises the
    # given exception at call time, without needing a def.
    monkeypatch.setattr(
        "plugins.teams_pipeline.runtime.build_pipeline_runtime",
        lambda _gateway: (_ for _ in ()).throw(RuntimeError("boom")),
    )

    bound = bind_gateway_runtime(gateway)

    assert bound is False
    # Scheduler untouched on failure; error text recorded for operators.
    assert gateway.adapters[Platform.MSGRAPH_WEBHOOK].scheduler is None
    assert gateway._teams_pipeline_runtime_error == "boom"
|
||||
214
tests/hermes_cli/test_teams_pipeline_plugin_cli.py
Normal file
214
tests/hermes_cli/test_teams_pipeline_plugin_cli.py
Normal file
|
|
@@ -0,0 +1,214 @@
|
|||
"""Tests for the teams_pipeline plugin CLI."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from argparse import ArgumentParser, Namespace
|
||||
from types import SimpleNamespace
|
||||
|
||||
import pytest
|
||||
|
||||
from plugins.teams_pipeline.cli import register_cli, teams_pipeline_command
|
||||
from plugins.teams_pipeline.store import TeamsPipelineStore
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def _isolate(tmp_path, monkeypatch):
    """Point HERMES_HOME at a per-test temp dir so no test touches the real home."""
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))
|
||||
|
||||
|
||||
def _make_args(**kwargs):
    """Build an argparse-style Namespace with every CLI field defaulted.

    Keyword arguments override the defaults, mimicking what the real parser
    would produce for a given invocation.
    """
    defaults = {
        "teams_pipeline_action": None,
        "store_path": "",
        "status": "",
        "limit": 20,
        "job_id": "",
        "meeting_id": "",
        "join_web_url": "",
        "tenant_id": "",
        "call_record_id": "",
        "resource": "",
        "notification_url": "",
        "change_type": "updated",
        "expiration": "",
        "client_state": "",
        "lifecycle_notification_url": "",
        "latest_supported_tls_version": "v1_2",
        "subscription_id": "",
        "force_refresh": False,
        "renew_within_hours": 24,
        "extend_hours": 24,
        "dry_run": False,
    }
    return Namespace(**{**defaults, **kwargs})
|
||||
|
||||
|
||||
def test_register_cli_builds_tree():
    """register_cli wires a parseable 'list' subcommand into a fresh parser."""
    root_parser = ArgumentParser()
    register_cli(root_parser)

    parsed = root_parser.parse_args(["list"])

    assert parsed.teams_pipeline_action == "list"
|
||||
|
||||
|
||||
def test_list_prints_recent_jobs(capsys, tmp_path):
    """The list action renders the job id and meeting id of stored jobs."""
    store_file = tmp_path / "teams_pipeline_store.json"
    job_record = {
        "event_id": "evt-1",
        "source_event_type": "updated",
        "dedupe_key": "evt-1",
        "status": "completed",
        "meeting_ref": {"meeting_id": "meeting-1"},
    }
    TeamsPipelineStore(store_file).upsert_job("job-1", job_record)

    teams_pipeline_command(
        _make_args(teams_pipeline_action="list", store_path=str(store_file))
    )

    output = capsys.readouterr().out
    assert "job-1" in output
    assert "meeting-1" in output
|
||||
|
||||
|
||||
def test_show_prints_job_json(capsys, tmp_path):
    """The show action emits the full job record as JSON on stdout."""
    store_file = tmp_path / "teams_pipeline_store.json"
    job_record = {
        "event_id": "evt-1",
        "source_event_type": "updated",
        "dedupe_key": "evt-1",
        "status": "completed",
        "meeting_ref": {"meeting_id": "meeting-1"},
    }
    TeamsPipelineStore(store_file).upsert_job("job-1", job_record)

    teams_pipeline_command(
        _make_args(
            teams_pipeline_action="show",
            job_id="job-1",
            store_path=str(store_file),
        )
    )

    payload = json.loads(capsys.readouterr().out)
    assert payload["job_id"] == "job-1"
    assert payload["meeting_ref"]["meeting_id"] == "meeting-1"
|
||||
|
||||
|
||||
def test_fetch_requires_meeting_identifier(capsys):
    """fetch without meeting_id or join_web_url reports the missing identifier."""
    teams_pipeline_command(_make_args(teams_pipeline_action="fetch"))

    captured = capsys.readouterr()
    assert "meeting_id or join_web_url is required" in captured.out
|
||||
|
||||
|
||||
def test_subscriptions_lists_graph_subscriptions(monkeypatch, capsys):
    """The subscriptions action prints id and resource of each Graph subscription."""
    subscription_rows = [
        {
            "id": "sub-1",
            "resource": "communications/onlineMeetings/getAllTranscripts",
            "changeType": "updated",
            "expirationDateTime": "2026-05-05T00:00:00Z",
        }
    ]

    class FakeClient:
        async def collect_paginated(self, path):
            # The CLI must page through the Graph /subscriptions collection.
            assert path == "/subscriptions"
            return subscription_rows

    monkeypatch.setattr("plugins.teams_pipeline.cli.build_graph_client", lambda: FakeClient())
    teams_pipeline_command(_make_args(teams_pipeline_action="subscriptions"))

    output = capsys.readouterr().out
    assert "sub-1" in output
    assert "getAllTranscripts" in output
|
||||
|
||||
|
||||
def test_subscribe_defaults_to_created_for_transcript_resources(monkeypatch, capsys):
    """subscribe with an empty change_type falls back to 'created' for
    getAllTranscripts-style resources, both in the request and the echoed JSON."""
    captured = {}

    class FakeClient:
        async def post_json(self, path, json_body=None, headers=None):
            # Capture exactly what the CLI sends so assertions can inspect it.
            captured["path"] = path
            captured["json_body"] = json_body
            # Echo the request fields back, like Graph's subscription response.
            return {
                "id": "sub-transcript",
                "resource": json_body["resource"],
                "changeType": json_body["changeType"],
                "notificationUrl": json_body["notificationUrl"],
                "expirationDateTime": json_body["expirationDateTime"],
            }

    monkeypatch.setattr("plugins.teams_pipeline.cli.build_graph_client", lambda: FakeClient())
    teams_pipeline_command(
        _make_args(
            teams_pipeline_action="subscribe",
            resource="communications/onlineMeetings/getAllTranscripts",
            notification_url="https://example.com/webhooks/msgraph",
            # Empty string forces the CLI to pick the default change type.
            change_type="",
        )
    )
    payload = json.loads(capsys.readouterr().out)
    assert captured["path"] == "/subscriptions"
    assert captured["json_body"]["changeType"] == "created"
    assert payload["changeType"] == "created"
|
||||
|
||||
|
||||
def test_token_health_force_refresh(monkeypatch, capsys):
    """token-health with force_refresh reports a successful refresh and token length."""

    class FakeProvider:
        def inspect_token_health(self):
            return {"configured": True, "cache_state": "warm"}

        async def get_access_token(self, force_refresh=False):
            # The CLI must propagate the force_refresh flag to the provider.
            assert force_refresh is True
            return "token-123"

    monkeypatch.setattr(
        "plugins.teams_pipeline.cli.MicrosoftGraphTokenProvider",
        SimpleNamespace(from_env=lambda: FakeProvider()),
    )

    teams_pipeline_command(_make_args(teams_pipeline_action="token-health", force_refresh=True))

    report = json.loads(capsys.readouterr().out)
    assert report["configured"] is True
    assert report["last_refresh_succeeded"] is True
    assert report["access_token_length"] == len("token-123")
|
||||
|
||||
|
||||
def test_validate_accepts_msgraph_credentials_for_graph_delivery(monkeypatch, capsys, tmp_path):
    """validate reports ok with no issues when the Graph credential triple is
    present and a graph-mode teams delivery target is fully configured."""
    from gateway.config import Platform, PlatformConfig

    # Complete MSGRAPH credential triple so the env credential check passes.
    monkeypatch.setenv("MSGRAPH_TENANT_ID", "tenant")
    monkeypatch.setenv("MSGRAPH_CLIENT_ID", "client")
    monkeypatch.setenv("MSGRAPH_CLIENT_SECRET", "secret")

    # Webhook ingress enabled plus a graph-mode delivery with team/channel set.
    gateway_config = SimpleNamespace(
        platforms={
            Platform.MSGRAPH_WEBHOOK: PlatformConfig(enabled=True, extra={}),
            Platform("teams"): PlatformConfig(
                enabled=True,
                extra={
                    "delivery_mode": "graph",
                    "team_id": "team-1",
                    "channel_id": "channel-1",
                },
            ),
        }
    )
    monkeypatch.setattr(
        "plugins.teams_pipeline.cli.load_gateway_config",
        lambda: gateway_config,
    )

    teams_pipeline_command(
        _make_args(
            teams_pipeline_action="validate",
            store_path=str(tmp_path / "teams_pipeline_store.json"),
        )
    )
    payload = json.loads(capsys.readouterr().out)
    assert payload["ok"] is True
    assert payload["issues"] == []
|
||||
437
tests/plugins/test_teams_pipeline_plugin.py
Normal file
437
tests/plugins/test_teams_pipeline_plugin.py
Normal file
|
|
@@ -0,0 +1,437 @@
|
|||
"""Tests for the Teams pipeline plugin package."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from types import SimpleNamespace
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from hermes_cli.plugins import PluginContext, PluginManager, PluginManifest
|
||||
from gateway.config import GatewayConfig, Platform, PlatformConfig
|
||||
from plugins.teams_pipeline import register
|
||||
from plugins.teams_pipeline.pipeline import TeamsMeetingPipeline
|
||||
from plugins.teams_pipeline.store import TeamsPipelineStore
|
||||
from plugins.teams_pipeline.models import MeetingArtifact
|
||||
|
||||
|
||||
class FakeGraphClient:
    """Minimal Graph-client stand-in for pipeline tests."""

    def __init__(self) -> None:
        # Tracks whether any download path touched this client.
        self.downloaded = False
|
||||
|
||||
|
||||
async def _transcript_meeting_resolver(client, *, meeting_id=None, join_web_url=None, tenant_id=None):
    """Stub resolver that maps any lookup to a fixed 'Weekly Sync' meeting ref."""
    from plugins.teams_pipeline.models import TeamsMeetingRef

    fixed_metadata = {"subject": "Weekly Sync", "participants": [{"displayName": "Ada"}]}
    return TeamsMeetingRef(
        meeting_id=str(meeting_id),
        tenant_id=tenant_id,
        metadata=fixed_metadata,
    )
|
||||
|
||||
|
||||
async def _no_call_record(*args, **kwargs):
    """Stub call-record enricher that always finds nothing."""
    return None
|
||||
|
||||
|
||||
def test_register_adds_cli_only():
    """register() wires exactly the teams-pipeline CLI command into the manager."""
    manager = PluginManager()
    context = PluginContext(PluginManifest(name="teams_pipeline"), manager)

    register(context)

    assert "teams-pipeline" in manager._cli_commands
    command_entry = manager._cli_commands["teams-pipeline"]
    assert command_entry["plugin"] == "teams_pipeline"
    assert callable(command_entry["setup_fn"])
    assert callable(command_entry["handler_fn"])
|
||||
|
||||
|
||||
def test_runtime_config_uses_existing_teams_platform_settings():
    """Settings nested under the teams platform flow into the runtime config."""
    from plugins.teams_pipeline.runtime import build_pipeline_runtime_config

    teams_extra = {
        "delivery_mode": "graph",
        "team_id": "team-1",
        "channel_id": "channel-1",
        "meeting_pipeline": {
            "transcript_min_chars": 120,
            "notion": {"enabled": True, "database_id": "db-1"},
        },
    }
    gateway_config = GatewayConfig(
        platforms={Platform("teams"): PlatformConfig(enabled=True, extra=teams_extra)}
    )

    runtime_config = build_pipeline_runtime_config(gateway_config)

    # meeting_pipeline keys are hoisted to the top level of the runtime config.
    assert runtime_config["transcript_min_chars"] == 120
    assert runtime_config["notion"]["database_id"] == "db-1"
    # Delivery is enabled because mode/team/channel are all present.
    assert runtime_config["teams_delivery"] == {
        "enabled": True,
        "mode": "graph",
        "team_id": "team-1",
        "channel_id": "channel-1",
    }
|
||||
|
||||
|
||||
@pytest.mark.anyio
async def test_bind_gateway_runtime_attaches_scheduler(monkeypatch, tmp_path):
    """Successful binding records the runtime on the gateway and installs a
    scheduler that forwards notifications into the pipeline."""
    from plugins.teams_pipeline import runtime as runtime_module

    class FakeAdapter:
        def __init__(self) -> None:
            # Populated via set_notification_scheduler by bind_gateway_runtime.
            self.scheduler = None

        def set_notification_scheduler(self, scheduler) -> None:
            self.scheduler = scheduler

    class FakePipeline:
        def __init__(self) -> None:
            # Every notification the scheduler forwards lands here.
            self.notifications = []

        async def run_notification(self, notification):
            self.notifications.append(notification)

    adapter = FakeAdapter()
    pipeline = FakePipeline()
    gateway = SimpleNamespace(
        adapters={Platform.MSGRAPH_WEBHOOK: adapter},
        config=GatewayConfig(platforms={}),
        _teams_pipeline_runtime=None,
        _teams_pipeline_runtime_error=None,
    )

    # Skip real runtime construction; hand back the fake pipeline directly.
    monkeypatch.setattr(runtime_module, "build_pipeline_runtime", lambda gateway_runner: pipeline)

    bound = runtime_module.bind_gateway_runtime(gateway)

    assert bound is True
    assert gateway._teams_pipeline_runtime is pipeline
    assert callable(adapter.scheduler)

    # The installed scheduler must await the pipeline with the raw notification.
    notification = {"id": "notif-1"}
    await adapter.scheduler(notification, object())
    assert pipeline.notifications == [notification]
|
||||
|
||||
|
||||
@pytest.mark.anyio
async def test_bind_gateway_runtime_drops_notifications_when_unavailable(monkeypatch):
    """When runtime construction fails, a no-op scheduler still swallows notifications."""
    from plugins.teams_pipeline import runtime as runtime_module
    from tools.microsoft_graph_auth import MicrosoftGraphConfigError

    # Adapter stand-in built from a SimpleNamespace: the setter closes over it.
    adapter = SimpleNamespace(scheduler=None)
    adapter.set_notification_scheduler = lambda scheduler: setattr(
        adapter, "scheduler", scheduler
    )

    gateway = SimpleNamespace(
        adapters={Platform.MSGRAPH_WEBHOOK: adapter},
        config=GatewayConfig(platforms={}),
        _teams_pipeline_runtime=None,
        _teams_pipeline_runtime_error=None,
    )

    def _raise(_gateway_runner):
        raise MicrosoftGraphConfigError("missing graph env")

    monkeypatch.setattr(runtime_module, "build_pipeline_runtime", _raise)

    bound = runtime_module.bind_gateway_runtime(gateway)

    assert bound is False
    assert "missing graph env" in gateway._teams_pipeline_runtime_error
    # A fallback scheduler is still installed and must accept (and drop)
    # notifications without raising.
    assert callable(adapter.scheduler)
    await adapter.scheduler({"id": "notif-2"}, object())
|
||||
|
||||
|
||||
def test_store_persists_subscription_event_and_job_state(tmp_path):
    """Every record family survives a round-trip through a fresh store instance."""
    store_path = tmp_path / "teams-store.json"

    writer = TeamsPipelineStore(store_path)
    writer.upsert_subscription(
        "sub-1",
        {"client_state": "abc", "resource": "communications/onlineMeetings"},
    )
    writer.record_event_timestamp("evt-1", "2026-05-03T19:30:00Z")
    writer.upsert_job("job-1", {"status": "received", "event_id": "evt-1"})
    writer.upsert_sink_record("notion:meeting-1", {"page_id": "page-1"})

    # Re-open the same file with a new instance to prove durability.
    reader = TeamsPipelineStore(store_path)

    subscription = reader.get_subscription("sub-1")
    assert subscription is not None
    assert subscription["subscription_id"] == "sub-1"
    assert subscription["client_state"] == "abc"

    assert reader.get_event_timestamp("evt-1") == "2026-05-03T19:30:00Z"

    job = reader.get_job("job-1")
    assert job is not None
    assert job["status"] == "received"

    sink = reader.get_sink_record("notion:meeting-1")
    assert sink is not None
    assert sink["page_id"] == "page-1"
|
||||
|
||||
|
||||
def test_store_notification_receipts_are_idempotent(tmp_path):
    """Recording the same receipt twice is rejected, and receipts survive reload."""
    store_path = tmp_path / "teams-store.json"
    notification = {
        "subscriptionId": "sub-1",
        "resource": "communications/onlineMeetings/meeting-1",
        "changeType": "updated",
    }
    receipt_key = TeamsPipelineStore.build_notification_receipt_key(notification)

    store = TeamsPipelineStore(store_path)
    first_write = store.record_notification_receipt(receipt_key, notification)
    second_write = store.record_notification_receipt(receipt_key, notification)

    assert first_write is True
    assert second_write is False
    assert store.has_notification_receipt(receipt_key) is True

    # A fresh instance over the same file still knows the receipt.
    assert TeamsPipelineStore(store_path).has_notification_receipt(receipt_key) is True
|
||||
|
||||
|
||||
@pytest.mark.anyio
class TestTeamsMeetingPipeline:
    """End-to-end pipeline behavior with all Graph helpers monkeypatched out."""

    async def test_transcript_first_path_persists_state_and_skips_recording(self, tmp_path, monkeypatch):
        """A usable transcript completes the job on the transcript-first path
        and persists the completed state in the store."""
        from plugins.teams_pipeline import pipeline as pipeline_module

        monkeypatch.setattr(pipeline_module, "resolve_meeting_reference", _transcript_meeting_resolver)

        async def _fetch_transcript(client, meeting_ref):
            # Transcript is long enough to beat transcript_min_chars=20 below.
            return (
                MeetingArtifact(artifact_type="transcript", artifact_id="tx-1", display_name="meeting.vtt"),
                "Action: Send draft by Friday.\nDecision: Ship the transcript-first path.\nDetailed transcript content.",
            )

        async def _call_record(client, meeting_ref, *, call_record_id=None, allow_permission_errors=True):
            return MeetingArtifact(
                artifact_type="call_record",
                artifact_id="call-1",
                metadata={"metrics": {"participant_count": 4}},
            )

        async def _summarize(**kwargs):
            # Echo back the inputs the pipeline hands to the summarizer.
            return pipeline_module.TeamsMeetingSummaryPayload(
                meeting_ref=kwargs["resolved_meeting"],
                title="Weekly Sync",
                transcript_text=kwargs["transcript_text"],
                summary="Short summary",
                key_decisions=["Ship the transcript-first path."],
                action_items=["Send draft by Friday."],
                risks=["Timeline risk."],
                confidence="high",
                confidence_notes="Transcript available.",
                source_artifacts=kwargs["artifacts"],
            )

        monkeypatch.setattr(pipeline_module, "fetch_preferred_transcript_text", _fetch_transcript)
        monkeypatch.setattr(pipeline_module, "enrich_meeting_with_call_record", _call_record)

        store = TeamsPipelineStore(tmp_path / "teams-store.json")
        pipeline = TeamsMeetingPipeline(
            graph_client=FakeGraphClient(),
            store=store,
            config={"transcript_min_chars": 20},
            summarize_fn=_summarize,
        )

        job = await pipeline.run_notification(
            {
                "id": "notif-1",
                "changeType": "updated",
                "resource": "communications/onlineMeetings/meeting-123",
                "resourceData": {"id": "meeting-123"},
            }
        )

        assert job.status == "completed"
        assert job.selected_artifact_strategy == "transcript_first"
        assert job.summary_payload is not None
        assert job.summary_payload.summary == "Short summary"
        # The job state must also be durable in the store, not just in memory.
        stored = store.get_job(job.job_id)
        assert stored is not None
        assert stored["status"] == "completed"

    async def test_recording_fallback_uses_stt_and_updates_sink_records(self, tmp_path, monkeypatch):
        """With no transcript, the pipeline downloads the recording, runs STT,
        and writes both Notion and Teams sink records."""
        from plugins.teams_pipeline import pipeline as pipeline_module

        monkeypatch.setattr(pipeline_module, "resolve_meeting_reference", _transcript_meeting_resolver)

        async def _no_transcript(client, meeting_ref):
            # (artifact, text) pair — neither available, forcing the fallback.
            return None, None

        async def _recordings(client, meeting_ref):
            return [
                MeetingArtifact(
                    artifact_type="recording",
                    artifact_id="rec-1",
                    display_name="recording.mp4",
                    download_url="https://files.example/recording.mp4",
                )
            ]

        async def _download(client, meeting_ref, recording, destination):
            target = Path(destination)
            target.write_bytes(b"video-bytes")
            return {"path": str(target), "size_bytes": 11, "content_type": "video/mp4"}

        async def _prepare_audio(self, recording_path):
            # Patched onto the class below, hence the explicit `self`.
            audio_path = recording_path.with_suffix(".wav")
            audio_path.write_bytes(b"audio-bytes")
            return audio_path

        def _transcribe(file_path, model):
            return {"success": True, "transcript": "Action: Follow up with Legal.\nRisk: Budget approval pending.", "provider": "local"}

        async def _summarize(**kwargs):
            return pipeline_module.TeamsMeetingSummaryPayload(
                meeting_ref=kwargs["resolved_meeting"],
                title="Weekly Sync",
                transcript_text=kwargs["transcript_text"],
                summary="Fallback summary",
                key_decisions=[],
                action_items=["Follow up with Legal."],
                risks=["Budget approval pending."],
                confidence="medium",
                confidence_notes="Generated from STT fallback.",
                source_artifacts=kwargs["artifacts"],
            )

        class FakeNotionWriter:
            async def write_summary(self, payload, config, existing_record=None):
                # Reuse the page id on replays; create page-1 on first write.
                return {"page_id": existing_record.get("page_id") if existing_record else "page-1", "url": "https://notion.so/page-1"}

        async def _teams_sender(payload, config, existing_record=None):
            return {"message_id": existing_record.get("message_id") if existing_record else "msg-1"}

        monkeypatch.setattr(pipeline_module, "fetch_preferred_transcript_text", _no_transcript)
        monkeypatch.setattr(pipeline_module, "list_recording_artifacts", _recordings)
        monkeypatch.setattr(pipeline_module, "download_recording_artifact", _download)
        monkeypatch.setattr(pipeline_module.TeamsMeetingPipeline, "_prepare_audio_path", _prepare_audio)
        monkeypatch.setattr(pipeline_module, "enrich_meeting_with_call_record", _no_call_record)

        store = TeamsPipelineStore(tmp_path / "teams-store.json")
        pipeline = TeamsMeetingPipeline(
            graph_client=FakeGraphClient(),
            store=store,
            config={
                "notion": {"enabled": True, "database_id": "db-1"},
                "teams_delivery": {"enabled": True, "channel_id": "channel-1"},
            },
            transcribe_fn=_transcribe,
            summarize_fn=_summarize,
            notion_writer=FakeNotionWriter(),
            teams_sender=_teams_sender,
        )

        job = await pipeline.run_notification(
            {
                "id": "notif-2",
                "changeType": "updated",
                "resource": "communications/onlineMeetings/meeting-456",
                "resourceData": {"id": "meeting-456"},
            }
        )

        assert job.status == "completed"
        assert job.selected_artifact_strategy == "recording_stt_fallback"
        assert job.summary_payload is not None
        assert job.summary_payload.summary == "Fallback summary"
        notion_record = store.get_sink_record("notion:meeting-456")
        teams_record = store.get_sink_record("teams:meeting-456")
        assert notion_record is not None
        assert notion_record["page_id"] == "page-1"
        assert teams_record is not None
        assert teams_record["message_id"] == "msg-1"

    async def test_missing_transcript_and_recording_schedules_retry(self, tmp_path, monkeypatch):
        """With neither artifact available, the job is parked as a retryable retry."""
        from plugins.teams_pipeline import pipeline as pipeline_module

        monkeypatch.setattr(pipeline_module, "resolve_meeting_reference", _transcript_meeting_resolver)
        # asyncio.sleep(0, result=...) is a terse awaitable returning the result.
        monkeypatch.setattr(pipeline_module, "fetch_preferred_transcript_text", lambda *a, **kw: asyncio.sleep(0, result=(None, None)))
        monkeypatch.setattr(pipeline_module, "list_recording_artifacts", lambda *a, **kw: asyncio.sleep(0, result=[]))

        store = TeamsPipelineStore(tmp_path / "teams-store.json")
        pipeline = TeamsMeetingPipeline(
            graph_client=FakeGraphClient(),
            store=store,
            config={},
            summarize_fn=lambda **kwargs: asyncio.sleep(0, result=None),
        )

        job = await pipeline.run_notification(
            {
                "id": "notif-3",
                "changeType": "updated",
                "resource": "communications/onlineMeetings/meeting-789",
                "resourceData": {"id": "meeting-789"},
            }
        )

        assert job.status == "retry_scheduled"
        assert job.error_info["retryable"] is True
        assert "Recording unavailable" in job.error_info["message"]

    async def test_duplicate_notification_reuses_completed_job(self, tmp_path, monkeypatch):
        """Re-delivering the same notification returns the cached completed job
        without re-running summarization or creating a second job."""
        from plugins.teams_pipeline import pipeline as pipeline_module

        monkeypatch.setattr(pipeline_module, "resolve_meeting_reference", _transcript_meeting_resolver)

        async def _fetch_transcript(client, meeting_ref):
            return (
                MeetingArtifact(artifact_type="transcript", artifact_id="tx-dup", display_name="meeting.vtt"),
                "Decision: Keep duplicate notifications idempotent.\nAction: Verify the cached job is reused.",
            )

        # Counts summarizer invocations to prove the second run is a cache hit.
        summarize_calls = 0

        async def _summarize(**kwargs):
            nonlocal summarize_calls
            summarize_calls += 1
            return pipeline_module.TeamsMeetingSummaryPayload(
                meeting_ref=kwargs["resolved_meeting"],
                title="Weekly Sync",
                transcript_text=kwargs["transcript_text"],
                summary="Duplicate-safe summary",
                key_decisions=["Keep duplicate notifications idempotent."],
                action_items=["Verify the cached job is reused."],
                confidence="high",
                confidence_notes="Transcript available.",
                source_artifacts=kwargs["artifacts"],
            )

        monkeypatch.setattr(pipeline_module, "fetch_preferred_transcript_text", _fetch_transcript)
        monkeypatch.setattr(pipeline_module, "enrich_meeting_with_call_record", _no_call_record)

        store = TeamsPipelineStore(tmp_path / "teams-store.json")
        pipeline = TeamsMeetingPipeline(
            graph_client=FakeGraphClient(),
            store=store,
            config={"transcript_min_chars": 20},
            summarize_fn=_summarize,
        )
        notification = {
            "id": "notif-dup",
            "changeType": "updated",
            "resource": "communications/onlineMeetings/meeting-dup",
            "resourceData": {"id": "meeting-dup"},
        }

        first_job = await pipeline.run_notification(notification)
        second_job = await pipeline.run_notification(notification)

        assert first_job.status == "completed"
        assert second_job.status == "completed"
        assert second_job.job_id == first_job.job_id
        assert summarize_calls == 1
        assert len(store.list_jobs()) == 1
        receipt_key = TeamsPipelineStore.build_notification_receipt_key(notification)
        assert store.has_notification_receipt(receipt_key) is True
|
||||
Loading…
Add table
Add a link
Reference in a new issue