forked from LiveCarta/CommentAutomation
Added a webhooks subapp for Instagram webhooks, plus a Celery app and task that forward comment text to an LLM for further processing
This commit is contained in:
@@ -2,7 +2,6 @@ from fastapi.testclient import TestClient
|
||||
|
||||
from comment_automation.main import app
|
||||
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
|
||||
49
tests/test_instagram_webhooks.py
Normal file
49
tests/test_instagram_webhooks.py
Normal file
@@ -0,0 +1,49 @@
|
||||
from fastapi.testclient import TestClient
|
||||
|
||||
from comment_automation.main import app
|
||||
|
||||
client = TestClient(app)
|
||||
|
||||
|
||||
def test_instagram_comment_webhook_enqueues_celery_task(monkeypatch) -> None:
    """POSTing an Instagram comment webhook returns 202 and enqueues the payload.

    The Celery task's ``delay`` is patched out so no broker is needed; the
    stub records what the endpoint hands to it for later comparison.
    """
    seen: dict = {}

    class FakeAsyncResult:
        # Stands in for the Celery AsyncResult the real .delay() would return;
        # only the ``id`` attribute is consumed by the endpoint's response.
        id = "task-123"

    def capture_delay(payload: dict) -> FakeAsyncResult:
        # Record exactly what the endpoint enqueues instead of hitting Celery.
        seen.update(payload)
        return FakeAsyncResult()

    monkeypatch.setattr(
        "comment_automation.webhooks.services.send_instagram_comment_to_llm.delay",
        capture_delay,
    )

    # Assemble a representative Instagram "comments" webhook body piece by piece.
    comment_value = {
        "from_id": "123456",
        "media_id": "17895695668004550",
        "comment_id": "17900000000000000",
        "text": "Nice post!",
        "timestamp": 1711799299,
    }
    change = {"field": "comments", "value": comment_value}
    entry = {"id": "17841400000000000", "time": 1711799299, "changes": [change]}
    body = {"object": "instagram", "entry": [entry]}

    response = client.post("/webhooks/instagram/comments", json=body)

    # 202 Accepted: the webhook is queued, not processed inline.
    assert response.status_code == 202
    assert response.json() == {"status": "accepted", "task_id": "task-123"}
    # The task must receive the webhook body verbatim.
    assert seen == body
|
||||
52
tests/test_webhook_tasks.py
Normal file
52
tests/test_webhook_tasks.py
Normal file
@@ -0,0 +1,52 @@
|
||||
import pytest
|
||||
|
||||
from comment_automation.webhooks.tasks import (
|
||||
LLMEndpointConfigurationError,
|
||||
LLMEndpointTemporaryError,
|
||||
_forward_payload_to_llm,
|
||||
)
|
||||
|
||||
|
||||
def test_forward_payload_raises_when_endpoint_missing(monkeypatch) -> None:
    """Without LLM_ENDPOINT_URL set, forwarding must fail with a config error."""
    # Ensure the variable is absent even if the host environment defines it.
    monkeypatch.delenv("LLM_ENDPOINT_URL", raising=False)

    payload = {"hello": "world"}
    with pytest.raises(LLMEndpointConfigurationError):
        _forward_payload_to_llm(payload)
|
||||
|
||||
|
||||
def test_forward_payload_retries_on_retryable_status(monkeypatch) -> None:
    """A 503 from the LLM endpoint surfaces as LLMEndpointTemporaryError."""

    class StubResponse:
        # 503 Service Unavailable — expected to be classified as retryable.
        status_code = 503

        def raise_for_status(self) -> None:
            # Deliberately a no-op (unlike real httpx): the task itself must
            # inspect status_code to decide on the temporary-error path.
            return None

        # NOTE(review): assumes the task maps retryable statuses before/instead
        # of relying on raise_for_status — confirm against tasks.py.

    monkeypatch.setenv("LLM_ENDPOINT_URL", "https://example.org/llm")
    monkeypatch.setattr(
        "comment_automation.webhooks.tasks.httpx.post",
        lambda *args, **kwargs: StubResponse(),
    )

    with pytest.raises(LLMEndpointTemporaryError):
        _forward_payload_to_llm({"hello": "world"})
|
||||
|
||||
|
||||
def test_forward_payload_success(monkeypatch) -> None:
    """With a configured endpoint and a 200 response, forwarding completes quietly."""
    calls: list = []

    class OKResponse:
        # Successful upstream reply; no error mapping should trigger.
        status_code = 200

        def raise_for_status(self) -> None:
            return None

    def record_post(*args, **kwargs):
        # Capture the invocation so we can assert the HTTP call happened.
        calls.append((args, kwargs))
        return OKResponse()

    monkeypatch.setenv("LLM_ENDPOINT_URL", "https://example.org/llm")
    monkeypatch.setattr("comment_automation.webhooks.tasks.httpx.post", record_post)

    _forward_payload_to_llm({"hello": "world"})

    # Exactly the observable contract: the payload was actually posted.
    assert calls
|
||||
Reference in New Issue
Block a user