
Implemented content upload app with tests and pre-commit hooks

commit 342d39d457
2026-03-13 11:30:28 +00:00
33 changed files with 2740 additions and 0 deletions

.devcontainer/Dockerfile Normal file

@@ -0,0 +1 @@
FROM mcr.microsoft.com/devcontainers/base:ubuntu

.devcontainer/devcontainer.json Normal file

@@ -0,0 +1,29 @@
{
"name": "Python + uv",
"dockerComposeFile": "./docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspace",
"remoteUser": "vscode",
"features": {
"ghcr.io/devcontainers-extra/features/uv:1": {}
},
"customizations": {
"vscode": {
"extensions": [
"ms-python.python",
"ms-python.vscode-pylance",
"charliermarsh.ruff",
"ms-python.mypy-type-checker",
"ms-toolsai.jupyter"
]
}
},
"remoteEnv": {
"PROJECT_NAME": "Python + uv",
"PYTHON_VERSION": "3.13",
"UV_INIT_BARE": "false",
"INSTALL_IPYKERNEL": "false"
},
"postCreateCommand": "make post-create",
"mounts": []
}

.devcontainer/docker-compose.yml Normal file

@@ -0,0 +1,10 @@
version: '3.8'
services:
app:
build: .
image: python_playground
container_name: python_playground
volumes:
- ..:/workspace:cached
command: sleep infinity

.devcontainer/test_tools.sh Normal file

@@ -0,0 +1,70 @@
#!/bin/bash
PROJECT_NAME="${PROJECT_NAME:-Python + uv}"
echo "=== Testing Installed Tools ==="
echo "Run date: $(date -Is)"
echo
echo "Documentation: Tools included in ${PROJECT_NAME} devcontainer"
echo "================================================================"
echo
echo "Core Tools (from Ubuntu base + uv feature):"
echo " - uv: Python package manager"
echo " - bash: Shell"
echo " - git: Version control"
echo " - curl: Data transfer/downloads"
echo " - wget: Downloads"
echo " - build-essential: C/C++ compiler, make, build tools"
echo " - apt: Package manager"
echo " - openssh-client: SSH tools"
echo " - gnupg: Encryption/signing"
echo " - zip/unzip: Archive tools"
echo " - Standard utilities: grep, sed, awk, cut, sort, find, etc."
echo
echo "Note: Python is NOT pre-installed. Use 'uv python install' to add it."
echo
echo "=== Running Tool Tests ==="
echo
report_tool() {
local name="$1"; shift
local cmd="$*"
if command -v "$name" >/dev/null 2>&1; then
if [ -n "$cmd" ]; then
local v
v=$($cmd 2>/dev/null | head -1)
echo "${name}: ${v}"
else
echo "${name}: available"
fi
else
echo "${name}: not found"
fi
}
report_tool uv "uv --version"
report_tool python "python --version"
report_tool bash "bash --version"
report_tool zsh "zsh --version"
report_tool git "git --version"
report_tool zip "zip --version"
report_tool unzip "unzip -v"
report_tool grep "grep --version"
report_tool sed "sed --version"
report_tool awk "awk --version"
report_tool find "find --version"
report_tool cut "cut --version"
report_tool sort "sort --version"
report_tool cat
report_tool ls
report_tool mkdir
report_tool rm
echo
echo "=== Test Complete ==="
# Final summary
echo "✅ Apps are ready for '${PROJECT_NAME}'"
echo " Using python-uv devcontainer template"
echo " https://github.com/metinsenturk/devcontainer-templates/tree/main/src/python-uv"

.dockerignore Normal file

@@ -0,0 +1,16 @@
.git
.gitignore
.devcontainer
.venv
.mypy_cache
.pytest_cache
.ruff_cache
.vscode
__pycache__
*.py[cod]
*.log
.env
.env.*
youtube_credentials.json
client_secret_*.json
tests

.env.example Normal file

@@ -0,0 +1,35 @@
# App targets
CONTENT_AUTOMATION_TARGET_SOCIAL_NETWORKS=["instagram","youtube"]
# Instagram settings
CONTENT_AUTOMATION_INSTAGRAM__ACCESS_TOKEN=
CONTENT_AUTOMATION_INSTAGRAM__USER_ID=
CONTENT_AUTOMATION_INSTAGRAM__API_VERSION=v25.0
# YouTube settings
CONTENT_AUTOMATION_YOUTUBE__ACCESS_TOKEN=
CONTENT_AUTOMATION_YOUTUBE__CREDENTIALS_FILE_PATH=
CONTENT_AUTOMATION_YOUTUBE__REFRESH_TOKEN=
CONTENT_AUTOMATION_YOUTUBE__CLIENT_ID=
CONTENT_AUTOMATION_YOUTUBE__CLIENT_SECRET=
CONTENT_AUTOMATION_YOUTUBE__TOKEN_URI=https://oauth2.googleapis.com/token
CONTENT_AUTOMATION_YOUTUBE__SCOPES=["https://www.googleapis.com/auth/youtube.upload"]
CONTENT_AUTOMATION_YOUTUBE__EXPIRY=
CONTENT_AUTOMATION_YOUTUBE__CATEGORY_ID=22
CONTENT_AUTOMATION_YOUTUBE__PRIVACY_STATUS=public
CONTENT_AUTOMATION_YOUTUBE__USE_RESUMABLE_UPLOAD=true
CONTENT_AUTOMATION_YOUTUBE__RESUMABLE_CHUNK_SIZE=8388608
# Storage backend: local or s3
CONTENT_AUTOMATION_STORAGE__BACKEND=local
# Local storage
CONTENT_AUTOMATION_STORAGE__LOCAL__ROOT_DIRECTORY=.
# S3 storage
CONTENT_AUTOMATION_STORAGE__S3__BUCKET_NAME=
CONTENT_AUTOMATION_STORAGE__S3__KEY_PREFIX=
CONTENT_AUTOMATION_STORAGE__S3__REGION_NAME=
CONTENT_AUTOMATION_STORAGE__S3__ENDPOINT_URL=
CONTENT_AUTOMATION_STORAGE__S3__PUBLIC_URL_BASE=
CONTENT_AUTOMATION_STORAGE__S3__URL_EXPIRATION_SECONDS=3600

.gitignore vendored Normal file

@@ -0,0 +1,30 @@
__pycache__/
*.py[cod]
*.so
.Python
.python-version
.venv/
venv/
env/
.mypy_cache/
.pytest_cache/
.ruff_cache/
.coverage
.coverage.*
htmlcov/
.vscode/
.idea/
.DS_Store
build/
dist/
*.egg-info/
.env
youtube_credentials.json
client_secret_*.json
*.log

.pre-commit-config.yaml Normal file

@@ -0,0 +1,50 @@
minimum_pre_commit_version: 4.0.0
fail_fast: false
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v5.0.0
hooks:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
- id: check-merge-conflict
- repo: local
hooks:
- id: black
name: black
entry: uv run black --check --diff
language: system
types_or: [python]
- id: isort
name: isort
entry: uv run isort --check-only --diff
language: system
types_or: [python]
- id: ruff
name: ruff
entry: uv run ruff check
language: system
types_or: [python]
- id: bandit
name: bandit
entry: uv run bandit -q -r src
language: system
pass_filenames: false
- id: mypy
name: mypy
entry: uv run mypy src tests
language: system
pass_filenames: false
- id: pytest
name: pytest-with-coverage
entry: uv run pytest -q
language: system
pass_filenames: false
stages: [pre-push]

Dockerfile Normal file

@@ -0,0 +1,26 @@
FROM python:3.13-slim
ENV PYTHONDONTWRITEBYTECODE=1 \
PYTHONUNBUFFERED=1 \
UV_PROJECT_ENVIRONMENT=/app/.venv \
PATH=/app/.venv/bin:/root/.local/bin:${PATH} \
PYTHONPATH=/app/src
WORKDIR /app
RUN apt-get update \
&& apt-get install -y --no-install-recommends bash ca-certificates curl \
&& rm -rf /var/lib/apt/lists/*
RUN curl -LsSf https://astral.sh/uv/install.sh | sh
COPY pyproject.toml uv.lock README.md ./
RUN uv sync --frozen --no-dev
COPY src ./src
COPY docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh
RUN chmod +x /usr/local/bin/docker-entrypoint.sh
ENTRYPOINT ["/usr/local/bin/docker-entrypoint.sh"]
CMD ["bash"]

Makefile Normal file

@@ -0,0 +1,89 @@
.PHONY: post-create test-tools init sync lint format type-check gitignore freeze dev-tools ensure-ipykernel pre-commit-install pre-commit-run
# Post-create command: run tool verification, init project, and sync dependencies
post-create: test-tools init ensure-ipykernel sync
# Initialize Python environment with uv and create pyproject.toml if needed
init:
@echo "Initializing Python environment..."
@(uv python install ${PYTHON_VERSION} && \
if [ ! -f pyproject.toml ]; then \
if [ "${UV_INIT_BARE}" = "true" ]; then \
uv init --bare --python ${PYTHON_VERSION}; \
else \
uv init --python ${PYTHON_VERSION}; \
fi; \
fi) > /tmp/init.log 2>&1
@echo "✓ Initialization complete (log: /tmp/init.log)"
# Ensure ipykernel is installed as a dev dependency (optional)
ensure-ipykernel:
@if [ "${INSTALL_IPYKERNEL}" = "true" ]; then \
( \
if ! uv run python -c "import ipykernel" >/dev/null 2>&1; then \
echo "Installing ipykernel (dev dependency)..."; \
uv add --dev ipykernel; \
else \
echo "ipykernel already present; skipping installation."; \
fi \
) > /tmp/jupyter-kernel.log 2>&1; \
echo "✓ ipykernel check complete (log: /tmp/jupyter-kernel.log)"; \
else \
echo "Skipping ipykernel installation (INSTALL_IPYKERNEL != true)"; \
fi
# Verify installed tools
test-tools:
@echo "Running tool verification..."
@bash .devcontainer/test_tools.sh > /tmp/test-tools.log 2>&1
@echo "✓ Tool verification complete (log: /tmp/test-tools.log)"
# Sync dependencies with uv
sync:
@echo "Syncing dependencies..."
@uv sync > /tmp/uv-sync.log 2>&1
@echo "✓ Dependency sync complete (log: /tmp/uv-sync.log)"
# Run ruff linter
lint:
@uv run ruff check .
# Run ruff formatter
format:
@uv run ruff format .
# Run mypy type checker
type-check:
@uv run mypy .
# Download Python .gitignore from GitHub
gitignore:
@if [ -f .gitignore ]; then \
echo "⚠️ .gitignore already exists, skipping"; \
else \
( \
echo "📥 Downloading Python .gitignore from GitHub..."; \
curl -fsSL https://raw.githubusercontent.com/github/gitignore/main/Python.gitignore -o .gitignore; \
echo "✅ .gitignore created"; \
) > /tmp/gitignore.log 2>&1; \
echo "✓ .gitignore download complete (log: /tmp/gitignore.log)"; \
fi
# Install dev tools (pre-commit, black, isort, bandit, ruff, mypy)
dev-tools:
@echo "Installing dev tools (pre-commit, black, isort, bandit, ruff, mypy)..."
@uv add --dev pre-commit black isort 'bandit[toml]' ruff mypy > /tmp/dev-tools.log 2>&1
@echo "✓ Dev tools installed (log: /tmp/dev-tools.log)"
pre-commit-install:
@uv run pre-commit install --hook-type pre-commit --hook-type pre-push
pre-commit-run:
@uv run pre-commit run --all-files
# Freeze dependencies to tmp folder
freeze:
@echo "Freezing dependencies..."
@echo "# Generated on $$(date)" > /tmp/requirements.txt
@uv pip freeze >> /tmp/requirements.txt
@echo "✓ Dependencies frozen (log: /tmp/requirements.txt)"

README.md Normal file

@@ -0,0 +1,112 @@
# ContentAutomation Boilerplate
Project scaffold for posting media from a storage backend to multiple social networks.
## Structure
```text
src/content_automation/
adapters/
social/
base.py
instagram.py
youtube.py
storage/
base.py
local.py
s3.py
controller.py
factories.py
interfaces.py
main.py
settings.py
```
## Design
- `SocialNetworkAdapter` interface defines a typed `post_media(media_url, caption)` contract.
- Social adapters inherit from `SocialNetworkBaseAdapter` and use typed `uplink.Consumer` clients.
- `StorageAdapterBase` defines `exists()` and `get_public_url()`.
- `LocalFilesystemStorageAdapter` and `S3StorageAdapter` implement storage behavior.
- `PublishController` is adapter-agnostic and only depends on abstractions.
- `AppSettings` uses `pydantic-settings` with nested settings classes per adapter/backend.
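A new network can be added by implementing the same contract and registering the adapter in `build_social_adapters`. A minimal sketch (the `mastodon` network and its return value are hypothetical, for illustration only):
```python
from content_automation.adapters.social.base import SocialNetworkBaseAdapter


class MastodonAdapter(SocialNetworkBaseAdapter):
    """Hypothetical adapter illustrating the post_media contract."""

    def __init__(self, access_token: str) -> None:
        super().__init__(name="mastodon")
        self._access_token = access_token

    def post_media(self, media_url: str, caption: str) -> str:
        # A real implementation would call the network's API here and
        # return the provider-specific post identifier.
        return "mastodon-post-id"
```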
## Configuration
Environment variables use prefix `CONTENT_AUTOMATION_` and nested delimiter `__`.
Examples:
```powershell
# Targets
$env:CONTENT_AUTOMATION_TARGET_SOCIAL_NETWORKS='["instagram","youtube"]'
# Instagram
$env:CONTENT_AUTOMATION_INSTAGRAM__ACCESS_TOKEN='your-instagram-token'
$env:CONTENT_AUTOMATION_INSTAGRAM__USER_ID='your-instagram-user-id'
$env:CONTENT_AUTOMATION_INSTAGRAM__API_VERSION='v25.0'
# YouTube
$env:CONTENT_AUTOMATION_YOUTUBE__ACCESS_TOKEN='your-youtube-token'
$env:CONTENT_AUTOMATION_YOUTUBE__REFRESH_TOKEN='your-youtube-refresh-token'
$env:CONTENT_AUTOMATION_YOUTUBE__CLIENT_ID='your-google-oauth-client-id'
$env:CONTENT_AUTOMATION_YOUTUBE__CLIENT_SECRET='your-google-oauth-client-secret'
$env:CONTENT_AUTOMATION_YOUTUBE__TOKEN_URI='https://oauth2.googleapis.com/token'
$env:CONTENT_AUTOMATION_YOUTUBE__SCOPES='["https://www.googleapis.com/auth/youtube.upload"]'
$env:CONTENT_AUTOMATION_YOUTUBE__EXPIRY='2026-03-13T00:00:00Z'
$env:CONTENT_AUTOMATION_YOUTUBE__CATEGORY_ID='22'
$env:CONTENT_AUTOMATION_YOUTUBE__PRIVACY_STATUS='public'
# Storage backend (local or s3)
$env:CONTENT_AUTOMATION_STORAGE__BACKEND='local'
# Local storage settings
$env:CONTENT_AUTOMATION_STORAGE__LOCAL__ROOT_DIRECTORY='D:\media'
# S3 storage settings
$env:CONTENT_AUTOMATION_STORAGE__BACKEND='s3'
$env:CONTENT_AUTOMATION_STORAGE__S3__BUCKET_NAME='your-bucket'
$env:CONTENT_AUTOMATION_STORAGE__S3__KEY_PREFIX='uploads'
$env:CONTENT_AUTOMATION_STORAGE__S3__REGION_NAME='us-west-1'
```
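In code, the same values load through `AppSettings`; a quick sketch, assuming the variables above are set in the environment:
```python
from content_automation.settings import AppSettings

settings = AppSettings()  # reads CONTENT_AUTOMATION_* variables

print(settings.target_social_networks)  # e.g. ['instagram', 'youtube']
print(settings.youtube.privacy_status)  # e.g. 'public'
print(settings.storage.backend)         # 'local' or 's3'
```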
## Run
```powershell
$env:PYTHONPATH='src'
uv run python -m content_automation.main path/to/video.mp4 --caption "My new post"
```
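On success, `main` prints one `network: post_id` line per configured target; illustrative output (the identifiers below are made up):
```text
instagram: 17900000000000001
youtube: a1B2c3D4e5F
```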
## Pre-commit
Install hooks:
```bash
uv run pre-commit install --hook-type pre-commit --hook-type pre-push
```
Run hooks manually:
```bash
uv run pre-commit run --all-files
```
## Docker
Build the image:
```bash
docker build -t content-automation .
```
Run a publish job:
```bash
docker run --rm --env-file .env -v "$PWD:/workspace" content-automation upload path/to/video.mp4 --caption "My new post"
```
Run any other command in the container:
```bash
docker run --rm -it --env-file .env content-automation bash
```
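## YouTube OAuth bootstrap
`YouTubeAdapter.obtain_credentials_from_client_secret_file` runs the installed-app OAuth flow and can persist the resulting credentials JSON. A minimal sketch (file names are illustrative; the client secret file comes from the Google Cloud console):
```python
from content_automation.adapters.social.youtube import YouTubeAdapter

# Opens a browser window for consent, then writes the credentials JSON
# that CONTENT_AUTOMATION_YOUTUBE__CREDENTIALS_FILE_PATH can point to.
YouTubeAdapter.obtain_credentials_from_client_secret_file(
    client_secret_file_path="client_secret_example.json",
    token_output_path="youtube_credentials.json",
)
```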

docker-entrypoint.sh Normal file

@@ -0,0 +1,17 @@
#!/usr/bin/env bash
set -euo pipefail
# No arguments: drop into an interactive shell.
if [[ $# -eq 0 ]]; then
exec bash
fi
# "upload <args...>": run the publish CLI with the remaining arguments.
if [[ "$1" == "upload" ]]; then
shift
exec python -m content_automation.main "$@"
fi
# A leading flag is also forwarded to the publish CLI.
if [[ "$1" == -* ]]; then
exec python -m content_automation.main "$@"
fi
# Anything else is executed verbatim (e.g. bash, pytest).
exec "$@"

pyproject.toml Normal file

@@ -0,0 +1,71 @@
[project]
name = "workspace"
version = "1.0.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.13"
dependencies = [
"boto3>=1.37.17",
"google-api-python-client>=2.190.0",
"google-auth-oauthlib>=1.2.4",
"pydantic>=2.12.5",
"pydantic-settings>=2.11.0",
"pytest>=8.3.5",
"uplink>=0.10.0",
]
[tool.pytest.ini_options]
pythonpath = ["src"]
testpaths = ["tests"]
addopts = [
"--cov=src/content_automation",
"--cov-report=term-missing",
"--cov-fail-under=70",
]
[dependency-groups]
dev = [
"bandit[toml]>=1.9.4",
"black>=26.3.1",
"isort>=8.0.1",
"mypy>=1.19.1",
"pre-commit>=4.5.1",
"pytest-cov>=7.0.0",
"ruff>=0.15.6",
]
[tool.black]
line-length = 88
target-version = ["py313"]
include = "\\.pyi?$"
[tool.isort]
profile = "black"
line_length = 88
py_version = 313
src_paths = ["src", "tests"]
[tool.bandit]
exclude_dirs = ["tests", ".venv", ".git", "build", "dist"]
skips = ["B105"]
targets = ["src"]
[tool.ruff]
line-length = 88
target-version = "py313"
src = ["src", "tests"]
[tool.ruff.lint]
select = ["E", "F", "I"]
[tool.mypy]
python_version = "3.13"
files = ["src", "tests"]
ignore_missing_imports = true
check_untyped_defs = true
warn_unused_ignores = true
pretty = true
[[tool.mypy.overrides]]
module = ["content_automation.adapters.social.instagram"]
ignore_errors = true

src/content_automation/__init__.py Normal file

@@ -0,0 +1 @@
"""Content automation package."""

src/content_automation/adapters/__init__.py Normal file

@@ -0,0 +1 @@
"""Adapter implementations."""

src/content_automation/adapters/social/__init__.py Normal file

@@ -0,0 +1 @@
"""Social network adapters."""

src/content_automation/adapters/social/base.py Normal file

@@ -0,0 +1,14 @@
from __future__ import annotations
from abc import ABC, abstractmethod
class SocialNetworkBaseAdapter(ABC):
"""Abstract base class for social media posting adapters."""
def __init__(self, name: str) -> None:
self.name = name
@abstractmethod
def post_media(self, media_url: str, caption: str) -> str:
"""Publish media and return a provider-specific identifier."""

src/content_automation/adapters/social/instagram.py Normal file

@@ -0,0 +1,67 @@
from __future__ import annotations
from pydantic import BaseModel
from uplink import Body, Consumer, post, returns
from uplink.auth import BearerToken
from content_automation.adapters.social.base import SocialNetworkBaseAdapter
from content_automation.interfaces import SocialNetworkAdapter
class InstagramContainerCreatePayload(BaseModel):
media_type: str = "REELS"
video_url: str
caption: str
class InstagramContainerPublishPayload(BaseModel):
creation_id: str
class InstagramGraphClient(Consumer):
@returns.json
@post("/{user_id}/media", args={"payload": Body})
def create_container(
self,
user_id: str,
payload: Body(type=InstagramContainerCreatePayload),
) -> dict[str, str]:
pass
@returns.json
@post("/{user_id}/media_publish", args={"payload": Body})
def publish_container(
self,
user_id: str,
payload: Body(type=InstagramContainerPublishPayload),
) -> dict[str, str]:
pass
class InstagramAdapter(SocialNetworkBaseAdapter, SocialNetworkAdapter):
"""Instagram reel publisher via Meta Graph API."""
def __init__(
self, access_token: str, user_id: str, api_version: str = "v25.0"
) -> None:
super().__init__(name="instagram")
self._user_id = user_id
self._client = InstagramGraphClient(
base_url=f"https://graph.instagram.com/{api_version}",
auth=BearerToken(access_token),
)
def post_media(self, media_url: str, caption: str) -> str:
container_response = self._client.create_container(
user_id=self._user_id,
payload=InstagramContainerCreatePayload(
video_url=media_url, caption=caption
),
)
creation_id = container_response["id"]
publish_response = self._client.publish_container(
user_id=self._user_id,
payload=InstagramContainerPublishPayload(creation_id=creation_id),
)
return publish_response.get("id", creation_id)

src/content_automation/adapters/social/youtube.py Normal file

@@ -0,0 +1,295 @@
import json
import mimetypes
from datetime import datetime, timezone
from pathlib import Path
from urllib.parse import unquote, urlparse
from google.auth.transport.requests import Request
from google.oauth2.credentials import Credentials
from google_auth_oauthlib.flow import InstalledAppFlow
from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload
from pydantic import BaseModel
from content_automation.adapters.social.base import SocialNetworkBaseAdapter
from content_automation.interfaces import SocialNetworkAdapter
DEFAULT_GOOGLE_TOKEN_URI = "https://oauth2.googleapis.com/token"
DEFAULT_YOUTUBE_UPLOAD_SCOPE = "https://www.googleapis.com/auth/youtube.upload"
class YouTubeSnippet(BaseModel):
title: str
description: str
categoryId: str
class YouTubeStatus(BaseModel):
privacyStatus: str
class YouTubeVideoInsertPayload(BaseModel):
snippet: YouTubeSnippet
status: YouTubeStatus
sourceUrl: str
class YouTubeDataApiClient:
def __init__(
self,
access_token: str,
category_id: str,
privacy_status: str,
refresh_token: str | None = None,
client_id: str | None = None,
client_secret: str | None = None,
token_uri: str = DEFAULT_GOOGLE_TOKEN_URI,
scopes: list[str] | None = None,
expiry: str | None = None,
credentials_file_path: str | Path | None = None,
) -> None:
self._category_id = category_id
self._privacy_status = privacy_status
self._scopes = scopes or [DEFAULT_YOUTUBE_UPLOAD_SCOPE]
self._credentials = self._build_credentials(
access_token=access_token,
refresh_token=refresh_token,
client_id=client_id,
client_secret=client_secret,
token_uri=token_uri,
scopes=self._scopes,
expiry=expiry,
credentials_file_path=credentials_file_path,
)
@staticmethod
def _parse_expiry(expiry: str | None) -> datetime | None:
if not expiry:
return None
normalized = expiry.replace("Z", "+00:00")
parsed = datetime.fromisoformat(normalized)
if parsed.tzinfo is not None:
return parsed.astimezone(timezone.utc).replace(tzinfo=None)
return parsed
def _build_credentials(
self,
access_token: str,
refresh_token: str | None,
client_id: str | None,
client_secret: str | None,
token_uri: str,
scopes: list[str],
expiry: str | None,
credentials_file_path: str | Path | None,
) -> Credentials:
if credentials_file_path:
return Credentials.from_authorized_user_file(
str(credentials_file_path),
scopes=scopes,
)
return Credentials(
token=access_token,
refresh_token=refresh_token,
token_uri=token_uri,
client_id=client_id,
client_secret=client_secret,
scopes=scopes,
expiry=self._parse_expiry(expiry),
)
def _ensure_valid_access_token(self) -> None:
if self._credentials.valid:
return
if not self._credentials.refresh_token:
raise RuntimeError(
"YouTube credentials are invalid and no refresh token is available."
)
self._credentials.refresh(Request())
if not self._credentials.token:
raise RuntimeError("Token refresh did not return an access token.")
def _build_service(self):
self._ensure_valid_access_token()
return build(
"youtube",
"v3",
credentials=self._credentials,
cache_discovery=False,
)
def _build_metadata(self, caption: str) -> dict[str, dict[str, str]]:
return {
"snippet": {
"title": caption[:80] if caption else "Auto-uploaded short",
"description": caption,
"categoryId": self._category_id,
},
"status": {
"privacyStatus": self._privacy_status,
},
}
def build_insert_payload(
self, source_url: str, caption: str
) -> YouTubeVideoInsertPayload:
metadata = self._build_metadata(caption=caption)
return YouTubeVideoInsertPayload(
snippet=YouTubeSnippet(**metadata["snippet"]),
status=YouTubeStatus(**metadata["status"]),
sourceUrl=source_url,
)
def insert_video(
self,
part: str,
payload: YouTubeVideoInsertPayload,
) -> dict[str, object]:
service = self._build_service()
body = payload.model_dump()
request = service.videos().insert(part=part, body=body)
return request.execute()
def upload_video_regular(self, local_file_path: Path, caption: str) -> str:
mime_type = (
mimetypes.guess_type(local_file_path.name)[0] or "application/octet-stream"
)
body = self._build_metadata(caption=caption)
media = MediaFileUpload(
str(local_file_path),
mimetype=mime_type,
resumable=False,
)
service = self._build_service()
response = (
service.videos()
.insert(
part="snippet,status",
body=body,
media_body=media,
)
.execute()
)
return str(response["id"])
def upload_video_resumable(
self,
local_file_path: Path,
caption: str,
resumable_chunk_size: int,
) -> str:
mime_type = (
mimetypes.guess_type(local_file_path.name)[0] or "application/octet-stream"
)
body = self._build_metadata(caption=caption)
media = MediaFileUpload(
str(local_file_path),
mimetype=mime_type,
chunksize=resumable_chunk_size,
resumable=True,
)
service = self._build_service()
request = service.videos().insert(
part="snippet,status",
body=body,
media_body=media,
)
response = None
while response is None:
_, response = request.next_chunk()
return str(response["id"])
class YouTubeAdapter(SocialNetworkBaseAdapter, SocialNetworkAdapter):
"""YouTube posting adapter via Data API."""
def __init__(
self,
access_token: str,
category_id: str = "22",
privacy_status: str = "public",
use_resumable_upload: bool = True,
resumable_chunk_size: int = 8 * 1024 * 1024,
refresh_token: str | None = None,
client_id: str | None = None,
client_secret: str | None = None,
token_uri: str = DEFAULT_GOOGLE_TOKEN_URI,
scopes: list[str] | None = None,
expiry: str | None = None,
credentials_file_path: str | Path | None = None,
) -> None:
super().__init__(name="youtube")
self._use_resumable_upload = use_resumable_upload
self._resumable_chunk_size = resumable_chunk_size
self._client = YouTubeDataApiClient(
access_token=access_token,
category_id=category_id,
privacy_status=privacy_status,
refresh_token=refresh_token,
client_id=client_id,
client_secret=client_secret,
token_uri=token_uri,
scopes=scopes,
expiry=expiry,
credentials_file_path=credentials_file_path,
)
@staticmethod
def obtain_credentials_from_client_secret_file(
client_secret_file_path: str | Path,
scopes: list[str] | None = None,
token_output_path: str | Path | None = None,
) -> dict[str, object]:
resolved_scopes = scopes or [DEFAULT_YOUTUBE_UPLOAD_SCOPE]
flow = InstalledAppFlow.from_client_secrets_file(
str(client_secret_file_path),
resolved_scopes,
)
credentials = flow.run_local_server()
credentials_payload = json.loads(credentials.to_json())
if token_output_path is not None:
token_path = Path(token_output_path)
token_path.write_text(
json.dumps(credentials_payload, indent=4),
encoding="utf-8",
)
return credentials_payload
def post_media(self, media_url: str, caption: str) -> str:
local_file_path = self._resolve_local_file_path(media_url)
if local_file_path is not None:
if self._use_resumable_upload:
return self._client.upload_video_resumable(
local_file_path=local_file_path,
caption=caption,
resumable_chunk_size=self._resumable_chunk_size,
)
return self._client.upload_video_regular(
local_file_path=local_file_path,
caption=caption,
)
payload = self._client.build_insert_payload(
source_url=media_url,
caption=caption,
)
response = self._client.insert_video(part="snippet,status", payload=payload)
return str(response["id"])
@staticmethod
def _resolve_local_file_path(media_url: str) -> Path | None:
parsed_url = urlparse(media_url)
if parsed_url.scheme == "file":
return Path(unquote(parsed_url.path))
raw_path = Path(media_url)
if raw_path.exists():
return raw_path
return None

src/content_automation/adapters/storage/__init__.py Normal file

@@ -0,0 +1 @@
"""Storage adapters."""

src/content_automation/adapters/storage/base.py Normal file

@@ -0,0 +1,15 @@
from __future__ import annotations
from abc import ABC, abstractmethod
class StorageAdapterBase(ABC):
"""Abstract storage adapter for file presence and URL access."""
@abstractmethod
def exists(self, relative_path: str) -> bool:
"""Check if the media file exists in storage."""
@abstractmethod
def get_public_url(self, relative_path: str) -> str:
"""Resolve a URL that can be consumed by social APIs."""

src/content_automation/adapters/storage/local.py Normal file

@@ -0,0 +1,24 @@
from __future__ import annotations
from pathlib import Path
from content_automation.adapters.storage.base import StorageAdapterBase
class LocalFilesystemStorageAdapter(StorageAdapterBase):
"""Storage adapter for local files."""
def __init__(self, root_directory: str) -> None:
self._root_directory = Path(root_directory)
def exists(self, relative_path: str) -> bool:
return self._resolve(relative_path).exists()
def get_public_url(self, relative_path: str) -> str:
file_path = self._resolve(relative_path)
if not file_path.exists():
raise FileNotFoundError(f"File not found: {file_path}")
return file_path.resolve().as_uri()
def _resolve(self, relative_path: str) -> Path:
return self._root_directory / relative_path

src/content_automation/adapters/storage/s3.py Normal file

@@ -0,0 +1,57 @@
from __future__ import annotations
import boto3
from botocore.client import BaseClient
from botocore.exceptions import ClientError
from content_automation.adapters.storage.base import StorageAdapterBase
class S3StorageAdapter(StorageAdapterBase):
"""Storage adapter backed by AWS S3-compatible API."""
def __init__(
self,
bucket_name: str,
key_prefix: str = "",
region_name: str | None = None,
endpoint_url: str | None = None,
public_url_base: str | None = None,
url_expiration_seconds: int = 3600,
) -> None:
self._bucket_name = bucket_name
self._key_prefix = key_prefix.strip("/")
self._public_url_base = public_url_base.rstrip("/") if public_url_base else None
self._url_expiration_seconds = url_expiration_seconds
self._client: BaseClient = boto3.client(
"s3",
region_name=region_name,
endpoint_url=endpoint_url,
)
def exists(self, relative_path: str) -> bool:
key = self._key(relative_path)
try:
self._client.head_object(Bucket=self._bucket_name, Key=key)
return True
except ClientError as exc:
error_code = exc.response.get("Error", {}).get("Code", "")
if error_code in {"404", "NoSuchKey", "NotFound"}:
return False
raise
def get_public_url(self, relative_path: str) -> str:
key = self._key(relative_path)
if self._public_url_base:
return f"{self._public_url_base}/{key}"
return self._client.generate_presigned_url(
ClientMethod="get_object",
Params={"Bucket": self._bucket_name, "Key": key},
ExpiresIn=self._url_expiration_seconds,
)
def _key(self, relative_path: str) -> str:
sanitized = relative_path.lstrip("/")
if not self._key_prefix:
return sanitized
return f"{self._key_prefix}/{sanitized}"

src/content_automation/controller.py Normal file

@@ -0,0 +1,34 @@
from __future__ import annotations
from content_automation.adapters.storage.base import StorageAdapterBase
from content_automation.interfaces import SocialNetworkAdapter
from content_automation.settings import AppSettings
class PublishController:
"""Coordinates storage lookup and cross-network publishing."""
def __init__(
self,
settings: AppSettings,
storage: StorageAdapterBase,
social_adapters: dict[str, SocialNetworkAdapter],
) -> None:
self._settings = settings
self._storage = storage
self._social_adapters = social_adapters
def publish(self, relative_path: str, caption: str) -> dict[str, str]:
if not self._storage.exists(relative_path):
raise FileNotFoundError(
f"Media file is not available in storage: {relative_path}"
)
media_url = self._storage.get_public_url(relative_path)
result: dict[str, str] = {}
for network in self._settings.target_social_networks:
adapter = self._social_adapters.get(network)
if adapter is None:
raise ValueError(f"No adapter configured for network: {network}")
result[network] = adapter.post_media(media_url=media_url, caption=caption)
return result

src/content_automation/factories.py Normal file

@@ -0,0 +1,51 @@
from __future__ import annotations
from content_automation.adapters.social.instagram import InstagramAdapter
from content_automation.adapters.social.youtube import YouTubeAdapter
from content_automation.adapters.storage.base import StorageAdapterBase
from content_automation.adapters.storage.local import LocalFilesystemStorageAdapter
from content_automation.adapters.storage.s3 import S3StorageAdapter
from content_automation.interfaces import SocialNetworkAdapter
from content_automation.settings import AppSettings
def build_storage_adapter(settings: AppSettings) -> StorageAdapterBase:
backend = settings.storage.backend.lower()
if backend == "local":
return LocalFilesystemStorageAdapter(
root_directory=settings.storage.local.root_directory
)
if backend == "s3":
return S3StorageAdapter(
bucket_name=settings.storage.s3.bucket_name,
key_prefix=settings.storage.s3.key_prefix,
region_name=settings.storage.s3.region_name,
endpoint_url=settings.storage.s3.endpoint_url,
public_url_base=settings.storage.s3.public_url_base,
url_expiration_seconds=settings.storage.s3.url_expiration_seconds,
)
raise ValueError(f"Unsupported storage backend: {settings.storage.backend}")
def build_social_adapters(settings: AppSettings) -> dict[str, SocialNetworkAdapter]:
return {
"instagram": InstagramAdapter(
access_token=settings.instagram.access_token,
user_id=settings.instagram.user_id,
api_version=settings.instagram.api_version,
),
"youtube": YouTubeAdapter(
access_token=settings.youtube.access_token,
category_id=settings.youtube.category_id,
privacy_status=settings.youtube.privacy_status,
use_resumable_upload=settings.youtube.use_resumable_upload,
resumable_chunk_size=settings.youtube.resumable_chunk_size,
credentials_file_path=settings.youtube.credentials_file_path or None,
refresh_token=settings.youtube.refresh_token or None,
client_id=settings.youtube.client_id or None,
client_secret=settings.youtube.client_secret or None,
token_uri=settings.youtube.token_uri,
scopes=settings.youtube.scopes,
expiry=settings.youtube.expiry or None,
),
}

src/content_automation/interfaces.py Normal file

@@ -0,0 +1,12 @@
from __future__ import annotations
from typing import Protocol
class SocialNetworkAdapter(Protocol):
"""Contract for social network posting adapters."""
name: str
def post_media(self, media_url: str, caption: str) -> str:
"""Publish media and return an external post identifier."""

src/content_automation/main.py Normal file

@@ -0,0 +1,37 @@
from __future__ import annotations
import argparse
from content_automation.controller import PublishController
from content_automation.factories import build_social_adapters, build_storage_adapter
from content_automation.settings import AppSettings
def parse_args() -> argparse.Namespace:
parser = argparse.ArgumentParser(
description="Publish media to configured social networks."
)
parser.add_argument("relative_path", help="Storage-relative media path.")
parser.add_argument(
"--caption", default="", help="Caption/description for the post."
)
return parser.parse_args()
def main() -> None:
args = parse_args()
settings = AppSettings()
storage = build_storage_adapter(settings)
adapters = build_social_adapters(settings)
controller = PublishController(
settings=settings, storage=storage, social_adapters=adapters
)
publish_results = controller.publish(
relative_path=args.relative_path, caption=args.caption
)
for network, post_id in publish_results.items():
print(f"{network}: {post_id}")
if __name__ == "__main__":
main()

src/content_automation/settings.py Normal file

@@ -0,0 +1,56 @@
from __future__ import annotations
from pydantic import BaseModel, Field
from pydantic_settings import BaseSettings, SettingsConfigDict
class AppSettings(BaseSettings):
"""Application settings loaded from environment variables."""
model_config = SettingsConfigDict(
env_prefix="CONTENT_AUTOMATION_", env_nested_delimiter="__"
)
class InstagramSettings(BaseModel):
access_token: str = ""
user_id: str = ""
api_version: str = "v25.0"
class YoutubeSettings(BaseModel):
access_token: str = ""
credentials_file_path: str = ""
refresh_token: str = ""
client_id: str = ""
client_secret: str = ""
token_uri: str = "https://oauth2.googleapis.com/token"
scopes: list[str] = Field(
default_factory=lambda: ["https://www.googleapis.com/auth/youtube.upload"]
)
expiry: str = ""
category_id: str = "22"
privacy_status: str = "public"
use_resumable_upload: bool = True
resumable_chunk_size: int = 8 * 1024 * 1024
class StorageSettings(BaseModel):
class LocalSettings(BaseModel):
root_directory: str = "."
class S3Settings(BaseModel):
bucket_name: str = ""
key_prefix: str = ""
region_name: str | None = None
endpoint_url: str | None = None
public_url_base: str | None = None
url_expiration_seconds: int = 3600
backend: str = "local"
local: LocalSettings = Field(default_factory=LocalSettings)
s3: S3Settings = Field(default_factory=S3Settings)
target_social_networks: list[str] = Field(
default_factory=lambda: ["instagram", "youtube"]
)
instagram: InstagramSettings = Field(default_factory=InstagramSettings)
youtube: YoutubeSettings = Field(default_factory=YoutubeSettings)
storage: StorageSettings = Field(default_factory=StorageSettings)

tests/test_controller.py Normal file

@@ -0,0 +1,59 @@
from __future__ import annotations
import pytest
from content_automation.adapters.storage.base import StorageAdapterBase
from content_automation.controller import PublishController
from content_automation.settings import AppSettings
class FakeStorage(StorageAdapterBase):
def __init__(
self, exists_result: bool, public_url: str = "file:///tmp/video.mp4"
) -> None:
self._exists_result = exists_result
self._public_url = public_url
def exists(self, relative_path: str) -> bool:
return self._exists_result
def get_public_url(self, relative_path: str) -> str:
return self._public_url
class FakeAdapter:
def __init__(self, adapter_name: str) -> None:
self.name = adapter_name
def post_media(self, media_url: str, caption: str) -> str:
return f"{self.name}-post-id"
def test_controller_publishes_to_all_configured_networks() -> None:
settings = AppSettings.model_validate(
{"target_social_networks": ["instagram", "youtube"]}
)
controller = PublishController(
settings=settings,
storage=FakeStorage(exists_result=True),
social_adapters={
"instagram": FakeAdapter("instagram"),
"youtube": FakeAdapter("youtube"),
},
)
result = controller.publish(relative_path="video.mp4", caption="hello")
assert result == {"instagram": "instagram-post-id", "youtube": "youtube-post-id"}
def test_controller_raises_when_file_missing() -> None:
settings = AppSettings.model_validate({"target_social_networks": ["youtube"]})
controller = PublishController(
settings=settings,
storage=FakeStorage(exists_result=False),
social_adapters={"youtube": FakeAdapter("youtube")},
)
with pytest.raises(FileNotFoundError):
controller.publish(relative_path="video.mp4", caption="hello")


@@ -0,0 +1,66 @@
from __future__ import annotations
from content_automation.adapters.social.instagram import InstagramAdapter
def test_instagram_post_media_happy_path(monkeypatch) -> None:
create_call_kwargs: dict = {}
publish_call_kwargs: dict = {}
def fake_create_container(self, *args, **kwargs):
create_call_kwargs.update(kwargs)
return {"id": "creation-123"}
def fake_publish_container(self, *args, **kwargs):
publish_call_kwargs.update(kwargs)
return {"id": "published-456"}
monkeypatch.setattr(
"content_automation.adapters.social.instagram.InstagramGraphClient.create_container",
fake_create_container,
)
monkeypatch.setattr(
"content_automation.adapters.social.instagram.InstagramGraphClient.publish_container",
fake_publish_container,
)
adapter = InstagramAdapter(access_token="token", user_id="user-1")
post_id = adapter.post_media(
media_url="https://cdn.example.com/reel.mp4",
caption="hello instagram",
)
assert post_id == "published-456"
assert create_call_kwargs["user_id"] == "user-1"
assert create_call_kwargs["payload"].media_type == "REELS"
assert create_call_kwargs["payload"].video_url == "https://cdn.example.com/reel.mp4"
assert create_call_kwargs["payload"].caption == "hello instagram"
assert publish_call_kwargs["user_id"] == "user-1"
assert publish_call_kwargs["payload"].creation_id == "creation-123"
def test_instagram_post_media_falls_back_to_creation_id(monkeypatch) -> None:
def fake_create_container(self, *args, **kwargs):
return {"id": "creation-abc"}
def fake_publish_container(self, *args, **kwargs):
return {}
monkeypatch.setattr(
"content_automation.adapters.social.instagram.InstagramGraphClient.create_container",
fake_create_container,
)
monkeypatch.setattr(
"content_automation.adapters.social.instagram.InstagramGraphClient.publish_container",
fake_publish_container,
)
adapter = InstagramAdapter(access_token="token", user_id="user-1")
post_id = adapter.post_media(
media_url="https://cdn.example.com/reel.mp4",
caption="hello instagram",
)
assert post_id == "creation-abc"

tests/test_settings.py Normal file

@@ -0,0 +1,19 @@
from __future__ import annotations
from content_automation.settings import AppSettings
def test_settings_parse_nested_env(monkeypatch) -> None:
monkeypatch.setenv("CONTENT_AUTOMATION_TARGET_SOCIAL_NETWORKS", '["youtube"]')
monkeypatch.setenv("CONTENT_AUTOMATION_YOUTUBE__ACCESS_TOKEN", "yt-token")
monkeypatch.setenv("CONTENT_AUTOMATION_YOUTUBE__USE_RESUMABLE_UPLOAD", "true")
monkeypatch.setenv("CONTENT_AUTOMATION_STORAGE__BACKEND", "s3")
monkeypatch.setenv("CONTENT_AUTOMATION_STORAGE__S3__BUCKET_NAME", "bucket-a")
settings = AppSettings()
assert settings.target_social_networks == ["youtube"]
assert settings.youtube.access_token == "yt-token"
assert settings.youtube.use_resumable_upload is True
assert settings.storage.backend == "s3"
assert settings.storage.s3.bucket_name == "bucket-a"


@@ -0,0 +1,208 @@
from __future__ import annotations
import json
from datetime import UTC, datetime, timedelta
from pathlib import Path
from content_automation.adapters.social.youtube import (
YouTubeAdapter,
YouTubeDataApiClient,
YouTubeSnippet,
YouTubeStatus,
YouTubeVideoInsertPayload,
)
class FakeInsertRequest:
def __init__(self, response: dict[str, object]) -> None:
self._response = response
def execute(self) -> dict[str, object]:
return self._response
class FakeResumableRequest:
def __init__(self) -> None:
self._calls = 0
def next_chunk(self):
self._calls += 1
if self._calls == 1:
return object(), None
return None, {"id": "video-123"}
class FakeVideosResource:
def __init__(self, request) -> None:
self._request = request
def insert(self, part: str, body: dict, media_body=None):
return self._request
class FakeService:
def __init__(self, request) -> None:
self._videos = FakeVideosResource(request)
def videos(self) -> FakeVideosResource:
return self._videos
def test_resumable_upload_happy_path(monkeypatch, tmp_path: Path) -> None:
media_file = tmp_path / "clip.mp4"
media_file.write_bytes(b"abcdef")
monkeypatch.setattr(
"content_automation.adapters.social.youtube.build",
lambda *args, **kwargs: FakeService(FakeResumableRequest()),
)
monkeypatch.setattr(
"content_automation.adapters.social.youtube.MediaFileUpload",
lambda *args, **kwargs: object(),
)
adapter = YouTubeAdapter(
access_token="token",
use_resumable_upload=True,
resumable_chunk_size=3,
)
post_id = adapter.post_media(media_url=media_file.as_uri(), caption="caption")
assert post_id == "video-123"
def test_regular_upload_happy_path(monkeypatch, tmp_path: Path) -> None:
media_file = tmp_path / "clip.mp4"
media_file.write_bytes(b"abcdef")
monkeypatch.setattr(
"content_automation.adapters.social.youtube.build",
lambda *args, **kwargs: FakeService(
FakeInsertRequest({"id": "video-regular-123"})
),
)
monkeypatch.setattr(
"content_automation.adapters.social.youtube.MediaFileUpload",
lambda *args, **kwargs: object(),
)
adapter = YouTubeAdapter(
access_token="token",
use_resumable_upload=False,
)
post_id = adapter.post_media(media_url=media_file.as_uri(), caption="caption")
assert post_id == "video-regular-123"
def test_insert_video_happy_path_for_non_local_url(monkeypatch) -> None:
monkeypatch.setattr(
"content_automation.adapters.social.youtube.build",
lambda *args, **kwargs: FakeService(
FakeInsertRequest({"id": "video-insert-123"})
),
)
adapter = YouTubeAdapter(access_token="token")
post_id = adapter.post_media(
media_url="https://cdn.example.com/path/to/video.mp4", caption="caption"
)
assert post_id == "video-insert-123"
def test_client_refreshes_expired_token_before_request(monkeypatch) -> None:
refreshed_tokens: list[str] = []
def fake_refresh(self, request) -> None:
self.token = "new-token"
self.expiry = datetime.now(UTC).replace(tzinfo=None) + timedelta(minutes=30)
refreshed_tokens.append(self.token)
monkeypatch.setattr(
"content_automation.adapters.social.youtube.Credentials.refresh",
fake_refresh,
)
monkeypatch.setattr(
"content_automation.adapters.social.youtube.build",
lambda *args, **kwargs: FakeService(
FakeInsertRequest({"id": "video-refreshed"})
),
)
client = YouTubeDataApiClient(
access_token="expired-token",
category_id="22",
privacy_status="public",
refresh_token="refresh-token",
client_id="client-id",
client_secret="client-secret",
expiry="2024-01-01T00:00:00Z",
)
payload = YouTubeVideoInsertPayload(
snippet=YouTubeSnippet(
title="title",
description="description",
categoryId="22",
),
status=YouTubeStatus(privacyStatus="public"),
sourceUrl="https://cdn.example.com/video.mp4",
)
response = client.insert_video(part="snippet,status", payload=payload)
assert response["id"] == "video-refreshed"
assert refreshed_tokens == ["new-token"]
def test_obtain_credentials_from_client_secret_file(
monkeypatch, tmp_path: Path
) -> None:
captured: dict[str, object] = {}
class FakeCredentials:
def to_json(self) -> str:
return json.dumps(
{
"token": "token-123",
"refresh_token": "refresh-123",
"token_uri": "https://oauth2.googleapis.com/token",
"client_id": "client-123",
"client_secret": "secret-123",
"scopes": ["https://www.googleapis.com/auth/youtube.upload"],
}
)
class FakeFlow:
def run_local_server(self):
return FakeCredentials()
def fake_from_client_secrets_file(client_secret_file: str, scopes: list[str]):
captured["client_secret_file"] = client_secret_file
captured["scopes"] = scopes
return FakeFlow()
monkeypatch.setattr(
"content_automation.adapters.social.youtube.InstalledAppFlow.from_client_secrets_file",
fake_from_client_secrets_file,
)
client_secret_path = tmp_path / "client_secret.json"
token_output_path = tmp_path / "youtube_credentials.json"
credentials_payload = YouTubeAdapter.obtain_credentials_from_client_secret_file(
client_secret_file_path=client_secret_path,
scopes=["https://www.googleapis.com/auth/youtube.upload"],
token_output_path=token_output_path,
)
assert captured["client_secret_file"] == str(client_secret_path)
assert captured["scopes"] == ["https://www.googleapis.com/auth/youtube.upload"]
assert credentials_payload["token"] == "token-123"
assert token_output_path.exists()
assert (
json.loads(token_output_path.read_text(encoding="utf-8"))["token"]
== "token-123"
)

uv.lock generated Normal file
File diff suppressed because it is too large.