"""S3-compatible object storage service."""
from __future__ import annotations

import mimetypes
from pathlib import Path

import aiobotocore.session
import structlog
from botocore.config import Config
from botocore.exceptions import ClientError

from app.config import Settings
from app.exceptions import StorageError

# Module-level structured logger, namespaced by this module's import path.
log = structlog.get_logger(__name__)


class StorageService:
    """Thin async wrapper around an S3-compatible object store.

    Each operation opens a short-lived aiobotocore client via ``_client()``,
    so instances hold no open connections between calls. Failures are
    surfaced as :class:`StorageError` (except ``object_exists``, which is
    deliberately best-effort and returns ``False`` on any failure).
    """

    def __init__(self, settings: Settings) -> None:
        self._settings = settings
        # The session is cheap and reusable; actual clients are created
        # per-operation inside `async with self._client()`.
        self._session = aiobotocore.session.get_session()

    def _client(self):  # type: ignore[no-untyped-def]
        """Return an async context manager yielding a configured S3 client."""
        return self._session.create_client(
            "s3",
            region_name=self._settings.s3_region,
            endpoint_url=self._settings.s3_endpoint_url,
            aws_access_key_id=self._settings.s3_access_key_id.get_secret_value(),
            aws_secret_access_key=self._settings.s3_secret_access_key.get_secret_value(),
            # SigV4 signing; required by AWS and most S3-compatible backends.
            config=Config(signature_version="s3v4"),
        )

    async def upload_file(
        self,
        local_path: Path,
        s3_key: str,
        content_type: str | None = None,
    ) -> int:
        """Upload a local file to S3. Returns file size in bytes.

        Args:
            local_path: File on the local filesystem to upload.
            s3_key: Destination object key.
            content_type: Explicit MIME type. When omitted it is guessed
                from the filename, falling back to ``application/octet-stream``.

        Raises:
            StorageError: If the local file is missing or the upload fails.
        """
        if not local_path.exists():
            raise StorageError(f"Local file not found: {local_path}")

        file_size = local_path.stat().st_size
        if content_type is None:
            content_type, _ = mimetypes.guess_type(str(local_path))
            content_type = content_type or "application/octet-stream"

        try:
            async with self._client() as client:
                # Pass the open file handle so the body is streamed rather
                # than loaded fully into memory; ContentLength lets the
                # server validate the transfer size.
                with local_path.open("rb") as f:
                    await client.put_object(
                        Bucket=self._settings.s3_bucket_name,
                        Key=s3_key,
                        Body=f,
                        ContentType=content_type,
                        ContentLength=file_size,
                    )
            log.info("storage.uploaded", s3_key=s3_key, size=file_size)
            return file_size
        except Exception as exc:
            raise StorageError(f"S3 upload failed for key '{s3_key}': {exc}") from exc

    async def generate_presigned_url(self, s3_key: str, expires_in: int = 900) -> str:
        """Generate a pre-signed GET URL valid for `expires_in` seconds (default 15 min).

        Raises:
            StorageError: If URL generation fails.
        """
        try:
            async with self._client() as client:
                url = await client.generate_presigned_url(
                    "get_object",
                    Params={"Bucket": self._settings.s3_bucket_name, "Key": s3_key},
                    ExpiresIn=expires_in,
                )
            return url  # type: ignore[return-value]
        except Exception as exc:
            raise StorageError(f"Failed to generate pre-signed URL for '{s3_key}': {exc}") from exc

    async def delete_object(self, s3_key: str) -> None:
        """Delete an object by key.

        Raises:
            StorageError: If the delete request fails. (S3 DeleteObject on a
            missing key succeeds, so a nonexistent object does not raise.)
        """
        try:
            async with self._client() as client:
                await client.delete_object(Bucket=self._settings.s3_bucket_name, Key=s3_key)
            log.info("storage.deleted", s3_key=s3_key)
        except Exception as exc:
            raise StorageError(f"S3 delete failed for '{s3_key}': {exc}") from exc

    async def object_exists(self, s3_key: str) -> bool:
        """Return True if `s3_key` exists in the bucket.

        Best-effort by design: any failure is reported as ``False``. Non-404
        failures (auth errors, connectivity problems, ...) are logged so they
        are not silently mistaken for a missing object.
        """
        try:
            async with self._client() as client:
                await client.head_object(Bucket=self._settings.s3_bucket_name, Key=s3_key)
            return True
        except ClientError as exc:
            # HeadObject reports "not found" via the error code; anything
            # else is an operational problem worth surfacing in the logs.
            code = exc.response.get("Error", {}).get("Code", "")
            if code not in ("404", "NotFound", "NoSuchKey"):
                log.warning("storage.head_failed", s3_key=s3_key, error=str(exc))
            return False
        except Exception as exc:
            log.warning("storage.head_failed", s3_key=s3_key, error=str(exc))
            return False

    def make_media_key(self, user_id: str, job_id: str, filename: str) -> str:
        """Build the object key for an uploaded media file.

        Only the basename of ``filename`` is used, so a caller-supplied name
        cannot inject path separators (extra key segments) into the key.
        """
        # Fix: the filename argument was previously ignored and a literal
        # placeholder string was emitted instead.
        return f"media/{user_id}/{job_id}/{Path(filename).name}"

    def make_thumbnail_key(self, user_id: str, job_id: str) -> str:
        """Build the (fixed) object key for a job's JPEG thumbnail."""
        return f"thumb/{user_id}/{job_id}/thumb.jpg"
