"""Storage utility functions for uploading files, JSON data, and downloading content.
Uses GCP Cloud Storage with public URLs (no expiry). Function names kept as upload_to_s3 for backward compatibility."""

import asyncio
import json
from urllib.parse import quote

from google.cloud import storage
from google.oauth2 import service_account
from loguru import logger

from utils import constants

_gcs_client = None
_credentials = None


def _get_credentials():
    """Return the cached service-account credentials, loading them on first use."""
    global _credentials
    if _credentials is not None:
        return _credentials
    _credentials = service_account.Credentials.from_service_account_file(
        constants.GCP_CREDENTIALS_PATH
    )
    return _credentials


def _get_gcs_client():
    """Return the cached GCS client, constructing it lazily on first call."""
    global _gcs_client
    if _gcs_client is not None:
        return _gcs_client
    _gcs_client = storage.Client(
        project=constants.GOOGLE_CLOUD_PROJECT,
        credentials=_get_credentials(),
    )
    return _gcs_client


def _get_bucket():
    """Return a handle to the bucket named by ``constants.GCS_BUCKET_NAME``."""
    client = _get_gcs_client()
    return client.bucket(constants.GCS_BUCKET_NAME)


def _public_url(bucket_name: str, key: str) -> str:
    """Return the public GCS URL (no expiry; bucket must allow public read)."""
    return f"https://storage.googleapis.com/{bucket_name}/{key}"


async def upload_to_s3(file_data, s3_key, content_type="application/octet-stream"):
    """Upload raw file data to GCS under *s3_key* and return its public URL (no expiry).

    The blocking client call is pushed to a worker thread so the event loop
    stays responsive. Failures are logged and re-raised to the caller.
    """
    try:
        bucket = _get_bucket()
        target = bucket.blob(s3_key)
        await asyncio.to_thread(
            target.upload_from_string, file_data, content_type=content_type
        )
        logger.debug(f"GCS upload: {s3_key} ({len(file_data)} bytes)")
        return _public_url(bucket.name, s3_key)
    except Exception as e:
        logger.error(f"GCS upload failed for {s3_key}: {e}")
        raise


async def download_and_upload_to_s3(url, s3_key, content_type):
    """Fetch *url* over HTTP and re-upload the payload to GCS; return the public URL.

    A non-200 response aborts with an exception; any failure is logged and
    re-raised.
    """
    try:
        # aiohttp imported lazily so the module loads without it installed
        import aiohttp

        async with aiohttp.ClientSession() as session:
            async with session.get(url) as response:
                if response.status != 200:
                    raise Exception(f"Failed to download from {url}: {response.status}")
                payload = await response.read()
                return await upload_to_s3(payload, s3_key, content_type)
    except Exception as e:
        logger.error(f"Download and upload failed: {e}")
        raise


async def save_json_to_s3(data, s3_key):
    """Serialize *data* as pretty-printed UTF-8 JSON and upload it to GCS.

    Args:
        data: Any ``json.dumps``-serializable object.
        s3_key: Destination object key in the configured bucket.

    Returns:
        Public URL of the uploaded JSON object (no expiry).

    Raises:
        TypeError: If *data* is not JSON-serializable.
        Exception: Re-raised from the underlying GCS upload on failure.
    """
    try:
        # ensure_ascii=False keeps non-ASCII text readable in the stored file.
        # `json` is now imported at module level rather than per call.
        payload = json.dumps(data, ensure_ascii=False, indent=2).encode('utf-8')
        return await upload_to_s3(payload, s3_key, "application/json")
    except Exception as e:
        logger.error(f"JSON upload failed: {e}")
        raise
