# /var/www/html/govbot/app/services/timeline_service.py
from __future__ import annotations
from typing import List, Dict, Any, Optional
from datetime import datetime, date, timezone
from app.services.supabase_service import get_supabase

def _as_motie_url(article_id: str) -> str:
    return f"https://www.motie.go.kr/kor/article/ATCL6e90bb9de/{article_id}/view?"

def _as_moef_url(bbs_id: str, post_id: str) -> str:
    return f"https://www.moef.go.kr/nw/notice/hrDetail.do?searchBbsId1={bbs_id}&searchNttId1={post_id}"

def _parse_posted(v) -> date:
    if isinstance(v, date):
        return v
    if not v:
        return date.min
    s = str(v)[:10].replace(".", "-").strip()
    try:
        y, m, d = s.split("-")
        return date(int(y), int(m), int(d))
    except Exception:
        return date.min

def _parse_created(v) -> datetime:
    if not v:
        return datetime.min.replace(tzinfo=timezone.utc)
    s = str(v).strip().replace("Z", "+00:00")
    try:
        dt = datetime.fromisoformat(s)
    except Exception:
        return datetime.min.replace(tzinfo=timezone.utc)
    if dt.tzinfo is None:
        dt = dt.replace(tzinfo=timezone.utc)
    else:
        dt = dt.astimezone(timezone.utc)
    return dt

def _to_str(v, default=""):
    try:
        return v if isinstance(v, str) else (str(v) if v is not None else default)
    except Exception:
        return default

def _parse_input_date(s: Optional[str]) -> Optional[date]:
    if not s:
        return None
    try:
        y, m, d = s.split("-")
        return date(int(y), int(m), int(d))
    except Exception:
        return None

def _fetch_motie_items(sb, limit: int) -> List[Dict[str, Any]]:
    """Fetch up to *limit* MOTIE rows (newest first) as normalized timeline items."""
    rows = (
        sb.table("motie_id")
          .select("id,created_at,title,posted_at")
          .order("posted_at", desc=True)
          .limit(limit)
          .execute().data or []
    )
    items: List[Dict[str, Any]] = []
    for r in rows:
        item_id = _to_str(r.get("id"))
        posted_at = r.get("posted_at")
        created_at = r.get("created_at")
        items.append({
            "source": "MOTIE",
            "title": _to_str(r.get("title") or "산업부 인사발령"),
            "tag": None,  # MOTIE rows carry no tag
            "url": _as_motie_url(item_id) if item_id else "",
            "posted_at": posted_at,
            "created_at": created_at,
            # Pre-parsed keys (stripped before returning) used for filter/sort.
            "_posted_date": _parse_posted(posted_at),
            "_created_dt": _parse_created(created_at),
        })
    return items

def _fetch_moef_items(sb, limit: int) -> List[Dict[str, Any]]:
    """Fetch up to *limit* MOEF rows (newest first) as normalized timeline items."""
    rows = (
        sb.table("moef_id")
          .select('id,created_at,"bbsId","postId",title,tag,posted_at')
          .order("posted_at", desc=True)
          .limit(limit)
          .execute().data or []
    )
    items: List[Dict[str, Any]] = []
    for r in rows:
        bbs = _to_str(r.get("bbsId"))
        pid = _to_str(r.get("postId"))
        tag = _to_str(r.get("tag") or "")
        posted_at = r.get("posted_at")
        created_at = r.get("created_at")
        items.append({
            "source": "MOEF",
            "title": _to_str(r.get("title") or "기재부 인사발령"),
            "tag": (tag or None),
            # Both identifiers are required to build a valid detail URL.
            "url": _as_moef_url(bbs, pid) if (bbs and pid) else "",
            "posted_at": posted_at,
            "created_at": created_at,
            "_posted_date": _parse_posted(posted_at),
            "_created_dt": _parse_created(created_at),
        })
    return items

def _filter_items(
    pool: List[Dict[str, Any]],
    title_query: Optional[str],
    start_date: Optional[str],
    end_date: Optional[str],
) -> List[Dict[str, Any]]:
    """Apply the title/tag keyword filter and the inclusive posted-at date range."""
    # Keyword filter: case-insensitive substring match on title OR tag.
    if title_query:
        q = title_query.lower().strip()
        pool = [
            it for it in pool
            if (it["title"] and q in it["title"].lower())
               or (it["tag"] and q in str(it["tag"]).lower())
        ]

    # Date-range filter on posted_at; rows without a parseable date are excluded.
    sd = _parse_input_date(start_date)
    ed = _parse_input_date(end_date)
    if sd or ed:
        def _in_range(d: date) -> bool:
            if d == date.min:  # sentinel for missing/unparseable posted_at
                return False
            if sd and d < sd:
                return False
            if ed and d > ed:
                return False
            return True
        pool = [it for it in pool if _in_range(it["_posted_date"])]
    return pool

def get_timeline(
    source: str = "all",
    page: int = 1,
    page_size: int = 20,
    title_query: Optional[str] = None,
    start_date: Optional[str] = None,
    end_date: Optional[str] = None,
    internal_fetch: Optional[int] = None,
) -> Dict[str, Any]:
    """
    Build a merged, filtered, paginated timeline of MOTIE/MOEF notices.

    - source: 'all' / 'MOTIE' / 'MOEF' (case-insensitive)
    - title_query: 제목 키워드(부분일치, 대소문자 무시) — matched on title and tag
    - start_date, end_date: 'YYYY-MM-DD' (게시일 posted_at 기준, inclusive)
    - page/page_size: 페이지네이션 (1-based)
    - internal_fetch: per-source fetch size; auto-sized when None

    Returns {"rows", "page", "page_size", "has_more", "total_estimate"}.
    total_estimate counts only the fetched pool, not the full tables.
    """
    sb = get_supabase()
    pool: List[Dict[str, Any]] = []

    # 내부 fetch 규모: fetch more when filters may discard many rows.
    if internal_fetch is None:
        has_filters = bool(title_query or start_date or end_date)
        internal_fetch = 1000 if has_filters else max(page_size * 10, 200)

    # Fully case-insensitive source selection ('Motie' etc. now accepted);
    # empty/None source still selects nothing, as before.
    src = (source or "").lower()
    if src in ("all", "motie"):
        pool.extend(_fetch_motie_items(sb, internal_fetch))
    if src in ("all", "moef"):
        pool.extend(_fetch_moef_items(sb, internal_fetch))

    pool = _filter_items(pool, title_query, start_date, end_date)

    # Newest first: posted date, then created timestamp as tiebreaker.
    pool.sort(key=lambda it: (it["_posted_date"], it["_created_dt"]), reverse=True)

    # 1-based pagination; clamp degenerate values.
    page = max(1, int(page or 1))
    page_size = max(1, int(page_size or 20))
    start = (page - 1) * page_size
    end = start + page_size

    # Strip the internal '_'-prefixed sort keys from the returned rows.
    rows = [{
        "source": it["source"],
        "title": it["title"],
        "tag": it["tag"],
        "url": it["url"],
        "posted_at": it["posted_at"],
        "created_at": it["created_at"],
    } for it in pool[start:end]]

    return {
        "rows": rows,
        "page": page,
        "page_size": page_size,
        "has_more": end < len(pool),
        "total_estimate": len(pool),
    }
