|
@@ -1,7 +1,7 @@
|
|
|
from __future__ import annotations
|
|
from __future__ import annotations
|
|
|
|
|
|
|
|
from datetime import datetime
|
|
from datetime import datetime
|
|
|
-from typing import List, Optional
|
|
|
|
|
|
|
+from typing import List, Optional, Union
|
|
|
from urllib.parse import urlparse
|
|
from urllib.parse import urlparse
|
|
|
|
|
|
|
|
import json
|
|
import json
|
|
@@ -60,6 +60,7 @@ class ModelTypeItem(BaseModel):
|
|
|
|
|
|
|
|
|
|
|
|
|
class PricesResponse(BaseModel):
|
|
class PricesResponse(BaseModel):
|
|
|
|
|
+ version: int
|
|
|
models: List[PublicPriceOut]
|
|
models: List[PublicPriceOut]
|
|
|
parsed_prices: List[ParsedPriceItem]
|
|
parsed_prices: List[ParsedPriceItem]
|
|
|
discounted_prices: List[DiscountedPriceItem]
|
|
discounted_prices: List[DiscountedPriceItem]
|
|
@@ -67,6 +68,11 @@ class PricesResponse(BaseModel):
|
|
|
discount: float = 1.0
|
|
discount: float = 1.0
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class UpToDateResponse(BaseModel):
    """Lightweight reply sent when the client's cached snapshot version
    already matches the server's current one, so no price payload is needed."""

    up_to_date: bool = True  # always True in this response shape
    version: int  # the server's current snapshot version
|
|
|
def _extract_domain(referer: Optional[str]) -> Optional[str]:
|
|
def _extract_domain(referer: Optional[str]) -> Optional[str]:
|
|
|
if not referer:
|
|
if not referer:
|
|
|
return None
|
|
return None
|
|
@@ -76,20 +82,30 @@ def _extract_domain(referer: Optional[str]) -> Optional[str]:
|
|
|
return None
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
|
|
-@router.get("/prices", response_model=PricesResponse)
|
|
|
|
|
-async def get_public_prices(request: Request, url: Optional[str] = None) -> PricesResponse:
|
|
|
|
|
|
|
+@router.get("/prices", response_model=Union[PricesResponse, UpToDateResponse])
|
|
|
|
|
+async def get_public_prices(
|
|
|
|
|
+ request: Request,
|
|
|
|
|
+ url: Optional[str] = None,
|
|
|
|
|
+) -> Union[PricesResponse, UpToDateResponse]:
|
|
|
pool = get_pool()
|
|
pool = get_pool()
|
|
|
|
|
|
|
|
|
|
# A Referer (or Origin) header is mandatory for this endpoint.
referer = request.headers.get("referer") or request.headers.get("origin")
if not referer:
    raise HTTPException(status_code=400, detail="Missing Referer header")

# Snapshot version the client already holds, read from the "version" header.
# 0 (missing, empty, or unparsable header) marks a first request and forces
# a full data response.
raw_version = request.headers.get("version", "0") or "0"
try:
    version = int(raw_version)
except ValueError:
    version = 0
|
|
|
# 记录调用来源
|
|
# 记录调用来源
|
|
|
ip = request.client.host if request.client else "unknown"
|
|
ip = request.client.host if request.client else "unknown"
|
|
|
- referer = request.headers.get("referer") or request.headers.get("origin")
|
|
|
|
|
geo = geo_resolver.resolve(ip)
|
|
geo = geo_resolver.resolve(ip)
|
|
|
try:
|
|
try:
|
|
|
await pool.execute(
|
|
await pool.execute(
|
|
|
- """
|
|
|
|
|
- INSERT INTO price_api_logs (ip, referer, org, country, city)
|
|
|
|
|
- VALUES ($1, $2, $3, $4, $5)
|
|
|
|
|
- """,
|
|
|
|
|
|
|
+ "INSERT INTO price_api_logs (ip, referer, org, country, city) VALUES ($1, $2, $3, $4, $5)",
|
|
|
ip, referer, geo.org, geo.country, geo.city,
|
|
ip, referer, geo.org, geo.country, geo.city,
|
|
|
)
|
|
)
|
|
|
except Exception:
|
|
except Exception:
|
|
@@ -99,9 +115,7 @@ async def get_public_prices(request: Request, url: Optional[str] = None) -> Pric
|
|
|
caller_domain = _extract_domain(referer)
|
|
caller_domain = _extract_domain(referer)
|
|
|
discount_rate: Optional[float] = None
|
|
discount_rate: Optional[float] = None
|
|
|
if caller_domain:
|
|
if caller_domain:
|
|
|
- row = await pool.fetchrow(
|
|
|
|
|
- "SELECT discount FROM discounts WHERE domain = $1", caller_domain
|
|
|
|
|
- )
|
|
|
|
|
|
|
+ row = await pool.fetchrow("SELECT discount FROM discounts WHERE domain = $1", caller_domain)
|
|
|
if row:
|
|
if row:
|
|
|
discount_rate = float(row["discount"])
|
|
discount_rate = float(row["discount"])
|
|
|
|
|
|
|
@@ -110,39 +124,38 @@ async def get_public_prices(request: Request, url: Optional[str] = None) -> Pric
|
|
|
return None
|
|
return None
|
|
|
return v if isinstance(v, (dict, list)) else json.loads(v)
|
|
return v if isinstance(v, (dict, list)) else json.loads(v)
|
|
|
|
|
|
|
|
|
|
+ # 读取全局版本号(0 表示尚未有任何快照)
|
|
|
|
|
+ ver_row = await pool.fetchrow("SELECT version FROM price_snapshot_version WHERE id = 1")
|
|
|
|
|
+ current_version: int = int(ver_row["version"]) if ver_row else 0
|
|
|
|
|
+
|
|
|
|
|
+ # version != 0 且与当前一致 → 无需更新(0 视为首次请求,强制返回数据)
|
|
|
|
|
+ if version != 0 and version == current_version:
|
|
|
|
|
+ return UpToDateResponse(up_to_date=True, version=current_version)
|
|
|
|
|
+
|
|
|
|
|
+ # 从 price_snapshot 读取数据
|
|
|
if url is None:
|
|
if url is None:
|
|
|
rows = await pool.fetch(
|
|
rows = await pool.fetch(
|
|
|
- """
|
|
|
|
|
- WITH latest_job AS (
|
|
|
|
|
- SELECT id FROM scrape_jobs
|
|
|
|
|
- WHERE status = 'done'
|
|
|
|
|
- ORDER BY created_at DESC LIMIT 1
|
|
|
|
|
- )
|
|
|
|
|
- SELECT DISTINCT ON (r.url) r.url, r.model_name, r.prices,
|
|
|
|
|
- r.model_info, r.rate_limits, r.tool_prices, r.scraped_at
|
|
|
|
|
- FROM scrape_results r
|
|
|
|
|
- JOIN latest_job j ON r.job_id = j.id
|
|
|
|
|
- ORDER BY r.url, r.scraped_at DESC
|
|
|
|
|
- """
|
|
|
|
|
|
|
+ "SELECT url, model_name, prices, model_info, rate_limits, tool_prices, updated_at FROM price_snapshot ORDER BY url"
|
|
|
)
|
|
)
|
|
|
else:
|
|
else:
|
|
|
rows = await pool.fetch(
|
|
rows = await pool.fetch(
|
|
|
- """
|
|
|
|
|
- SELECT url, model_name, prices, model_info, rate_limits, tool_prices, scraped_at
|
|
|
|
|
- FROM scrape_results
|
|
|
|
|
- WHERE url = $1
|
|
|
|
|
- ORDER BY scraped_at DESC LIMIT 1
|
|
|
|
|
- """,
|
|
|
|
|
|
|
+ "SELECT url, model_name, prices, model_info, rate_limits, tool_prices, updated_at FROM price_snapshot WHERE url = $1",
|
|
|
url,
|
|
url,
|
|
|
)
|
|
)
|
|
|
# No snapshot rows: a specific URL with no snapshot is a client-side 404,
# while an empty snapshot table (no url filter) means the scraper has not
# produced any data yet → 503 so clients retry later.
# NOTE(review): the previous version had two sequential `if not rows` checks —
# the 503 branch was unreachable dead code because the 404 raise always fired
# first. It also repeated the up-to-date short-circuit (version != 0 and
# version == current_version) that already returned UpToDateResponse before
# the snapshot query; that duplicate was dead and has been removed.
if not rows:
    if url is not None:
        raise HTTPException(status_code=404, detail="No price snapshot found for the given URL")
    raise HTTPException(status_code=503, detail="Price snapshot not yet available")
|
|
|
|
|
|
|
|
def _extract_type(model_info: Optional[dict]) -> Optional[List[str]]:
|
|
def _extract_type(model_info: Optional[dict]) -> Optional[List[str]]:
|
|
|
if not model_info:
|
|
if not model_info:
|
|
|
return None
|
|
return None
|
|
|
tags = model_info.get("display_tags", [])
|
|
tags = model_info.get("display_tags", [])
|
|
|
- # 只保留模型类型标签,排除系列名和能力标签(深度思考等)
|
|
|
|
|
TYPE_TAGS = {"文本生成", "图像生成", "视觉理解", "音频理解", "视频理解", "视频生成", "向量表示", "语音识别", "语音合成"}
|
|
TYPE_TAGS = {"文本生成", "图像生成", "视觉理解", "音频理解", "视频理解", "视频生成", "向量表示", "语音识别", "语音合成"}
|
|
|
result = [t for t in tags if t in TYPE_TAGS]
|
|
result = [t for t in tags if t in TYPE_TAGS]
|
|
|
return result if result else None
|
|
return result if result else None
|
|
@@ -154,7 +167,7 @@ async def get_public_prices(request: Request, url: Optional[str] = None) -> Pric
|
|
|
model_info=_j(r["model_info"]),
|
|
model_info=_j(r["model_info"]),
|
|
|
rate_limits=_j(r["rate_limits"]),
|
|
rate_limits=_j(r["rate_limits"]),
|
|
|
tool_prices=_j(r["tool_prices"]),
|
|
tool_prices=_j(r["tool_prices"]),
|
|
|
- scraped_at=r["scraped_at"],
|
|
|
|
|
|
|
+ scraped_at=r["updated_at"],
|
|
|
) for r in rows]
|
|
) for r in rows]
|
|
|
|
|
|
|
|
parsed_prices: List[ParsedPriceItem] = []
|
|
parsed_prices: List[ParsedPriceItem] = []
|
|
@@ -162,28 +175,25 @@ async def get_public_prices(request: Request, url: Optional[str] = None) -> Pric
|
|
|
|
|
|
|
|
for row in rows:
    for price_item in parse_prices(_j(row["prices"]) or {}):
        parsed_prices.append(
            ParsedPriceItem(url=row["url"], model_name=row["model_name"], **price_item)
        )
        # Discounted price: multiply when a discount applies, otherwise keep
        # the original figures (discount stays None).
        discounted = dict(price_item)
        if discount_rate is not None:
            for key in ("input_price", "output_price"):
                if discounted.get(key) is not None:
                    discounted[key] = round(discounted[key] * discount_rate, 6)
        discounted_prices.append(
            DiscountedPriceItem(
                url=row["url"],
                model_name=row["model_name"],
                discount=discount_rate,
                **discounted,
            )
        )
|
|
|
|
|
|
|
|
all_types = [
|
|
all_types = [
|
|
|
ModelTypeItem(model_name=r["model_name"], type=_extract_type(_j(r["model_info"])) or [])
|
|
ModelTypeItem(model_name=r["model_name"], type=_extract_type(_j(r["model_info"])) or [])
|
|
|
for r in rows
|
|
for r in rows
|
|
|
]
|
|
]
|
|
|
|
|
|
|
|
- return PricesResponse(models=models, parsed_prices=parsed_prices, discounted_prices=discounted_prices, types=all_types, discount=discount_rate if discount_rate is not None else 1.0)
|
|
|
|
|
|
|
+ return PricesResponse(
|
|
|
|
|
+ version=current_version,
|
|
|
|
|
+ models=models,
|
|
|
|
|
+ parsed_prices=parsed_prices,
|
|
|
|
|
+ discounted_prices=discounted_prices,
|
|
|
|
|
+ types=all_types,
|
|
|
|
|
+ discount=discount_rate if discount_rate is not None else 1.0,
|
|
|
|
|
+ )
|