Skip to content

Instantly share code, notes, and snippets.

@bsima
Last active March 26, 2026 17:54
Show Gist options
  • Select an option

  • Save bsima/b007d3581f9de49ced39260655afcc85 to your computer and use it in GitHub Desktop.

Select an option

Save bsima/b007d3581f9de49ced39260655afcc85 to your computer and use it in GitHub Desktop.
Sync pi and OpenCode configs from Parasail /v1/models and serverless pricing
#!/usr/bin/env python3
import json
import os
import sys
from pathlib import Path
from urllib.error import HTTPError, URLError
from urllib.request import Request, urlopen
# Base URL for Parasail's OpenAI-compatible API; any trailing slash is removed
# so path joins like f"{API_BASE}/models" stay clean.
API_BASE = os.environ.get("PARASAIL_BASE_URL", "https://api.parasail.io/v1").rstrip("/")
# Endpoint that lists serverless pricing/capability metadata for models.
SERVERLESS_PRICES_URL = os.environ.get(
    "PARASAIL_SERVERLESS_PRICES_URL", "https://www.saas.parasail.io/api/v1/prices/serverlessEndpoints"
)
# Preferred default model id; overridable via the PARASAIL_DEFAULT_MODEL env var.
PREFERRED_DEFAULT_MODEL = os.environ.get("PARASAIL_DEFAULT_MODEL", "parasail-glm-46v")
# Fallback limits used when the pricing feed has no context/output figures for a model.
DEFAULT_CONTEXT = 131072
DEFAULT_MAX_OUTPUT = 8192
# Ordered fallback candidates for the default model when the preferred id is
# not present in /v1/models — the first match (case-insensitive) wins.
DEFAULT_MODEL_CANDIDATES = [
    "parasail-glm-46v",
    "parasail-glm47",
    "zai-org/glm-4.7",
    "parasail-deepseek-v32",
    "deepseek-ai/deepseek-v3.2",
    "qwen/qwen3-coder-next",
    "parasail-qwen3-coder-next",
    "moonshotai/kimi-k2.5",
    "parasail-mistral-small-32-24b",
    "mistralai/mistral-small-3.2-24b-instruct-2506",
    "openai/gpt-oss-120b",
    "openai/gpt-oss-20b",
]
# Optional manual overrides for known models.
# These are mostly display-name / capability hints for models that may not have full metadata.
# Live pricing and context from SERVERLESS_PRICES_URL wins when present.
MODEL_OVERRIDES = {
    "parasail-glm47": {"name": "GLM-4.7", "image": True},
    "zai-org/glm-4.7": {"name": "GLM-4.7", "image": True},
    "parasail-glm-46v": {"name": "GLM-4.6V", "image": True},
    "parasail-deepseek-v32": {"name": "DeepSeek V3.2"},
    "deepseek-ai/deepseek-v3.2": {"name": "DeepSeek V3.2"},
    "parasail-qwen3-235b-a22b-instruct-2507": {"name": "Qwen3 235B"},
    "qwen/qwen3-235b-a22b-instruct-2507": {"name": "Qwen3 235B"},
    "parasail-kimi-k2-instruct": {"name": "Kimi K2 Instruct"},
    "parasail-kimi-k25": {"name": "Kimi K2.5"},
    "moonshotai/kimi-k2.5": {"name": "Kimi K2.5"},
    "parasail-mistral-small-32-24b": {"name": "Mistral Small 3.2 24B"},
    "mistralai/mistral-small-3.2-24b-instruct-2506": {"name": "Mistral Small 3.2 24B"},
}
def norm(value: str) -> str:
    """Return *value* trimmed of surrounding whitespace and lower-cased.

    Used to build case-insensitive lookup keys for model ids/aliases.
    """
    trimmed = value.strip()
    return trimmed.lower()
def fetch_json(url: str, api_key: str | None = None):
    """GET *url* and return its body decoded as JSON.

    When *api_key* is provided (and non-empty), it is sent as a Bearer token.
    """
    request_headers = {}
    if api_key:
        request_headers["Authorization"] = f"Bearer {api_key}"
    req = Request(url, headers=request_headers)
    with urlopen(req, timeout=45) as resp:
        raw = resp.read()
    return json.loads(raw.decode("utf-8"))
def fetch_model_ids(api_key: str) -> list[str]:
    """Return the sorted, de-duplicated model ids from GET /v1/models.

    Raises RuntimeError when the endpoint returns no usable ids.
    """
    payload = fetch_json(f"{API_BASE}/models", api_key=api_key)
    unique_ids = {entry["id"] for entry in payload.get("data", []) if entry.get("id")}
    if not unique_ids:
        raise RuntimeError("No models returned from /v1/models. Check API key and account access.")
    return sorted(unique_ids)
def fetch_serverless_price_lookup(api_key: str | None) -> dict[str, dict]:
    """
    Returns lookup keyed by normalized model id/alias/name.
    """
    data = fetch_json(SERVERLESS_PRICES_URL, api_key=api_key)
    if not isinstance(data, list):
        raise RuntimeError(f"Unexpected response from {SERVERLESS_PRICES_URL}: expected a list")

    def _is_multimodal(alias, name, tags):
        # Tag-based signal first, then well-known name markers (VL / UI-TARS).
        if any("multimodal" in tag or "vision" in tag for tag in tags):
            return True
        lowered = ((name or "").lower(), (alias or "").lower())
        return any(marker in text for text in lowered for marker in ("vl", "ui-tars"))

    lookup: dict[str, dict] = {}
    for entry in data:
        alias = entry.get("externalAlias")
        model_name = entry.get("modelName")
        deployment = entry.get("deploymentName")
        tags = [str(tag).lower() for tag in (entry.get("tags") or [])]
        compact = {
            "name": model_name or alias or deployment,
            "input_cost": entry.get("inputCost"),
            "output_cost": entry.get("outputCost"),
            "cached_cost": entry.get("cachedCost"),
            "context": entry.get("contextLength"),
            "max_output": entry.get("maxCompletionTokens"),
            "image": _is_multimodal(alias, model_name, tags),
        }
        # Index the same metadata under every identifier this entry exposes.
        for key in (alias, model_name, deployment):
            if key:
                lookup[norm(key)] = compact
    return lookup
def choose_default_model(model_ids: list[str]) -> str:
    """Pick a default model id out of *model_ids*.

    Preference order: the env-configured PREFERRED_DEFAULT_MODEL, then the
    static candidate list, then a keyword heuristic over sorted ids, and
    finally the alphabetically-first id.
    """
    canonical = {norm(model_id): model_id for model_id in model_ids}
    # Preferred model first, then the static fallbacks, all case-insensitive.
    for wanted in [PREFERRED_DEFAULT_MODEL, *DEFAULT_MODEL_CANDIDATES]:
        found = canonical.get(norm(wanted))
        if found:
            return found
    ordered = sorted(model_ids)
    for keyword in ("coder", "glm", "deepseek", "kimi", "qwen", "mistral", "gpt-oss", "llama"):
        for model_id in ordered:
            if keyword in model_id.lower():
                return model_id
    return ordered[0]
def merged_metadata(model_id: str, prices: dict[str, dict]) -> tuple[dict, bool]:
    """Merge live pricing metadata with manual overrides for *model_id*.

    Live pricing/context wins, except display names where the curated
    override is preferred for readability.  Returns (metadata, True when a
    live pricing entry matched).
    """
    key = norm(model_id)
    live = prices.get(key, {})
    manual = MODEL_OVERRIDES.get(key, {})
    merged = dict(live)
    for field, value in manual.items():
        if field == "name":
            # Overrides carry curated display names; always take them.
            merged[field] = value
        elif merged.get(field) in (None, "", []):
            # Fill only the gaps the live feed left empty.
            merged[field] = value
    return merged, bool(live)
def make_pi_model(model_id: str, prices: dict[str, dict]) -> tuple[dict, bool]:
    """Build a pi `models.json` entry for *model_id*.

    Returns (entry, True when live pricing was matched).
    """
    meta, matched = merged_metadata(model_id, prices)
    input_modalities = ["text", "image"] if meta.get("image") else ["text"]
    entry = {
        "id": model_id,
        "name": meta.get("name", model_id),
        "reasoning": bool(meta.get("reasoning", False)),
        "input": input_modalities,
        "cost": {
            "input": float(meta.get("input_cost") or 0),
            "output": float(meta.get("output_cost") or 0),
            # Map cached token pricing (when available) to cacheRead.
            "cacheRead": float(meta.get("cached_cost") or 0),
            "cacheWrite": 0,
        },
        "contextWindow": int(meta.get("context") or DEFAULT_CONTEXT),
        "maxTokens": int(meta.get("max_output") or DEFAULT_MAX_OUTPUT),
    }
    return entry, matched
def make_opencode_model(model_id: str, prices: dict[str, dict]) -> tuple[dict, bool]:
    """Build an OpenCode provider model entry for *model_id*.

    Returns (entry, True when live pricing was matched).
    """
    meta, matched = merged_metadata(model_id, prices)
    input_modalities = ["text", "image"] if meta.get("image") else ["text"]
    entry = {
        "name": meta.get("name", model_id),
        "reasoning": bool(meta.get("reasoning", False)),
        "limit": {
            "context": int(meta.get("context") or DEFAULT_CONTEXT),
            "output": int(meta.get("max_output") or DEFAULT_MAX_OUTPUT),
        },
        "modalities": {
            "input": input_modalities,
            "output": ["text"],
        },
        "cost": {
            "input": float(meta.get("input_cost") or 0),
            "output": float(meta.get("output_cost") or 0),
            # Cached token pricing (when available) maps to cache_read.
            "cache_read": float(meta.get("cached_cost") or 0),
            "cache_write": 0,
        },
    }
    return entry, matched
def write_json(path: Path, payload: dict) -> None:
    """Serialize *payload* as 2-space-indented JSON to *path*.

    Creates any missing parent directories; output ends with a newline.
    """
    parent = path.parent
    parent.mkdir(parents=True, exist_ok=True)
    text = json.dumps(payload, indent=2)
    path.write_text(text + "\n", encoding="utf-8")
def main() -> int:
    """Sync Parasail model metadata into pi and OpenCode config files.

    Reads PARASAIL_API_KEY from the environment, fetches the model list and
    (best-effort) serverless pricing, builds both config payloads, and writes
    them under the user's home directory.  Returns a process exit code
    (0 on success, 1 on missing key or model-list failure).
    """
    api_key = os.environ.get("PARASAIL_API_KEY")
    if not api_key:
        print("Error: PARASAIL_API_KEY is not set", file=sys.stderr)
        return 1
    # The model list is mandatory: any failure here aborts the run.
    # HTTPError is checked before URLError since it is the more specific subclass.
    try:
        model_ids = fetch_model_ids(api_key)
    except HTTPError as exc:
        body = exc.read().decode("utf-8", errors="ignore")
        print(f"HTTP error while fetching /v1/models: {exc.code}\n{body}", file=sys.stderr)
        return 1
    except URLError as exc:
        print(f"Network error while fetching /v1/models: {exc}", file=sys.stderr)
        return 1
    except Exception as exc:
        print(f"Failed to fetch /v1/models: {exc}", file=sys.stderr)
        return 1
    # Pricing is best-effort: on any failure, continue with overrides/defaults.
    try:
        prices = fetch_serverless_price_lookup(api_key)
    except Exception as exc:
        print(
            f"Warning: could not fetch {SERVERLESS_PRICES_URL}. Continuing with overrides/defaults only.\n{exc}",
            file=sys.stderr,
        )
        prices = {}
    default_model = choose_default_model(model_ids)
    # Build both config shapes in one pass, counting models that matched
    # live pricing for the summary printed at the end.
    pi_models = []
    opencode_models = {}
    priced_model_count = 0
    for model_id in model_ids:
        pi_model, pi_priced = make_pi_model(model_id, prices)
        op_model, op_priced = make_opencode_model(model_id, prices)
        pi_models.append(pi_model)
        opencode_models[model_id] = op_model
        if pi_priced or op_priced:
            priced_model_count += 1
    # pi config: "PARASAIL_API_KEY" here is the env-var *name*, resolved by pi.
    pi_config = {
        "providers": {
            "parasail": {
                "baseUrl": API_BASE,
                "apiKey": "PARASAIL_API_KEY",
                "api": "openai-completions",
                "models": pi_models,
            }
        }
    }
    # OpenCode config: uses its "{env:...}" placeholder syntax for the key.
    opencode_config = {
        "$schema": "https://opencode.ai/config.json",
        "provider": {
            "parasail": {
                "npm": "@ai-sdk/openai-compatible",
                "name": "Parasail",
                "options": {
                    "baseURL": API_BASE,
                    "apiKey": "{env:PARASAIL_API_KEY}",
                },
                "models": opencode_models,
            }
        },
        "model": f"parasail/{default_model}",
    }
    pi_path = Path.home() / ".pi" / "agent" / "models.json"
    opencode_path = Path.home() / ".config" / "opencode" / "opencode.json"
    write_json(pi_path, pi_config)
    write_json(opencode_path, opencode_config)
    print(f"Wrote {pi_path}")
    print(f"Wrote {opencode_path}")
    print(f"Default model: {default_model}")
    print(f"Pricing/context matched for {priced_model_count}/{len(model_ids)} models")
    print("Run this script again any time to refresh models and pricing.")
    return 0
if __name__ == "__main__":
raise SystemExit(main())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment