@TheLustriVA
Forked from thorhojhus/openrouter.py
Created January 20, 2024 06:16
Simple model picker for OpenRouter
import openai
from colorama import Fore

openai.api_base = "https://openrouter.ai/api/v1"
openai.api_key = ""  # set your own OpenRouter API key here

system = "Your name is Bob. You are an intelligent and useful assistant that helps the user solve problems."
messages = [{"role": "system", "content": system}]

# Hard-coded menu of OpenRouter models: key -> model id, context window, and pricing status.
models = {
    "1": {"name": "gryphe/mythomist-7b", "context_length": 32768, "is_free": True},
    "2": {"name": "openchat/openchat-7b", "context_length": 8192, "is_free": True},
    "3": {"name": "undi95/toppy-m-7b", "context_length": 32768, "is_free": True},
    "4": {"name": "teknium/openhermes-2.5-mistral-7b", "context_length": 4096, "is_free": False},
    "5": {"name": "nousresearch/nous-capybara-34b", "context_length": 32768, "is_free": False},
    "6": {"name": "lizpreciatior/lzlv-70b-fp16-hf", "context_length": 4096, "is_free": False},
    "7": {"name": "alpindale/goliath-120b", "context_length": 6144, "is_free": False},
}
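
# Optional: instead of the hard-coded menu above, a menu could be built from
# OpenRouter's public model listing at https://openrouter.ai/api/v1/models.
# This is a minimal, hypothetical sketch (the response field names used here,
# e.g. "id", "context_length" and "pricing", are assumptions and may need
# adjusting); it is not wired into the script below.
import requests

def fetch_models():
    resp = requests.get("https://openrouter.ai/api/v1/models", timeout=10)
    resp.raise_for_status()
    menu = {}
    for i, m in enumerate(resp.json().get("data", []), start=1):
        pricing = m.get("pricing", {})
        is_free = str(pricing.get("prompt", "")) == "0" and str(pricing.get("completion", "")) == "0"
        menu[str(i)] = {
            "name": m.get("id", ""),
            "context_length": m.get("context_length", 4096),
            "is_free": is_free,
        }
    return menu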
def mk_completion(model, messages):
    # Request a streamed chat completion from OpenRouter using the legacy
    # (pre-1.0) openai client interface.
    return openai.ChatCompletion.create(
        model=model["name"],
        messages=messages,
        stream=True,
        max_tokens=model["context_length"],  # decrease if needed
        temperature=0.7,
    )
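
# Note: openai.ChatCompletion was removed in openai>=1.0. If you are on the
# newer client, a rough equivalent sketch (untested here, adjust as needed) is:
#
#   from openai import OpenAI
#   client = OpenAI(base_url="https://openrouter.ai/api/v1", api_key="...")
#   stream = client.chat.completions.create(
#       model=model["name"], messages=messages, stream=True,
#       max_tokens=model["context_length"], temperature=0.7,
#   )
#   for chunk in stream:
#       token = chunk.choices[0].delta.content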
def select_model():
    # Print the menu and return the chosen model; falls back to "1" on bad input.
    print(Fore.YELLOW + "Select a model (default 1):" + Fore.WHITE)
    for key, value in models.items():
        model_name = value["name"].split('/')[-1]
        status = "Free" if value["is_free"] else "Paid"
        print(f"{key}: {model_name} (Context Length: {value['context_length']}, {status})")
    choice = input(Fore.GREEN + "\nEnter model number: " + Fore.WHITE)
    return models.get(choice, models["1"])
model = select_model()

# Simple REPL: send the running conversation, stream the reply token by token,
# then append the assistant's full message to the history.
while True:
    message = input(Fore.GREEN + "\nMessage: " + Fore.WHITE)
    messages += [{"role": "user", "content": str(message)}]
    completion = mk_completion(model, messages)
    tokens = []
    print(Fore.CYAN + "Assistant: " + Fore.WHITE, end="")
    for chunk in completion:
        try:
            token = chunk['choices'][0]['delta']["content"]
            print(token, end="", flush=True)
            tokens.append(token)
        except (KeyError, IndexError):
            # The final chunk carries no "content" delta, which ends the stream.
            print()
            break
    assistant_message = "".join(tokens)
    messages += [{"role": "assistant", "content": assistant_message}]
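
As written, the loop only ends with Ctrl-C, which leaves a traceback. One optional addition (not part of the original gist; the "/quit" command name is an arbitrary choice) is to wrap the prompt so the chat exits cleanly, for example by replacing the input line inside the while loop with:

    try:
        message = input(Fore.GREEN + "\nMessage: " + Fore.WHITE)
    except (KeyboardInterrupt, EOFError):
        print(Fore.WHITE + "\nBye.")
        break
    if message.strip().lower() in ("/quit", "/exit"):
        break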