Skip to content

Instantly share code, notes, and snippets.

@InTheCloudDan
Created November 27, 2024 13:02
Show Gist options
  • Save InTheCloudDan/bdf4e636fbd0f292476a86f903accabd to your computer and use it in GitHub Desktop.
import os

import ldclient
import prompty
import prompty.openai
from ldai.client import LDAIClient
from ldclient import Context
from ldclient.config import Config
from prompty.core import Prompty, ModelSettings, PropertySettings, TemplateSettings, FilePath
from prompty.tracer import Tracer, PromptyTracer, console_tracer
# Initialize the LaunchDarkly SDK client. Read the SDK key from the
# environment when available instead of hard-coding it; the original
# "CHANGE_ME" placeholder remains the fallback for backward compatibility.
ldclient.set_config(Config(os.environ.get("LAUNCHDARKLY_SDK_KEY", "CHANGE_ME")))
aiclient = LDAIClient(ldclient.get())

# Evaluation context describing the end user requesting the AI config.
context = Context.builder("context-key-123abc") \
    .set("firstName", "Sandy") \
    .set("lastName", "Smith") \
    .set("email", "[email protected]") \
    .set("groups", ["Google", "Microsoft"]) \
    .build()

# Served when the 'prompty-config' AI config cannot be evaluated
# (e.g. flag missing or SDK not initialized). NOTE: its prompt list is
# empty, so downstream code must guard before indexing into the prompt.
fallback_value = {
    'model': {'modelId': 'my-default-model'},
    'config': {
        "prompt": []
    },
    'enabled': True,
}

# Fetch the AI model configuration for this context; the custom variable
# is interpolated into the prompt template by the AI SDK.
config_value = aiclient.model_config(
    'prompty-config',
    context,
    fallback_value,
    {'example_custom_variable': 'example_custom_value'},
)
# Register a console tracer so prompty emits trace output to stdout.
Tracer.add("console", console_tracer)

# Register the JSON file tracer under its own distinct name. The original
# registered it as "console" as well, which collides with the console
# tracer added above; each tracer needs a unique key.
json_tracer = PromptyTracer(output_dir="./output")
Tracer.add("PromptyTracer", json_tracer.tracer)

# Guard against an empty prompt list: the fallback config serves
# "prompt": [], and indexing [0] on it would raise IndexError.
prompt_messages = [item.to_dict() for item in config_value.config.prompt]
if not prompt_messages:
    raise ValueError("AI config 'prompty-config' returned no prompt messages")

# Build and execute the headless prompty chat prompt using the model id
# served by the AI config.
prompt = prompty.headless(
    api="chat",
    configuration={"type": "openai", "name": config_value.config.model["modelId"]},
    content=prompt_messages[0],
)
response = prompty.execute(prompt)
print(response)
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment