@action(reads=[], writes=["llm_answer"])
def ask_question(state: State, user_query: str) -> State:
"""Reply to the user's query using the webpage's content."""
# Retrieve the most relevant chunks
chunks_table = lancedb.connect("./webpages").open_table("chunks")
search_results = (
chunks_table
.search(user_query)
.select(["text", "url", "position"])
.limit(3)
.to_list()
)
relevant_content = "\n".join([r["text"] for r in search_results])
# Prompt the LLM with the relevant content
system_prompt = (
"Answer the user's questions based on the provided webpage content. "
f"WEBPAGE CONTENT:\n{relevant_content}"
)
client = openai.OpenAI()
response = client.chat.completions.create(
model="gpt-4o-mini",
messages=[
{"role": "system", "content": system_prompt},
{"role": "user", "content": user_query}
],
)
llm_answer = response.choices[0].message.content
return state.update(llm_answer=llm_answer)
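
For context, the action above could be wired into a standalone Burr application roughly as follows. This is a minimal sketch, assuming Burr's ApplicationBuilder API with a single self-looping action; the example question is illustrative and not part of the original gist.

from burr.core import ApplicationBuilder

app = (
    ApplicationBuilder()
    .with_actions(ask_question)
    .with_transitions(("ask_question", "ask_question"))  # loop back for follow-up questions
    .with_entrypoint("ask_question")
    .build()
)

# Run one step, supplying the user's question as an input to the action
_, _, final_state = app.run(
    halt_after=["ask_question"],
    inputs={"user_query": "What is this page about?"},  # hypothetical query
)
print(final_state["llm_answer"])

Because `user_query` is a parameter of the action rather than a state field, it is supplied at runtime via `inputs`, while the answer is written back to state under `llm_answer`.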