temp-file
import streamlit as st
import boto3
import json

# import random
# import string

# Create a boto3 session from the named AWS profile and a Lambda client in its default region
region = boto3.Session(profile_name="interbank-poc").region_name
session = boto3.Session(profile_name="interbank-poc", region_name=region)
lambda_client = session.client("lambda")

st.title("Interbank Assistant")

sessionId = ""
# sessionId = ''.join(random.choices(string.ascii_uppercase + string.digits, k=12))
print(sessionId)

# Initialize chat history
if "messages" not in st.session_state:
    st.session_state.messages = []

# Initialize session id
if "sessionId" not in st.session_state:
    st.session_state["sessionId"] = sessionId

# Display chat messages from history on app rerun
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.markdown(message["content"])

# React to user input
if prompt := st.chat_input("En que te puedo ayudar?"):  # "How can I help you?"
    # Display user input in chat message container
    question = prompt
    st.chat_message("user").markdown(question)

    # Call the Lambda function to get a response from the model
    payload = json.dumps(
        {
            "question": (
                "<br><br>You are the virtual assistant for the martech team at Interbank, "
                "the number one financial institution in Peru. <br>Your job is to provide "
                "answers to this team regarding the usage of Adobe Experience Manager. "
                "<br>Do not hallucinate any answer; always provide answers contained in "
                "the knowledge base. <br>Always ask a follow-up question. <br>The client "
                "needs the answer to this query: "
                + prompt
                + "<br><br>Return the response in Markdown format."
            ),
            "sessionId": st.session_state["sessionId"],
        }
    )
    print(payload)

    result = lambda_client.invoke(FunctionName="InvokeKnowledgeBase", Payload=payload)
    result = json.loads(result["Payload"].read().decode("utf-8"))
    print(result)

    answer = result["body"]["answer"]
    sessionId = result["body"]["sessionId"]
    st.session_state["sessionId"] = sessionId

    # Add user input to chat history
    st.session_state.messages.append({"role": "user", "content": question})

    # Display assistant response in chat message container
    with st.chat_message("assistant"):
        st.markdown(answer)

    # Add assistant response to chat history
    st.session_state.messages.append({"role": "assistant", "content": answer})