# DeepBuddy's AI chatbot: a simple Streamlit app that sends the user's question to a
# Claude model on AWS Bedrock and displays the model's reply.
#
# To run this app:
#
#   streamlit run <path to this file, for example, rag\streamlit_app_basic.py>
#
import os                       # read AWS settings from environment variables
from dotenv import load_dotenv  # load those settings from a local .env file
import streamlit as st          # web UI
import boto3                    # AWS SDK client for the Bedrock runtime
import json                     # serialize the request body and parse the response
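
# load_dotenv() below reads a local .env file sitting next to this script. The code
# expects it to define keys like the following (illustrative values only):
#
#   AWS_REGION=us-east-1
#   AWS_ACCESS_KEY_ID=<your access key id>
#   AWS_SECRET_ACCESS_KEY=<your secret access key>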
load_dotenv()
st.subheader("DeepBuddy's AI chatbot")
def generate_response(input_text):
    """Generate and display a response to the user's question.

    1. Access the Gen AI model from the AWS Bedrock service.
    2. Pass the user's text to the AWS Bedrock API.
    3. Get the response from the Gen AI model.
    4. Display it in the Streamlit UI.
    """
    # Initialize the Bedrock runtime client with credentials from .env
    bedrock_runtime = boto3.client(
        service_name='bedrock-runtime',
        region_name=os.getenv('AWS_REGION'),
        aws_access_key_id=os.getenv('AWS_ACCESS_KEY_ID'),
        aws_secret_access_key=os.getenv('AWS_SECRET_ACCESS_KEY')
    )
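    # Note: if these environment variables are missing, os.getenv() returns None and
    # boto3 falls back to its default credential chain (shared config, instance roles, etc.).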
    # Build the request body for Claude (Anthropic Messages API format)
    request_body = {
        "anthropic_version": "bedrock-2023-05-31",
        "max_tokens": 1000,
        "temperature": 0.2,
        "messages": [
            {"role": "user", "content": input_text}
        ]
    }
    # Make the API call to Claude
    response = bedrock_runtime.invoke_model(
        modelId="anthropic.claude-3-5-sonnet-20240620-v1:0",
        # Or another Anthropic model such as "anthropic.claude-3-sonnet-20240229-v1:0".
        # Non-Anthropic models (e.g. Meta Llama 3) expect a different request body format.
        body=json.dumps(request_body)
    )
    # Parse the JSON response body returned by Bedrock
    response_body = json.loads(response['body'].read())
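    # For Anthropic models the parsed body holds the generated text inside a list of
    # content blocks, roughly (illustrative shape; see the Bedrock/Anthropic docs):
    #   {"content": [{"type": "text", "text": "..."}], "stop_reason": "...", "usage": {...}}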
    st.info(response_body['content'][0]['text'])  # Display the response content in the UI
# Read a question from the user and generate a response once one is submitted
user_question = st.text_input("Ask any question. Press enter to submit", key="user_question")
if user_question:
    generate_response(user_question)