# FlzAI Messenger: minimal Gradio chat UI backed by Writer/palmyra-mini.
import gradio as gr
from transformers import pipeline

# Load the text-generation model once at import time so it is shared across
# all chat turns (downloads the weights on first run).
pipe = pipeline("text-generation", model="Writer/palmyra-mini")
def chat_fn(message, history):
    """Generate one assistant reply from the Palmyra model.

    Args:
        message: The new user message.
        history: Prior turns as a list of ``(user, assistant)`` tuples, as
            produced by the Gradio Chatbot component; may be empty or None.

    Returns:
        The assistant's reply text as a string.
    """
    # Replay prior turns so the model sees the conversation context.
    # (Previously `history` was ignored, so every turn was stateless.)
    messages = []
    for user_turn, assistant_turn in history or []:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": assistant_turn})
    messages.append({"role": "user", "content": message})

    response = pipe(
        messages,
        max_new_tokens=100,
        temperature=0.7,
        do_sample=True,
    )

    # With chat-format (list-of-dicts) input, the pipeline's `generated_text`
    # is the whole conversation as a list of {"role", "content"} dicts.
    # Returning it directly (as before) handed the UI a list of dicts, not a
    # string — extract the final assistant message instead. Fall back to the
    # raw value for plain-text prompts, where `generated_text` is already str.
    generated = response[0]["generated_text"]
    if isinstance(generated, list):
        return generated[-1]["content"]
    return generated
# --- Messenger-style interface ------------------------------------------
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 💬 FlzAI Messenger")
    gr.Markdown("**Created by Felix Lan**")

    # Conversation pane.
    chatbot = gr.Chatbot(height=500, show_label=False)

    # Input row: text box plus a send button.
    with gr.Row():
        msg = gr.Textbox(
            placeholder="Type a message...",
            show_label=False,
            container=False,
            scale=4,
        )
        send_btn = gr.Button("Send", variant="primary", scale=1)

    clear_btn = gr.Button("Clear Chat")

    def respond(message, chat_history):
        """Handle one send: append the model's reply, empty the textbox."""
        # Ignore blank or whitespace-only submissions.
        if not message.strip():
            return "", chat_history
        reply = chat_fn(message, chat_history)
        chat_history.append((message, reply))
        return "", chat_history

    # Button click and pressing Enter in the textbox behave identically.
    for trigger in (send_btn.click, msg.submit):
        trigger(respond, [msg, chatbot], [msg, chatbot])

    # Reset the chat pane; skip the queue so it clears instantly.
    clear_btn.click(lambda: None, None, chatbot, queue=False)
# Script entry point: start the Gradio server only when run directly.
# (Removed a trailing "|" extraction artifact that made this line a
# SyntaxError in the original.)
if __name__ == "__main__":
    demo.launch()