import gradio as gr
from transformers import pipeline

# Create a text-generation pipeline using GPT-2
generator = pipeline('text-generation', model='gpt2')

def generate_text(prompt):
    # Generate text with a maximum length of 50 tokens
    generated = generator(prompt, max_length=50, num_return_sequences=1)
    return generated[0]['generated_text']

# Create a Gradio interface with one text input and one text output
iface = gr.Interface(
    fn=generate_text,
    inputs="text",
    outputs="text",
    title="Simple LLM with Hugging Face & Gradio",
    description="Enter a prompt and get text generated by a basic GPT-2 model."
)

# Launch the interface
iface.launch()
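Running the script starts a local Gradio server (by default at http://127.0.0.1:7860) where you can type a prompt and see the generated text. If you want to share the demo outside your machine, Gradio's launch() accepts a share flag that creates a temporary public link; the line below is a minimal variation of the last line above:

iface.launch(share=True)  # optional: expose the demo via a temporary public URL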