Spaces: Build error
```python
import gradio as gr
import os
import io
import PyPDF2
# from langchain.llms import OpenAIChat  # legacy import, replaced by langchain_openai below
from langchain_openai import ChatOpenAI
from langchain.chains import LLMChain
from langchain.memory import ConversationBufferMemory
from langchain.prompts import PromptTemplate  # top-level "from langchain import PromptTemplate" is deprecated
# Updated imports for Gradio components
from gradio.components import File, Textbox


def extract_text_from_pdf_binary(pdf_binary):
    """Extract the text of every page from a PDF passed in as raw bytes."""
    text = ""
    pdf_data = io.BytesIO(pdf_binary)
    reader = PyPDF2.PdfReader(pdf_data)
    for page in reader.pages:
        page_text = page.extract_text()
        if page_text:  # Check if page_text is not None or empty
            text += page_text
    return text


def format_resume_to_yaml(api_key, file_content):
    """Extract the resume text and ask the LLM to reformat it as YAML."""
    # Set the API key for OpenAI
    os.environ['OPENAI_API_KEY'] = api_key

    # Check that the uploaded file is not empty
    if not file_content:
        raise ValueError("The uploaded file is empty.")

    # Extract text from the uploaded PDF binary
    resume_text = extract_text_from_pdf_binary(file_content)

    template = """Format the provided resume to this YAML template:
---
name: ''
phoneNumbers:
- ''
websites:
- ''
emails:
- ''
dateOfBirth: ''
addresses:
- street: ''
  city: ''
  state: ''
  zip: ''
  country: ''
summary: ''
education:
- school: ''
  degree: ''
  fieldOfStudy: ''
  startDate: ''
  endDate: ''
workExperience:
- company: ''
  position: ''
  startDate: ''
  endDate: ''
skills:
- name: ''
certifications:
- name: ''
{chat_history}
{human_input}"""

    prompt = PromptTemplate(
        input_variables=["chat_history", "human_input"],
        template=template,
    )

    memory = ConversationBufferMemory(memory_key="chat_history")

    llm_chain = LLMChain(
        llm=ChatOpenAI(model="gpt-3.5-turbo"),
        prompt=prompt,
        verbose=True,
        memory=memory,
    )

    res = llm_chain.predict(human_input=resume_text)
    return res


def main():
    input_api_key = Textbox(label="Enter your OpenAI API Key")
    input_pdf_file = File(label="Upload your PDF resume", type="binary")
    output_yaml = Textbox(label="Formatted Resume in YAML")

    iface = gr.Interface(
        fn=format_resume_to_yaml,
        inputs=[input_api_key, input_pdf_file],
        outputs=output_yaml,
        title="Resume to YAML Formatter",
        description="""This tool formats an uploaded PDF resume into a YAML template
suitable for modern Applicant Tracking Systems. Please provide your OpenAI API key (you can obtain one
from [OpenAI](https://platform.openai.com/api-keys)) and upload your resume as a PDF. The backend is powered by Python and
uses the LangChain 🦜️🔗 library for processing. For more information and updates, visit my [GitHub](https://github.com/barghavanii/Resume-analytics-).""",
        allow_flagging="never",
    )

    iface.launch(debug=True, share=True)


if __name__ == "__main__":
    main()
```
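A frequent cause of a Spaces build error with this kind of app is the dependency set rather than the code itself: `langchain_openai` ships as its own package and needs a recent `langchain` alongside it. A minimal `requirements.txt` sketch along these lines (the exact version pins are an assumption, not taken from the original Space) keeps the imports above installable together:

```
# requirements.txt — illustrative pins only; adjust to the versions your Space actually targets
gradio>=4.0,<5.0
PyPDF2>=3.0
langchain>=0.1,<0.3
langchain-openai>=0.1
openai>=1.0
```

If the build log instead points at the app code, importing `PromptTemplate` from the top-level `langchain` package (changed above to `langchain.prompts`) can fail on newer releases and is worth checking first.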