import gradio as gr
from huggingface_hub import InferenceClient
from pypdf import PdfReader

# Shared client for the hosted Zephyr chat model.
client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")


def extract_text_from_pdf(pdf_file):
    """Extract and concatenate the text of every page in *pdf_file*.

    Returns the combined page text. On parse failure the exception message
    is returned instead (best-effort: the caller feeds the result straight
    into the prompt, so we never raise here).
    """
    try:
        reader = PdfReader(pdf_file)
        # extract_text() returns None for pages with no text layer; the old
        # `text += page.extract_text()` raised TypeError there and threw away
        # everything extracted so far. Coalesce None to "" instead.
        return "".join(page.extract_text() or "" for page in reader.pages)
    except Exception as e:
        return str(e)


def respond(
    message,
    history,
    system_message,
    job_description,
    resume_file,
    max_tokens,
    temperature,
    top_p,
):
    """Stream a chat completion, yielding the growing response text.

    Args:
        message: the new user message.
        history: list of (user, assistant) tuples from earlier turns.
        system_message: base system prompt; job description and resume
            text are appended to it.
        job_description: free-text job description to ground the interview.
        resume_file: optional path/file of a PDF resume, or None.
        max_tokens / temperature / top_p: sampling parameters passed
            through to the model.

    Yields:
        The accumulated assistant response after each streamed token;
        on failure, a single "Error: ..." string.
    """
    resume_text = ""
    if resume_file is not None:
        resume_text = extract_text_from_pdf(resume_file)

    system_message_with_info = f"{system_message}\nJob Description: {job_description}\nResume: {resume_text}"

    # Rebuild the full conversation: system prompt, prior turns, new message.
    messages = [{"role": "system", "content": system_message_with_info}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # Loop variable renamed from `message` to `chunk`: the old code
        # shadowed (and clobbered) the user-message parameter mid-stream.
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            if chunk.choices and chunk.choices[0].delta and chunk.choices[0].delta.content:
                response += chunk.choices[0].delta.content
                yield response
            # Content-less chunks (role-only first chunk, finish chunk) are
            # normal in a stream. The old code yielded an error string for
            # them; since the caller keeps only the LAST yield, a trailing
            # empty chunk replaced the whole answer. Just skip them.
    except Exception as e:
        yield f"Error: {str(e)}"


initial_system_message = "You are a recruiter. Your task is to interview the candidate based on the job description and resume provided. After the interview, provide feedback on the candidate's strengths and areas for improvement."
with gr.Blocks() as demo:
    gr.Markdown("## AI Recruiter Chatbot")

    with gr.Row():
        message = gr.Textbox(lines=2, placeholder="Enter your message", label="Message")

    history = gr.State([])  # running list of (user, assistant) turns
    job_description = gr.Textbox(lines=10, placeholder="Enter the job description", label="Job Description")
    resume_file = gr.File(label="Upload your resume (PDF)")
    max_tokens = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
    temperature = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
    top_p = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
    response = gr.Textbox(label="Response")
    btn = gr.Button("Submit")

    def chat_flow(message, job_description, resume_file, max_tokens, temperature, top_p, history):
        """Drain the streaming generator, keep the last yield, and record the turn."""
        stream = respond(
            message,
            history,
            initial_system_message,
            job_description,
            resume_file,
            max_tokens,
            temperature,
            top_p,
        )
        answer = ""
        for partial in stream:
            answer = partial
        history.append((message, answer))
        return answer, history

    btn.click(
        chat_flow,
        [message, job_description, resume_file, max_tokens, temperature, top_p, history],
        [response, history],
    )

if __name__ == "__main__":
    demo.launch()