def highlight_answer(context, answer):
    """
    Highlight the answer in the given context.

    Parameters:
    - context (str): The context in which the answer is found.
    - answer (str): The answer to be highlighted.

    Returns:
    - str: The context with the answer highlighted by '<h>' tags.

    Example:
    >>> context = 'The quick brown fox jumps over the lazy dog.'
    >>> answer = 'fox'
    >>> highlight_answer(context, answer)
    'The quick brown <h> fox <h> jumps over the lazy dog.'
    """
    context_splits = context.split(answer)

    # Re-join the pieces, wrapping each occurrence of the answer in '<h>' tags,
    # so the output matches the docstring example.
    text = ""
    for index, split in enumerate(context_splits):
        text += split
        if index < len(context_splits) - 1:
            text += '<h> ' + answer + ' <h>'

    return text

def prepare_instruction(answer_highlighted_context):
    """
    Prepare an instruction prompt for generating a question.

    Parameters:
    - answer_highlighted_context (str): The context with the answer highlighted by '<h>' tags.

    Returns:
    - str: The instruction prompt string.

    Example:
    >>> answer_highlighted_context = 'The quick brown <h> fox <h> jumps over the lazy dog.'
    >>> prepare_instruction(answer_highlighted_context)
    'Generate a question whose answer is highlighted by <h> from the context delimited by the triple backticks.\\n context:\\n ```\\n The quick brown <h> fox <h> jumps over the lazy dog.\\n ```\\n '
    """
    instruction_prompt = f"""Generate a question whose answer is highlighted by <h> from the context delimited by the triple backticks.
 context:
 ```
 {answer_highlighted_context}
 ```
 """
    return instruction_prompt

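# Minimal sketch (illustrative only; the sentence is the docstring example, not
# app data): composing the two helpers turns a (context, answer) pair into the
# prompt the model below is queried with.
example_prompt = prepare_instruction(
    highlight_answer(
        context='The quick brown fox jumps over the lazy dog.',
        answer='fox',
    )
)
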
from transformers import pipeline
import gradio as gr

# Question-generation model fine-tuned on SQuAD; device_map='auto' lets
# transformers place it on a GPU when one is available.
pipe = pipeline('text2text-generation', model='mohammedaly2222002/t5-small-squad-qg-v2',
                device_map='auto')

def processed(context, answer, num):
    # Highlight the answer inside the context and build the instruction prompt.
    answer_highlighted_context = highlight_answer(context=context, answer=answer)
    prompt = prepare_instruction(answer_highlighted_context)

    # Diverse beam search: 5 beams split into 5 groups so the returned questions
    # differ; num must not exceed num_beams (5).
    outputs = pipe(prompt, num_return_sequences=int(num), num_beams=5,
                   num_beam_groups=5, diversity_penalty=1.0)

    result = "Generated questions are:\n\n"
    for number, output in enumerate(outputs, start=1):
        result += f"{number}. {output['generated_text']}\n"
    return result

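# For reference (left as a comment so the Space does not run an extra generation
# at startup): the text2text-generation pipeline returns a list of dicts with a
# 'generated_text' key, which is what processed() reads above. A direct call that
# bypasses the Gradio UI could look like:
#
#   candidates = pipe(example_prompt, num_return_sequences=3, num_beams=5,
#                     num_beam_groups=5, diversity_penalty=1.0)
#   questions = [c['generated_text'] for c in candidates]
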
iface = gr.Interface(
    processed,                                 # function that generates the questions
    inputs=[
        gr.Textbox(label="Context"),
        gr.Textbox(label="Answer"),
        gr.Textbox(label="Number of questions"),
    ],
    outputs="text",                            # generated questions as plain text
)
iface.launch()