
import gradio as gr
import ctranslate2
from transformers import AutoTokenizer
from huggingface_hub import snapshot_download
from codeexecutor import postprocess_completion, get_majority_vote
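# Note: codeexecutor is assumed to be a local helper module shipped alongside this
# app (not a published package). It is expected to provide postprocess_completion,
# which extracts a final answer from a generated solution, and get_majority_vote,
# which returns the most common answer in a list.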

# Load the tokenizer and the quantized CTranslate2 model used for generation
model_prompt = "Solve the following mathematical problem: "
tokenizer = AutoTokenizer.from_pretrained("AI-MO/NuminaMath-7B-TIR")
model_path = snapshot_download(repo_id="Makima57/deepseek-math-Numina")
generator = ctranslate2.Generator(model_path, device="cpu", compute_type="int8")
iterations = 10  # number of samples generated per question for majority voting

# Generate a single prediction for the given question.
# Sampling is enabled so that repeated calls return different completions;
# with the default greedy decoding every iteration would produce the same
# output and majority voting would be pointless. The exact sampling values
# below are illustrative and can be tuned.
def get_prediction(question):
    input_text = model_prompt + question
    input_tokens = tokenizer.tokenize(input_text)
    results = generator.generate_batch(
        [input_tokens],
        max_length=512,
        sampling_topk=50,
        sampling_temperature=0.7,
    )
    output_tokens = results[0].sequences[0]
    predicted_answer = tokenizer.convert_tokens_to_string(output_tokens)
    return predicted_answer

# Run the model several times and majority-vote over the extracted answers
def majority_vote(question, num_iterations=10):
    all_predictions = []
    all_answers = []
    for _ in range(num_iterations):
        prediction = get_prediction(question)
        answer = postprocess_completion(prediction, True, True)
        all_predictions.append(prediction)
        all_answers.append(answer)
    majority_voted_pred = max(set(all_predictions), key=all_predictions.count)
    majority_voted_ans = get_majority_vote(all_answers)
    return majority_voted_pred, all_predictions, majority_voted_ans

# Gradio callback: returns the question, all sampled answers, and the voted results
def gradio_interface(question, correct_answer):
    final_prediction, all_predictions, final_answer = majority_vote(question, iterations)
    return {
        "Question": question,
        f"Generated Answers ({iterations} iterations)": all_predictions,
        "Majority-Voted Prediction": final_prediction,
        "Correct solution": correct_answer,
        "Majority answer": final_answer
    }

# Gradio app setup
interface = gr.Interface(
    fn=gradio_interface,
    inputs=[
        gr.Textbox(label="Math Question"),
        gr.Textbox(label="Correct Answer"),
    ],
    outputs=[
        gr.JSON(label="Results"),  # Display the results in a JSON format
    ],
    title="Math Question Solver",
    description="Enter a math question to get the model prediction and see all generated answers.",
)
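# Optional note (not part of the original code): running ten CPU generations per
# request can be slow, so enabling Gradio's request queue before launching, e.g.
# interface.queue(), is a common way to avoid request timeouts.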

if __name__ == "__main__":
    interface.launch()