from flask import Flask, request, Response, jsonify
from huggingface_hub import InferenceClient
from flask_cors import CORS

# Hugging Face Inference API client for the hosted Mixtral-8x7B-Instruct model.
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")

def format_prompt(message, history):
    """Build a Mixtral instruction-format prompt from the chat history and the new message."""
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST]"
        prompt += f" {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt
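
# Example (illustrative): format_prompt("Hi", [("Hello", "Hey there")]) produces
#   "<s>[INST] Hello [/INST] Hey there</s> [INST] Hi [/INST]"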

def generate(
    prompt, history=None, temperature=0, max_new_tokens=2000, top_p=0.95, repetition_penalty=1.0,
):
    # Clamp temperature to a small positive value; sampling rejects a temperature of 0.
    temperature = float(temperature)
    if temperature < 1e-2:
        temperature = 1e-2
    top_p = float(top_p)

    generate_kwargs = dict(
        temperature=temperature,
        max_new_tokens=max_new_tokens,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
        do_sample=True,
        seed=42,
    )

    # Chat-style prompt formatting is currently disabled; the raw prompt is sent as-is.
    #formatted_prompt = format_prompt(prompt, history or [])

    # Non-streaming call; the streaming variant below is kept (commented out) for reference.
    #stream = client.text_generation(prompt, **generate_kwargs, stream=True, details=False, return_full_text=False)
    response = client.text_generation(prompt, **generate_kwargs, stream=False, details=False, return_full_text=False)
    print(response)
    return response.encode('utf-8')

    #for response in stream:
    #    yield response.token.text.encode('utf-8')

app = Flask(__name__)
CORS(app)  # allow cross-origin requests from browser-based clients

@app.route('/health', methods=['GET'])
def health():
    return jsonify({"status": "ok"})

@app.route('/completion', methods=['POST'])
def completion_route():
    data = request.get_json()
    prompt = data.get('prompt', '')
    temperature = data.get('temperature', 0.0)
    # Truncate overly long prompts (a character-based cap; an earlier revision used 32768).
    return Response(generate(prompt[:52768], temperature=temperature), content_type='text/plain; charset=utf-8', status=200, direct_passthrough=True)

if __name__ == '__main__':
    app.run(debug=False, host='0.0.0.0', port=7860)
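
# A minimal sketch of how a client might call this service (illustrative only,
# assuming the app is running locally on port 7860 as configured above):
#
#   curl -X POST http://localhost:7860/completion \
#        -H 'Content-Type: application/json' \
#        -d '{"prompt": "[INST] Say hello [/INST]", "temperature": 0.7}'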