sachin committed on
Commit
d152ed5
1 Parent(s): 8243283

migrate to POST for text

Browse files
Files changed (2) hide show
  1. recipes/urls.py +4 -4
  2. recipes/views.py +41 -22
recipes/urls.py CHANGED
@@ -1,10 +1,10 @@
1
  from django.urls import path
2
- from .views import recipe_generate_route, execute_prompt_route_get
3
- from .views import VisionLLMView, NIMVisionLLMView
4
 
5
  urlpatterns = [
6
- path('execute_prompt_get/', execute_prompt_route_get, name='execute_prompt_get'),
7
  path('recipe_generate/', recipe_generate_route, name='recipe_generate'),
8
  path('vision_llm_url/', VisionLLMView.as_view()),
9
- path('nim_vision_llm_url/', NIMVisionLLMView.as_view()),
 
10
  ]
 
1
  from django.urls import path
2
+ from .views import recipe_generate_route
3
+ from .views import VisionLLMView, NIMVisionLLMView, TextLLMView
4
 
5
  urlpatterns = [
 
6
  path('recipe_generate/', recipe_generate_route, name='recipe_generate'),
7
  path('vision_llm_url/', VisionLLMView.as_view()),
8
+ path('nim_vision_llm_url/', NIMVisionLLMView.as_view()),
9
+ path('text_llm_url/', TextLLMView.as_view()),
10
  ]
recipes/views.py CHANGED
@@ -10,19 +10,43 @@ import base64
10
  import json
11
  import requests
12
 
13
- class PromptSerializer(serializers.Serializer):
14
- prompt = serializers.CharField()
15
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
 
17
- @api_view(['GET'])
18
- def execute_prompt_route_get(request):
19
- prompt = request.query_params.get('prompt', None)
20
- print(prompt)
21
- if prompt is None:
22
- return Response({"error": "No prompt provided"}, status=400)
23
- is_local = False
24
- result = execute_prompt(prompt, is_local)
25
- return Response(result)
26
 
27
 
28
  @api_view(['GET'])
@@ -46,23 +70,16 @@ def recipe_generate_route(request):
46
  class VisionLLMView(APIView):
47
  def post(self, request, format=None):
48
  data = request.data
49
-
50
- #print(data)
51
- # Retrieve the API key from environment variables
52
  api_key = os.environ["MISTRAL_API_KEY"]
53
 
54
- # Specify model
55
- model = "pixtral-12b-2409"
56
-
57
  # Initialize the Mistral client
58
  client = Mistral(api_key=api_key)
59
 
60
- # Decode the base64 image
61
- #image_data = base64.b64decode(data['image'])
62
- #image_data = base64.b64decode(data['messages'][0]['image'][0])
63
  image_data = (data['messages'][0]['image'][0])
64
  prompt = data['messages'][0]['prompt']
65
-
 
 
66
  # Define the messages for the chat
67
  messages = [
68
  {
@@ -99,6 +116,8 @@ class NIMVisionLLMView(APIView):
99
  stream = False
100
  api_key = os.environ["NIM_API_KEY"]
101
  data = request.data
 
 
102
  image_data = (data['messages'][0]['image'][0])
103
  prompt = data['messages'][0]['prompt']
104
  headers = {
@@ -106,7 +125,7 @@ class NIMVisionLLMView(APIView):
106
  "Accept": "text/event-stream" if stream else "application/json"
107
  }
108
  payload = {
109
- "model": 'meta/llama-3.2-11b-vision-instruct',
110
  "messages": [
111
  {
112
  "role": "user",
 
10
  import json
11
  import requests
12
 
 
 
13
 
14
+ class TextLLMView(APIView):
15
+ def post(self, request, format=None):
16
+ data = request.data
17
+ api_key = os.environ["MISTRAL_API_KEY"]
18
+
19
+ # Initialize the Mistral client
20
+ client = Mistral(api_key=api_key)
21
+
22
+ prompt = data['messages'][0]['prompt']
23
+ # Specify model
24
+ #model = "pixtral-12b-2409"
25
+ model = data['model']
26
+ # Define the messages for the chat
27
+ messages = [
28
+ {
29
+ "role": "user",
30
+ "content": [
31
+ {
32
+ "type": "text",
33
+ "text": prompt
34
+ }
35
+ ]
36
+ }
37
+ ]
38
+
39
+ # Get the chat response
40
+ chat_response = client.chat.complete(
41
+ model=model,
42
+ messages=messages
43
+ )
44
+
45
+ content = chat_response.choices[0].message.content
46
+ #print(chat_response.choices[0].message.content)
47
+ # Return the content of the response
48
+ return Response({"response": content})
49
 
 
 
 
 
 
 
 
 
 
50
 
51
 
52
  @api_view(['GET'])
 
70
  class VisionLLMView(APIView):
71
  def post(self, request, format=None):
72
  data = request.data
 
 
 
73
  api_key = os.environ["MISTRAL_API_KEY"]
74
 
 
 
 
75
  # Initialize the Mistral client
76
  client = Mistral(api_key=api_key)
77
 
 
 
 
78
  image_data = (data['messages'][0]['image'][0])
79
  prompt = data['messages'][0]['prompt']
80
+ # Specify model
81
+ #model = "pixtral-12b-2409"
82
+ model = data['model']
83
  # Define the messages for the chat
84
  messages = [
85
  {
 
116
  stream = False
117
  api_key = os.environ["NIM_API_KEY"]
118
  data = request.data
119
+ model = data['model']
120
+ print(model)
121
  image_data = (data['messages'][0]['image'][0])
122
  prompt = data['messages'][0]['prompt']
123
  headers = {
 
125
  "Accept": "text/event-stream" if stream else "application/json"
126
  }
127
  payload = {
128
+ "model": model,
129
  "messages": [
130
  {
131
  "role": "user",