gmshroff committed
Commit 275dbc9
1 Parent(s): 28a9672

additional error handling in call_gemini

Files changed (2)
  1. app.py +0 -10
  2. library.ipynb +8 -4
app.py CHANGED
@@ -4,7 +4,6 @@ from transformers import AutoTokenizer, AutoModel
 import anvil.server
 import pathlib
 import textwrap
-import google.generativeai as genai
 import import_ipynb
 from library import call_gpt, call_gemini
 from background_service import BackgroundTaskService
@@ -20,9 +19,6 @@ MESSAGED={'title':'API Server',
 tokenizer = AutoTokenizer.from_pretrained('allenai/specter')
 encoder = AutoModel.from_pretrained('allenai/specter')
 
-# GOOGLE_API_KEY=os.getenv('GOOGLE_API_KEY')
-# genai.configure(api_key=GOOGLE_API_KEY)
-
 service=BackgroundTaskService(max_tasks=10)
 service.register(call_gpt)
 service.register(call_gemini)
@@ -50,12 +46,6 @@ def poll(task_id):
         return str(result)
     else: return str(result)
 
-# @anvil.server.callable
-# def call_gemini(text):
-#     model = genai.GenerativeModel('gemini-pro')
-#     response = model.generate_content(text)
-#     return response.text
-
 @anvil.server.callable
 def encode_anvil(text):
     inputs = tokenizer(text, padding=True, truncation=True,
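After this change app.py no longer touches google.generativeai at all; the Gemini path is provided solely by library.ipynb and pulled in through import_ipynb. A minimal sketch of the resulting wiring, using only lines already visible in the diff above with explanatory comments (BackgroundTaskService internals are not part of this commit):

# Sketch of the relevant app.py wiring after this commit (not the full file).
import import_ipynb                          # installs an import hook so library.ipynb imports as module "library"
from library import call_gpt, call_gemini    # call_gemini is now defined only in library.ipynb
from background_service import BackgroundTaskService

service = BackgroundTaskService(max_tasks=10)
service.register(call_gpt)
service.register(call_gemini)                # the registered task now returns (0, text) or (-1, error); see below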
library.ipynb CHANGED
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 2,
+   "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
@@ -10,7 +10,9 @@
    "import openai\n",
    "import pathlib\n",
    "import textwrap\n",
-   "import google.generativeai as genai"
+   "import google.generativeai as genai #comment this for local deployment and uncomment dummy def below\n",
+   "# class genai:\n",
+   "#     pass"
    ]
   },
   {
@@ -28,12 +30,14 @@
    "    response = model.generate_content(text)\n",
    "  except Exception as e:\n",
    "    return -1,str(e)\n",
-   "  return 0,response.text"
+   "  if 'text' in response.__dict__ :\n",
+   "    return 0,response.text\n",
+   "  else: return -1,'no generation'"
    ]
   },
   {
    "cell_type": "code",
-   "execution_count": 3,
+   "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [