fixed UI bugs
- .env.example +1 -1
- app_modules/presets.py +2 -6
.env.example
@@ -23,7 +23,7 @@ DISABLE_MODEL_PRELOADING=false
 CHAT_HISTORY_ENABLED=true
 SHOW_PARAM_SETTINGS=false
 SHARE_GRADIO_APP=false
-PDF_FILE_BASE_URL=https://ai-engd.netlify.app/pdfs/
+PDF_FILE_BASE_URL=https://ai-engd.netlify.app/pdfs/books/
 
 # if unset, default to "hkunlp/instructor-xl"
 HF_EMBEDDINGS_MODEL_NAME="hkunlp/instructor-large"
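For context, a minimal sketch (not part of this commit) of how a base URL such as PDF_FILE_BASE_URL is typically consumed: it is joined with a document filename to build a link back to the source PDF. The get_pdf_url helper and the example filename are illustrative assumptions, not code from the repo.

# Sketch: resolve a source-document link from PDF_FILE_BASE_URL.
import os
from urllib.parse import urljoin

PDF_FILE_BASE_URL = os.environ.get(
    "PDF_FILE_BASE_URL", "https://ai-engd.netlify.app/pdfs/books/"
)

def get_pdf_url(filename: str) -> str:
    # urljoin keeps the trailing "books/" segment because the base ends with "/".
    return urljoin(PDF_FILE_BASE_URL, filename)

print(get_pdf_url("example.pdf"))
# https://ai-engd.netlify.app/pdfs/books/example.pdf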
app_modules/presets.py
@@ -6,16 +6,12 @@ import gradio as gr
 from app_modules.utils import *
 
 using_openai = os.environ.get("LLM_MODEL_TYPE") == "openai"
-href = (
-    "https://openai.com/gpt-4"
-    if using_openai
-    else "https://huggingface.co/lmsys/fastchat-t5-3b-v1.0"
-)
 model = (
     "OpenAI GPT-4" if using_openai else os.environ.get("HUGGINGFACE_MODEL_NAME_OR_PATH")
 )
+href = "https://openai.com/gpt-4" if using_openai else f"https://huggingface.co/{model}"
 
-title = """<h1 align="left" style="min-width:200px; margin-top:0;"> Chat with
+title = """<h1 align="left" style="min-width:200px; margin-top:0;"> Chat with AI Books </h1>"""
 
 description_top = f"""\
 <div align="left">