pseudotensor committed
Commit 5f5e828
1 Parent(s): 82934e3

Update with h2oGPT hash af1d5e70b778aa5e135d71451369908620cc7c0b

gen.py CHANGED
@@ -20,8 +20,6 @@ from urllib3.exceptions import ConnectTimeoutError, MaxRetryError, ConnectionErr
 from requests.exceptions import ConnectionError as ConnectionError2
 from requests.exceptions import ReadTimeout as ReadTimeout2
 
-from evaluate_params import eval_func_param_names, no_default_param_names
-
 if os.path.dirname(os.path.abspath(__file__)) not in sys.path:
     sys.path.append(os.path.dirname(os.path.abspath(__file__)))
 
@@ -29,6 +27,7 @@ os.environ['HF_HUB_DISABLE_TELEMETRY'] = '1'
 os.environ['BITSANDBYTES_NOWELCOME'] = '1'
 warnings.filterwarnings('ignore', category=UserWarning, message='TypedStorage is deprecated')
 
+from evaluate_params import eval_func_param_names, no_default_param_names
 from enums import DocumentChoices, LangChainMode, no_lora_str, model_token_mapping, no_model_str, source_prefix, \
     source_postfix, LangChainAction
 from loaders import get_loaders
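
Note: in gen.py the `from evaluate_params import ...` line is not dropped but moved below the sys.path insertion and the environment setup, so it sits with the other project-local imports. A minimal sketch of why that ordering can matter, assuming gen.py may be launched from a working directory other than its own folder (the sketch is illustrative, not part of the commit):

# illustrative sketch, not part of the commit: sibling-module imports in gen.py
# resolve reliably only once the script's own directory is on sys.path, so
# local imports such as evaluate_params are grouped after the path fix.
import os
import sys

if os.path.dirname(os.path.abspath(__file__)) not in sys.path:
    sys.path.append(os.path.dirname(os.path.abspath(__file__)))

# safe to import the sibling module only after the path fix above
from evaluate_params import eval_func_param_names, no_default_param_names  # noqa: E402
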
gpt_langchain.py CHANGED
@@ -25,7 +25,8 @@ from tqdm import tqdm
 
 from enums import DocumentChoices, no_lora_str, model_token_mapping, source_prefix, source_postfix, non_query_commands, \
     LangChainAction, LangChainMode
-from gen import gen_hyper, get_model, SEED
+from evaluate_params import gen_hyper
+from gen import get_model, SEED
 from prompter import non_hf_types, PromptType, Prompter
 from utils import wrapped_partial, EThread, import_matplotlib, sanitize_filename, makedirs, get_url, flatten_list, \
     get_device, ProgressParallel, remove, hash_file, clear_torch_cache, NullContext, get_hf_server, FakeTokenizer
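
The gpt_langchain.py hunk is the substance of the refactor: `gen_hyper` now comes from `evaluate_params` instead of `gen`, so only `get_model` and `SEED` still require the much heavier gen module. A hypothetical sketch of what a standalone evaluate_params.py might look like, assuming it is a dependency-free home for shared parameter-name lists (the entries shown are illustrative, not copied from h2oGPT):

# evaluate_params.py -- hypothetical sketch; the actual h2oGPT contents may differ.
# Keeping these plain lists in their own module lets gpt_langchain.py import
# gen_hyper without pulling in gen.py, which reduces circular-import pressure.

# names of generation hyperparameters forwarded to model.generate() (illustrative)
gen_hyper = ['num_beams', 'max_new_tokens', 'min_new_tokens', 'early_stopping',
             'max_time', 'repetition_penalty', 'num_return_sequences', 'do_sample']

# ordered parameter names accepted by the evaluate() entry point (illustrative)
eval_func_param_names = ['instruction', 'iinput', 'context', 'stream_output'] + gen_hyper

# parameters that have no default and must always be supplied (illustrative)
no_default_param_names = ['instruction', 'iinput', 'context']
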
gradio_utils/__pycache__/prompt_form.cpython-310.pyc CHANGED
Binary files a/gradio_utils/__pycache__/prompt_form.cpython-310.pyc and b/gradio_utils/__pycache__/prompt_form.cpython-310.pyc differ